text stringlengths 8 4.13M |
|---|
//! ARM64 drivers
pub use self::board::serial;
use super::board;
/// Initialize ARM64 common drivers.
///
/// Order matters: board-specific drivers come up first so the console
/// backend they provide exists before the generic console is initialized.
pub fn init() {
board::init_driver();
crate::drivers::console::init();
}
|
use std::time::SystemTime;
use ::rand::SeedableRng;
use kiss3d::{
light::Light,
nalgebra::{Translation, UnitQuaternion, Vector3},
window::Window,
};
use macroquad::prelude::*;
use rand_chacha::ChaCha8Rng;
use spawners::*;
use sprites::Sprites;
use systems::*;
mod ai_person;
mod building;
mod components;
mod spawners;
mod sprites;
mod systems;
mod traits;
#[macroquad::main("City Sim")]
async fn main() {
// `Instant` is monotonic; `SystemTime::elapsed()` returns a `Result` and can
// fail (or go backwards) when the wall clock is adjusted, which made the
// old frame-timing code panic-prone.
use std::time::Instant;
// Fixed seed so simulation runs are reproducible.
let mut rng = ChaCha8Rng::seed_from_u64(2);
let mut sprites = Sprites::default();
let ai_texture_index = sprites.add_sprite_from_path("textures/ai_player.png").await;
let building_texture_index = sprites
    .add_sprite_from_path("textures/ai_travel_point.png")
    .await;
let mut ai_people = spawn_ai_people(5, ai_texture_index, &mut rng);
let buildings = spawn_buildings(5, building_texture_index, &mut rng);
///////////////////////////////////////////
let mut window = Window::new("Kiss3d: cube");
let mut c = window.add_cube(1.0, 1.0, 1.0);
let mut d = window.add_cube(1.0, 1.0, 1.0);
c.set_color(1.0, 0.0, 0.0);
d.set_color(0.0, 1.0, 0.0);
window.set_light(Light::StickToCamera);
let rot = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), 0.014);
// Retained for the (currently commented-out) macroquad loop below, which
// consumes a per-frame duration.
let mut frame_time = 1u128;
while window.render() {
    let now = Instant::now();
    c.prepend_to_local_rotation(&rot);
    frame_time = now.elapsed().as_millis();
}
///////////////////////////////////////////
// loop {
//     if is_key_pressed(KeyCode::F) {
//         println!("FPS: {}", get_fps());
//     }
//     clear_background(LIGHTGRAY);
//     let frame_time = get_frame_time();
//     ai_people.iter().for_each(|p| draw(p, &sprites));
//     buildings.iter().for_each(|p| draw(p, &sprites));
//     assign_travel_to_randomly(&mut ai_people, &buildings, &mut rng);
//     travel(&mut ai_people, frame_time);
//     idle_calorie_burn(&mut ai_people, frame_time);
//     next_frame().await
// }
}
|
use engine::Event;
/// Abstraction over a source of engine events (keyboard, network, replay, …).
pub trait Input {
/// Returns the next available [`Event`].
/// NOTE(review): whether this blocks or polls is implementation-defined —
/// confirm against the implementors before relying on either.
fn next_event(&mut self) -> Event;
}
use async_trait::async_trait;
use bonsaidb_core::{
custom_api::CustomApi,
kv::Kv,
networking::{DatabaseRequest, DatabaseResponse, Request, Response},
schema::Schema,
};
#[async_trait]
impl<DB, A> Kv for super::RemoteDatabase<DB, A>
where
DB: Schema,
A: CustomApi,
{
/// Forwards a key-value operation to the remote server and unwraps the
/// server's response into the operation's output.
async fn execute_key_operation(
    &self,
    op: bonsaidb_core::kv::KeyOperation,
) -> Result<bonsaidb_core::kv::Output, bonsaidb_core::Error> {
    match self
        .client
        .send_request(Request::Database {
            database: self.name.to_string(),
            request: DatabaseRequest::ExecuteKeyOperation(op),
        })
        .await?
    {
        // The server executed the operation and returned its output.
        Response::Database(DatabaseResponse::KvOutput(output)) => Ok(output),
        // Server-side error, forwarded verbatim.
        Response::Error(err) => Err(err),
        // Any other response variant violates the protocol for this request
        // type; surface it as a networking error with the payload attached.
        other => Err(bonsaidb_core::Error::Networking(
            bonsaidb_core::networking::Error::UnexpectedResponse(format!("{:?}", other)),
        )),
    }
}
}
|
use crate::first;
use crate::method::StdMethod;
use crate::name::Name;
use crate::operator::Operator;
use crate::runtime::Runtime;
use crate::string_var::StringVar;
use crate::variable::{FnResult, OptionVar, Variable};
/// Renders an option as user-facing text: `Some(inner)` / `null` at depth 1,
/// with one additional `Some(...)` wrapper per extra nesting level.
pub fn str(this: OptionVar, runtime: &mut Runtime) -> Result<StringVar, ()> {
    let depth = this.depth;
    let rendered = match this.value {
        Option::Some(x) => {
            let inner = x.str(runtime)?;
            if depth == 1 {
                format!("Some({})", inner).into()
            } else {
                fold_some(depth, &*inner)
            }
        }
        Option::None => {
            if depth == 1 {
                "null".into()
            } else {
                fold_some(depth, "null")
            }
        }
    };
    Result::Ok(rendered)
}
/// Like [`str`], but uses the inner value's `repr` rendering instead.
pub fn repr(this: OptionVar, runtime: &mut Runtime) -> Result<StringVar, ()> {
    let depth = this.depth;
    let rendered = match this.value {
        Option::Some(x) => {
            let inner = x.repr(runtime)?;
            if depth == 1 {
                format!("Some({})", inner).into()
            } else {
                fold_some(depth, &*inner)
            }
        }
        Option::None => {
            if depth == 1 {
                "null".into()
            } else {
                fold_some(depth, "null")
            }
        }
    };
    Result::Ok(rendered)
}
/// Hashes the option: an empty option hashes to 0, otherwise the hash of
/// the contained value.
pub fn hash(this: OptionVar, runtime: &mut Runtime) -> Result<usize, ()> {
    if let Option::Some(value) = Option::<Variable>::from(this) {
        value.hash(runtime)
    } else {
        Result::Ok(0)
    }
}
/// Wraps `x` in `i` layers of `Some(...)`, e.g. `i = 2` -> `"Some(Some(x))"`.
fn fold_some(i: usize, x: &str) -> StringVar {
    format!("{}{}{}", "Some(".repeat(i), x, ")".repeat(i)).into()
}
/// Resolves a named attribute on an option to a native method bound to
/// `this`; unknown attributes are not yet supported.
pub fn get_attr(this: OptionVar, attr: &str) -> Variable {
    let implementation = match attr {
        "map" => map_fn,
        "flatMap" => flat_map,
        _ => unimplemented!("Option.{}", attr),
    };
    StdMethod::new_native(this, implementation).into()
}
/// Resolves an operator on an option to a native method bound to `this`;
/// operators without an implementation are not yet supported.
pub fn get_op(this: OptionVar, op: Operator) -> Variable {
    let implementation = match op {
        Operator::Str => to_str,
        Operator::Repr => to_repr,
        Operator::Hash => to_hash,
        _ => unimplemented!("Option.{}", op.name()),
    };
    StdMethod::new_native(this, implementation).into()
}
/// Indexes an option by name, dispatching to operator or attribute lookup.
pub fn index(this: OptionVar, name: Name) -> Variable {
    match name {
        Name::Operator(op) => get_op(this, op),
        Name::Attribute(attr) => get_attr(this, attr),
    }
}
/// Resolves the operator to a bound method and invokes it immediately with
/// the given arguments.
pub fn call_op(
    this: OptionVar,
    op: Operator,
    args: Vec<Variable>,
    runtime: &mut Runtime,
) -> FnResult {
    let bound = get_op(this, op);
    bound.call((args, runtime))
}
/// `Option.map`: applies the single callback argument to the contained value
/// and rewraps the result; an empty option passes through untouched.
fn map_fn(this: OptionVar, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let maybe: Option<Variable> = this.into();
    let mapped = if let Option::Some(val) = maybe {
        first(args).call((vec![val], runtime))?;
        Option::Some(runtime.pop_return())
    } else {
        Option::None
    };
    runtime.return_1(mapped.into())
}
/// `Option.flatMap`: applies the callback and returns its result as-is
/// (no extra option layer); an empty option yields an empty option.
fn flat_map(this: OptionVar, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert_eq!(args.len(), 1);
    let maybe: Option<Variable> = this.into();
    match maybe {
        Option::Some(val) => {
            first(args).call((vec![val], runtime))?;
            let flattened = runtime.pop_return();
            runtime.return_1(flattened)
        }
        Option::None => runtime.return_1(Option::None.into()),
    }
}
/// Native wrapper for the `str` operator; takes no arguments.
fn to_str(this: OptionVar, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let rendered = str(this, runtime)?;
    runtime.return_1(rendered.into())
}
/// Native wrapper for the `repr` operator; takes no arguments.
fn to_repr(this: OptionVar, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let rendered = repr(this, runtime)?;
    runtime.return_1(rendered.into())
}
/// Native wrapper for the `hash` operator; takes no arguments.
fn to_hash(this: OptionVar, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
    debug_assert!(args.is_empty());
    let hashed = hash(this, runtime)?;
    runtime.return_1(hashed.into())
}
#[cfg(test)]
mod test {
use crate::builtin_functions::option_fn::{to_repr, to_str};
use crate::int_var::IntVar;
use crate::runtime::Runtime;
use crate::string_var::StringVar;
use num::One;
/// `str` renders `Some(1)` as "Some(1)" and an empty option as "null".
#[test]
fn string() {
    let some = Option::Some(IntVar::one().into());
    let none = Option::None;
    let result = Runtime::test(|runtime| to_str(some.into(), vec![], runtime));
    assert_eq!(result, Result::Ok(StringVar::from("Some(1)").into()));
    let result = Runtime::test(|runtime| to_str(none.into(), vec![], runtime));
    assert_eq!(result, Result::Ok(StringVar::from("null").into()));
}
/// `repr` currently matches `str` for these inputs.
#[test]
fn repr() {
    let some = Option::Some(IntVar::one().into());
    let none = Option::None;
    let result = Runtime::test(|runtime| to_repr(some.into(), vec![], runtime));
    assert_eq!(result, Result::Ok(StringVar::from("Some(1)").into()));
    let result = Runtime::test(|runtime| to_repr(none.into(), vec![], runtime));
    assert_eq!(result, Result::Ok(StringVar::from("null").into()));
}
}
|
// Copyright 2020 The VectorDB Authors.
//
// Code is licensed under Apache License, Version 2.0.
use crate::datums::Datum;
use crate::errors::Error;
use super::*;
/// Common interface for expression nodes: evaluate to a concrete [`Datum`].
pub trait IExpression {
/// Evaluates the expression, producing a `Datum` or an evaluation error.
fn eval(&self) -> Result<Datum, Error>;
}
/// An expression AST node; each variant wraps its concrete implementation.
pub enum Expression {
Constant(ConstantExpression),
Variable(VariableExpression),
Binary(BinaryExpression),
}
// Conversions so each concrete expression type can be used wherever a
// generic `Expression` is expected (e.g. via `.into()`).
impl From<ConstantExpression> for Expression {
fn from(v: ConstantExpression) -> Self {
    Expression::Constant(v)
}
}
impl From<VariableExpression> for Expression {
fn from(v: VariableExpression) -> Self {
    Expression::Variable(v)
}
}
impl From<BinaryExpression> for Expression {
fn from(v: BinaryExpression) -> Self {
    Expression::Binary(v)
}
}
impl IExpression for Expression {
/// Evaluates by delegating to the wrapped concrete expression.
fn eval(&self) -> Result<Datum, Error> {
    match self {
        Expression::Constant(v) => v.eval(),
        Expression::Variable(v) => v.eval(),
        Expression::Binary(v) => v.eval(),
    }
}
}
|
// mod ptrs;
use std::fmt;
use std::mem;
#[macro_use]
extern crate log;
/// Minimal map interface: create, insert, remove, and borrow by key.
pub trait Dict<T> {
/// The key type used by the implementation.
type K;
/// Returns an empty dictionary.
fn empty() -> Self;
/// Inserts `val` under `key`, replacing any existing value.
fn insert(&mut self, key: Self::K, val: T);
/// Removes and returns the value stored under `key`, if present.
fn remove(&mut self, key: &Self::K) -> Option<T>;
/// Borrows the value stored under `key`, if present.
fn lookup(&self, key: &Self::K) -> Option<&T>;
}
/// A PATRICIA-style binary trie keyed by `u64`.
/// NOTE(review): `branch_bit` picks the LOWEST differing bit of two
/// prefixes, i.e. the little-endian (Okasaki/Gill) variant — confirm
/// before documenting ordering properties.
#[derive(Clone,Debug,PartialOrd,Ord,PartialEq,Eq,Hash)]
pub enum Trie<T> {
/// No entries.
Empty,
// Key * T
/// A single key/value pair.
Lf(u64, T),
// Prefix * Mask * left * right
/// Interior node: keys agree with `prefix` below the branch bit `mask`;
/// a zero branch bit selects the left child.
Br(u64, u64, Box<Trie<T>>, Box<Trie<T>>),
}
// The trace! invocations end up with a grammar that matches: '^(e|B*[Llb])$'
// start := empty | tree
// empty := Empty
// tree := Matching branch
//
impl<T: fmt::Debug> Trie<T> {
/// Inserts `val` under `key`: overwrites a leaf with the same key, or
/// splits/grows a branch at the first differing prefix bit otherwise.
fn ins(&mut self, key: u64, val: T) {
    // debug!("#insert: {:?} <- {:?}={:?}", self, key, val);
    match self {
        // Empty trie: become a leaf.
        &mut Trie::Empty => {
            trace!("e");
            *self = Trie::Lf(key, val);
        }
        // Exact key match: overwrite the value in place.
        &mut Trie::Lf(k, ref mut v) if k == key => {
            trace!("L");
            *v = val;
        }
        // Different key: split into a branch holding both leaves.
        &mut Trie::Lf(_, _) => {
            trace!("l");
            self.join(Trie::Lf(key, val));
        }
        // Key shares this branch's prefix: recurse into the child selected
        // by the branch bit.
        &mut Trie::Br(p, m, ref mut l, ref mut r) if Self::match_prefix(key, p, m) => {
            trace!("B");
            let leftp = Self::zerobit(key, m);
            // debug!("zerobit({:#b}, {:#b}) => {:?}; branch:{:?}", key, m, leftp, if leftp { &*l } else { &*r });
            if leftp {
                l.ins(key, val);
            } else {
                r.ins(key, val);
            };
        }
        // Prefix mismatch: the new leaf diverges above this branch.
        &mut Trie::Br(_, _, _, _) => {
            trace!("b");
            self.join(Trie::Lf(key, val));
        }
    };
    // debug!("#inserted: {:?}", new);
}
/// True if `key`'s branch bit (`msk`) is zero — the key belongs left.
fn zerobit(key: u64, msk: u64) -> bool {
    key & msk == 0
}
/// Keeps only the bits of `key` strictly below the (power-of-two)
/// branch bit `msk`.
fn mask(key: u64, msk: u64) -> u64 {
    let mask = msk - 1;
    key & mask
}
/// Lowest bit at which `a` and `b` differ, as a single set bit.
fn branch_bit(a: u64, b: u64) -> u64 {
    let diff = a ^ b;
    // Isolate the lowest set bit of the xor: x & (!x + 1) == x & -x.
    let bb = diff & (!diff + 1);
    // debug!("branch_bit: a:{:#b}; b:{:#b}; diff:{:#b}; bb:{:#b}", a, b, diff, bb);
    assert_eq!(bb.count_ones(), 1);
    assert_eq!(Self::mask(a, bb), Self::mask(b, bb));
    bb
}
/// Merges `t1` into `self` by creating a branch at the first differing
/// prefix bit; whichever side has a zero bit there becomes the left child.
fn join(&mut self, t1: Self) {
    // debug!("join:{:#b}:{:?}; {:#b}:{:?}", p0, self, p1, t1);
    let t0 = mem::replace(self, Trie::Empty);
    let p0 = t0.prefix();
    let p1 = t1.prefix();
    let m = Self::branch_bit(p0, p1);
    // debug!("join branch mask:{:?}; samep: {:?}", m, Self::zerobit(p0, m));
    if Self::zerobit(p0, m) {
        *self = Self::br(Self::mask(p0, m), m, Box::new(t0), Box::new(t1))
    } else {
        *self = Self::br(Self::mask(p0, m), m, Box::new(t1), Box::new(t0))
    };
    // debug!("join: => {:?}", self );
}
/// Representative prefix of a node: the key for leaves, the stored
/// prefix for branches, and 0 for the empty trie.
fn prefix(&self) -> u64 {
    match self {
        &Trie::Empty => 0,
        &Trie::Lf(k, _) => k,
        &Trie::Br(p, _, _, _) => p,
    }
}
/// True if `k` agrees with prefix `p` on all bits below branch bit `m`.
fn match_prefix(k: u64, p: u64, m: u64) -> bool {
    Self::mask(k, m) == p
}
/// Smart constructor: builds a branch but collapses away empty children
/// so no `Br` node ever holds an `Empty` side.
fn br(prefix: u64, mask: u64, left: Box<Trie<T>>, right: Box<Trie<T>>) -> Self {
    match (&*left, &*right) {
        (&Trie::Empty, &Trie::Empty) => Trie::Empty,
        (&Trie::Empty, _) => *right,
        (_, &Trie::Empty) => *left,
        (_, _) => Trie::Br(prefix, mask, left, right),
    }
}
/// Removes `key` from the subtree and returns its value; calls
/// `canonify` on success so structure stays canonical.
fn del(&mut self, key: &u64) -> Option<T> {
    // debug!("#delert: {:?} <- {:?}", self, key);
    let removed = match self {
        &mut Trie::Empty => None,
        // Matching leaf: swap in Empty and move the value out.
        &mut Trie::Lf(_, _) if &self.prefix() == key => {
            if let Trie::Lf(_, val) = mem::replace(self, Trie::Empty) {
                Some(val)
            } else {
                unreachable!()
            }
        }
        &mut Trie::Lf(_, _) => None,
        // Branch whose prefix matches: recurse into the selected child.
        &mut Trie::Br(p, m, ref mut l, ref mut r) if Self::match_prefix(*key, p, m) => {
            let leftp = Self::zerobit(*key, m);
            // debug!("zerobit({:#b}, {:#b}) => {:?}; branch:{:?}", key, m, leftp, if leftp { l } else { r });
            if leftp {
                l.del(key)
            } else {
                r.del(key)
            }
        }
        // Prefix mismatch: the key cannot be in this subtree.
        &mut Trie::Br(_, _, _, _) => None,
    };
    // debug!("#delerted: {:?}", new);
    if let Some(_) = removed {
        self.canonify();
    }
    removed
}
/// Collapses a branch whose children became empty after a removal so
/// equal key sets always yield structurally-equal tries.
fn canonify(&mut self) {
    let t = mem::replace(self, Trie::Empty);
    let new = match t {
        Trie::Br(p, m, l, r) => {
            match (*l, *r) {
                (Trie::Empty, Trie::Empty) => (Trie::Empty),
                (Trie::Empty, r) => (r),
                (l, Trie::Empty) => (l),
                (l, r) => (Trie::Br(p, m, Box::new(l), Box::new(r))),
            }
        }
        val => (val),
    };
    *self = new;
}
}
impl<T: Clone + fmt::Debug> Dict<T> for Trie<T> {
    type K = u64;
    /// An empty trie.
    fn empty() -> Self {
        Trie::Empty
    }
    /// Inserts `val` under `key`, overwriting any existing entry.
    fn insert(&mut self, key: Self::K, val: T) {
        self.ins(key, val);
    }
    /// Looks up `key`, walking branches by testing the branch bit.
    fn lookup(&self, key: &Self::K) -> Option<&T> {
        // debug!("#lookup: {:?} <- {:#b}", self, key);
        match self {
            &Trie::Empty => None,
            &Trie::Lf(k, ref v) if k == *key => Some(v),
            &Trie::Lf(_, _) => None,
            // Prefix mismatch: the key cannot be in this subtree.
            &Trie::Br(p, m, _, _) if !Self::match_prefix(*key, p, m) => None,
            &Trie::Br(_, m, ref l, ref r) => {
                // A zero branch bit selects the left subtree.
                let branch = if Self::zerobit(*key, m) { l } else { r };
                branch.lookup(key)
            }
        }
    }
    /// Removes and returns the value stored under `key`, if any.
    fn remove(&mut self, key: &Self::K) -> Option<T> {
        // `del` already re-canonifies; the previous redundant local binding
        // added nothing, so return its result directly.
        self.del(key)
    }
}
#[cfg(test)]
mod tests {
extern crate quickcheck;
extern crate env_logger;
use std::collections::{BTreeMap, BTreeSet};
use super::{Trie, Dict};
use self::quickcheck::TestResult;
// NOTE(review): SipHasher is deprecated in modern Rust; kept as-is here to
// avoid changing which hasher the equality checks run through.
use std::hash::{SipHasher, Hash, Hasher};
/// Lookup agrees with `BTreeMap` for arbitrary insert sequences.
#[test]
fn it_works() {
    env_logger::init().unwrap_or(());
    fn prop_works(insert: Vec<(u64, u64)>, probe: u64) -> () {
        let mut d = Trie::empty();
        let mut m = BTreeMap::new();
        for (k, v) in insert {
            println!("");
            d.insert(k, v);
            m.insert(k, v);
        }
        debug!("m: {:?}; d: {:?}", m, d);
        let mres = m.get(&probe);
        let res = d.lookup(&probe);
        debug!("eq? {:?}", res == mres);
        assert_eq!(res, mres);
    }
    quickcheck::quickcheck(prop_works as fn(Vec<(u64, u64)>, u64) -> ());
}
/// Removal agrees with `BTreeMap`, both in returned values and in
/// subsequent lookups.
#[test]
fn should_add_remove() {
    env_logger::init().unwrap_or(());
    fn prop_works(insert: Vec<(u64, u64)>, remove: Vec<u64>, probe: u64) -> () {
        debug!("{:?}", (&insert, &remove, &probe));
        let mut d = Trie::empty();
        let mut m = BTreeMap::new();
        for (k, v) in insert {
            d.insert(k, v);
            m.insert(k, v);
        }
        debug!("m: {:?}; d: {:?}", m, d);
        let mut ours = Vec::new();
        let mut theirs = Vec::new();
        for k in remove {
            ours.push(d.remove(&k));
            theirs.push(m.remove(&k));
        }
        let mres = m.get(&probe);
        let res = d.lookup(&probe);
        debug!("eq? {:?}", res == mres);
        debug!("removed {:?} == {:?} -> {:?}", ours, theirs, ours == theirs);
        debug!("");
        assert_eq!((res, ours), (mres, theirs));
    }
    quickcheck::quickcheck(prop_works as fn(Vec<(u64, u64)>, Vec<u64>, u64) -> ());
}
/// The trie's structure (and hash) is independent of insertion order.
#[test]
fn canonical_under_permutation() {
    env_logger::init().unwrap_or(());
    fn prop_works(insert: Vec<u64>, swaps: Vec<(usize, usize)>) -> TestResult {
        if insert.len() == 0 {
            return TestResult::discard();
        }
        println!("{:?}", (&insert, &swaps));
        let mut permuted = insert.clone();
        let len = permuted.len();
        for (a, b) in swaps {
            permuted.swap(a % len, b % len);
        }
        // Only interesting when the permutation actually changed the order.
        if insert == permuted {
            return TestResult::discard();
        }
        println!("insert: {:?}; permuted: {:?}", insert, permuted);
        let mut a = Trie::empty();
        for k in insert {
            a.insert(k, k);
        }
        let mut b = Trie::empty();
        for k in permuted {
            b.insert(k, k);
        }
        println!("orig-order: {:?}; permuted-order: {:?}", a, b);
        println!("eq? {:?}", a == b);
        println!("");
        assert_eq!(a, b);
        assert_eq!(hash(&a), hash(&b));
        TestResult::from_bool(a == b)
    }
    quickcheck::quickcheck(prop_works as fn(Vec<u64>, Vec<(usize, usize)>) -> TestResult);
}
/// Removing keys yields the same structure as never inserting them.
#[test]
fn canonical_under_removal() {
    env_logger::init().unwrap_or(());
    fn prop_works(insert: Vec<u64>, removals: BTreeSet<u64>) -> TestResult {
        debug!("{:?}", (&insert, &removals));
        let mut a = Trie::empty();
        let mut b = Trie::empty();
        for k in insert.iter().filter(|v| !removals.contains(v)) {
            a.insert(*k, *k);
        }
        debug!("no-add: {:?}", a);
        for k in insert.iter() {
            b.insert(*k, *k);
        }
        debug!("all-added: {:?}", b);
        for r in removals {
            b.remove(&r);
        }
        debug!("all-removed: {:?}", b);
        debug!("no-add: {:?}; add+remove: {:?}", a, b);
        debug!("eq? {:?}", a == b);
        debug!("");
        assert_eq!(a, b);
        assert_eq!(hash(&a), hash(&b));
        TestResult::from_bool(a == b)
    }
    quickcheck::quickcheck(prop_works as fn(Vec<u64>, BTreeSet<u64>) -> TestResult);
}
/// Hash helper; equal tries must hash equally under the same hasher.
fn hash<T: Hash>(val: T) -> u64 {
    let mut h = SipHasher::new();
    val.hash(&mut h);
    h.finish()
}
}
|
use std::path::*;
use std::env::Args;
use std::process::Command;
use std::os::unix::prelude::CommandExt;
use crate::config;
/// Runs the executable `bin` from the configured Erlang installation's `bin`
/// directory, replacing the current process image (`exec`) and forwarding
/// `args` verbatim.
pub fn run(bin: &str, args: Args) {
    // no -c argument available in this case
    let erl_dir = config::erl_to_use();
    let cmd = Path::new(&erl_dir).join("bin").join(bin);
    // Convert once instead of unwrapping twice; the path is built from UTF-8
    // inputs, so this should not fail in practice.
    let cmd = cmd.to_str().expect("erl command path is not valid UTF-8");
    debug!("running {}", cmd);
    // exec() only returns on failure; the error is deliberately ignored
    // (best-effort hand-off, matching the original behavior).
    let _ = Command::new(cmd).args(args).exec();
}
|
//! Module for all game lobby code. The Lobby represents the interface between
//! the actual game, the database, and player connections.
mod client;
mod lobby_impl;
use crate::game::{snapshot::GameSnapshot, Action, Message};
pub use lobby_impl::Lobby;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use tokio::sync::{mpsc::Sender, oneshot};
use tokio::task;
use warp::filters::ws::WebSocket;
/// Type representing a oneshot sender back to the caller.
pub type ResponseChannel = oneshot::Sender<LobbyResponse>;
/// Type representing a channel to the lobby to issue commands.
/// Each command may carry an optional oneshot channel for its reply.
pub type LobbyChannel = Sender<(LobbyCommand, Option<ResponseChannel>)>;
/// Enum of possible lobby-related errors.
#[derive(Debug, Error)]
pub enum LobbyError {
#[error("An error occurred while updating the database.")]
DatabaseError,
#[error("The player is already in the game.")]
DuplicatePlayerError,
#[error("A lobby update was attemped in a state that doesn't permit this update.")]
InvalidStateError,
#[error("An unknown error occurred. See logs for details.")]
UnknownError,
#[error("Client ID is not registered for the game.")]
InvalidClientID,
#[error("The player tried to reconnect with a new name.")]
NameChangeOnReconnectError,
#[error("The display name is already in use.")]
DuplicateDisplayName,
}
/// Enum of available commands to send to the lobby.
pub enum LobbyCommand {
/// Register a new player under the given display name.
AddPlayer {
    player_id: String,
    display_name: String,
},
/// Ask for the lobby's shareable friend code.
GetFriendCode,
/// Check whether a client ID belongs to this game.
IsClientRegistered {
    client_id: String,
},
/// Attach a client's websocket to its lobby-side channels.
ConnectClientChannels {
    client_id: String,
    ws: WebSocket,
},
/// Liveness check from a connected client.
Ping {
    client_id: String,
},
/// Request the current lobby state for a client.
GetLobbyState {
    client_id: String,
},
StartGame,
EndGame,
/// A client's connection was lost or closed.
PlayerDisconnect {
    client_id: String,
},
GetPlayerList {
    client_id: String,
},
GetSnapshots {
    client_id: String,
},
/// A client tabbed in/out of the game window.
PlayerFocusChange {
    client_id: String,
    is_tabbed_out: bool,
},
}
/// Enum of possible responses from the lobby.
#[derive(Debug)]
pub enum LobbyResponse {
/// Success/failure of a command with no other payload.
Standard(Result<(), LobbyError>),
/// The command produces no response.
None,
/// Result of a join attempt; `Ok` carries the assigned identifier.
JoinGame(Result<String, LobbyError>),
FriendCode(String),
IsClientRegistered(bool),
}
/// An incoming message from the client.
/// Deserialized from a `{ "messageType": ..., "data": ... }` JSON envelope.
#[derive(Deserialize)]
#[serde(tag = "messageType", content = "data")]
enum IncomingMessage {
Ping,
StartGame,
GetLobbyState,
/// A game action to forward to the game engine.
GameCommand(Action),
GetPlayerList,
GetSnapshot,
/// `true` when the player tabbed out of the game window.
PlayerFocusChange(bool),
}
/// An outgoing message to the client.
/// Serialized as a `{ "messageType": ..., "data": ... }` JSON envelope.
#[derive(Serialize)]
#[serde(tag = "messageType", content = "data")]
pub enum OutgoingMessage {
Pong(String),
PlayerList(Vec<String>),
LobbyState(LobbyState),
GameMessage(Message),
Snapshot(GameSnapshot),
// NOTE(review): fields are deliberately camelCase so the serialized JSON
// matches the client; consider snake_case fields plus
// `#[serde(rename_all = "camelCase")]` to silence naming warnings.
PlayerFocusChange {
    displayName: String,
    isTabbedOut: bool,
},
}
/// The lobby's lifecycle phase, serialized under a `"state"` tag.
#[derive(Serialize, Eq, PartialEq, Clone)]
#[serde(tag = "state")]
pub enum LobbyState {
/// Waiting for players; the game has not started.
Lobby,
/// A game is in progress.
Game,
/// The game has ended.
Finished,
}
|
/*!
```rudra-poc
[target]
crate = "livesplit-core"
version = "0.11.0"
[report]
issue_url = "https://github.com/LiveSplit/livesplit-core/issues/400"
issue_date = 2021-01-26
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "UninitExposure"
bug_count = 2
rudra_report_locations = [
"src/run/parser/llanfair.rs:42:1: 52:2",
"src/run/parser/llanfair.rs:55:1: 203:2",
]
```
!*/
#![forbid(unsafe_code)]
fn main() {
    // Placeholder: the upstream issue was filed without a proof-of-concept,
    // so this binary exists only to carry the report metadata above.
    panic!("This issue was reported without PoC");
}
|
use super::MacroValue;
use std::fs::File;
use std::error::Error;
use std::io::prelude::*;
/// Implements the `include` macro: reads the file named by the single
/// string parameter and returns its contents as a `STRING` value.
///
/// Returns an error if the parameter is not a string or the file cannot
/// be opened/read; panics if the arity is wrong.
pub fn include(params: Vec<MacroValue>) -> Result<MacroValue, Box<Error>> {
    assert_eq!(params.len(), 1, "include expects exactly one parameter");
    let mut file = File::open(params[0].clone().try_as_string()?)?;
    let mut buffer = String::new();
    file.read_to_string(&mut buffer)?;
    // Tail expression instead of an explicit `return`.
    Ok(MacroValue::STRING(buffer))
}
pub fn eq(params: Vec<MacroValue>) -> Result<MacroValue, Box<Error>> {
Ok(MacroValue::BOOL(params[0] == params[1]))
} |
use crate::models::{Item, ItemColor, ItemId, ItemImageList, ItemType, RelatedItem};
use actix_web::{error, web, HttpResponse, Result};
use anyhow::anyhow;
use database::models::{
Comic as DatabaseComic, Item as DatabaseItem, RelatedItem as RelatedDatabaseItem,
};
use database::DbPool;
use std::convert::{TryFrom, TryInto};
/// GET handler: full details for the item with the given id, including
/// first/last appearance, appearance count, and presence percentage across
/// all comics. Returns 404 for unknown ids, 500 on database failures.
pub(crate) async fn by_id(
    pool: web::Data<DbPool>,
    item_id: web::Path<ItemId>,
) -> Result<HttpResponse> {
    let item_id = item_id.into_inner();
    let mut conn = pool
        .acquire()
        .await
        .map_err(error::ErrorInternalServerError)?;
    let item = DatabaseItem::by_id(&mut conn, item_id.into_inner())
        .await
        .map_err(error::ErrorInternalServerError)?
        .ok_or_else(|| error::ErrorNotFound(anyhow!("No item with id {} exists", item_id)))?;
    let item_occurrence =
        DatabaseItem::first_and_last_apperance_and_count_by_id(&mut conn, item_id.into_inner())
            .await
            .map_err(error::ErrorInternalServerError)?;
    let total_comics = DatabaseComic::count(&mut conn)
        .await
        .map_err(error::ErrorInternalServerError)?;
    let image_count = DatabaseItem::image_count_by_id(&mut conn, item_id.into_inner())
        .await
        .map_err(error::ErrorInternalServerError)?;
    let item = Item {
        id: item_id,
        short_name: item.short_name,
        name: item.name,
        r#type: ItemType::try_from(&*item.r#type).map_err(error::ErrorInternalServerError)?,
        color: ItemColor(item.color_red, item.color_green, item.color_blue),
        // Comic ids come from the database and are trusted to convert; a
        // missing occurrence falls back to the type's default.
        first: item_occurrence
            .first
            .map(TryInto::try_into)
            .transpose()
            .expect("database has valid comicIds")
            .unwrap_or_default(),
        last: item_occurrence
            .last
            .map(TryInto::try_into)
            .transpose()
            .expect("database has valid comicIds")
            .unwrap_or_default(),
        appearances: item_occurrence.count,
        total_comics,
        // Guard against division by zero when the archive is empty.
        presence: if total_comics == 0 {
            0.0
        } else {
            item_occurrence.count as f64 * 100.0 / total_comics as f64
        },
        has_image: image_count > 0,
    };
    Ok(HttpResponse::Ok().json(item))
}
/// GET handler: the five cast members most often co-appearing with this item.
pub(crate) async fn friends(
    pool: web::Data<DbPool>,
    item_id: web::Path<u16>,
) -> Result<HttpResponse> {
    let related = related_items(pool, *item_id, ItemType::Cast, 5).await?;
    Ok(HttpResponse::Ok().json(related))
}
/// GET handler: the five locations most often co-appearing with this item.
pub(crate) async fn locations(
    pool: web::Data<DbPool>,
    item_id: web::Path<u16>,
) -> Result<HttpResponse> {
    let related = related_items(pool, *item_id, ItemType::Location, 5).await?;
    Ok(HttpResponse::Ok().json(related))
}
/// GET handler: metadata for all images attached to the item, mapped into
/// the API's `ItemImageList` representation.
pub(crate) async fn images(
    pool: web::Data<DbPool>,
    item_id: web::Path<u16>,
) -> Result<HttpResponse> {
    let item_image_list =
        DatabaseItem::image_metadatas_by_id_with_mapping(&***pool, *item_id, ItemImageList::from)
            .await
            .map_err(error::ErrorInternalServerError)?;
    Ok(HttpResponse::Ok().json(item_image_list))
}
/// GET handler: raw PNG bytes for a single item image; 404 if the image id
/// does not exist.
pub(crate) async fn image(
    pool: web::Data<DbPool>,
    image_id: web::Path<i32>,
) -> Result<HttpResponse> {
    let image = DatabaseItem::image_by_image_id(&***pool, *image_id)
        .await
        .map_err(error::ErrorInternalServerError)?
        .ok_or_else(|| {
            error::ErrorNotFound(anyhow!("No item image with id {} exists", *image_id))
        })?;
    Ok(HttpResponse::Ok().content_type("image/png").body(image))
}
/// Shared helper: fetches up to `amount` items of the given `type` related
/// to `item_id`, mapping each database row into the API's `RelatedItem`.
async fn related_items(
    pool: web::Data<DbPool>,
    item_id: u16,
    r#type: ItemType,
    amount: i64,
) -> Result<Vec<RelatedItem>> {
    DatabaseItem::related_items_by_id_and_type_with_mapping(
        &***pool,
        item_id,
        r#type.into(),
        amount,
        |ri| {
            // Destructure the database row so nothing is silently dropped.
            let RelatedDatabaseItem {
                id,
                short_name,
                name,
                r#type,
                color_red,
                color_green,
                color_blue,
                count,
            } = ri;
            let id = id.into();
            RelatedItem {
                id,
                short_name,
                name,
                r#type: ItemType::try_from(&*r#type).expect("Item types in the database are valid"),
                color: ItemColor(color_red, color_green, color_blue),
                count,
            }
        },
    )
    .await
    .map_err(error::ErrorInternalServerError)
}
|
use crate::{
exec::Interpreter,
js::{
function::NativeFunctionData,
value::{from_value, to_value, FromValue, ResultValue, ToValue, Value, ValueData},
},
};
use gc::Gc;
use gc_derive::{Finalize, Trace};
use std::{borrow::Borrow, collections::HashMap, ops::Deref};
/// Static `prototype`, usually set on constructors as a key to point to their respective prototype object.
/// As this string is used throughout the program, it's best kept as a single static that is referenced everywhere.
pub static PROTOTYPE: &str = "prototype";
/// Static `__proto__`, usually set on Object instances as a key to point to their respective prototype object.
/// As this string is used throughout the program, it's best kept as a single static that is referenced everywhere.
pub static INSTANCE_PROTOTYPE: &str = "__proto__";
/// `ObjectData` is the representation of an object in JavaScript
#[derive(Trace, Finalize, Debug, Clone)]
pub struct Object {
/// Kind
pub kind: ObjectKind,
/// Internal Slots (e.g. `BooleanData`, `NumberData`, `StringData`)
pub internal_slots: Box<HashMap<String, Value>>,
/// Properties keyed by string name
pub properties: Box<HashMap<String, Property>>,
/// Symbol Properties, keyed by symbol id
pub sym_properties: Box<HashMap<usize, Property>>,
}
impl Object {
/// Return a new ObjectData struct, with `kind` set to Ordinary
pub fn default() -> Self {
Object {
kind: ObjectKind::Ordinary,
internal_slots: Box::new(HashMap::new()),
properties: Box::new(HashMap::new()),
sym_properties: Box::new(HashMap::new()),
}
}
/// Return a new Boolean object whose [[BooleanData]] internal slot is set to argument.
fn from_boolean(argument: &Value) -> Self {
let mut obj = Object {
kind: ObjectKind::Boolean,
internal_slots: Box::new(HashMap::new()),
properties: Box::new(HashMap::new()),
sym_properties: Box::new(HashMap::new()),
};
obj.internal_slots
.insert("BooleanData".to_string(), argument.clone());
obj
}
/// Return a new Number object whose [[NumberData]] internal slot is set to argument.
fn from_number(argument: &Value) -> Self {
let mut obj = Object {
kind: ObjectKind::Number,
internal_slots: Box::new(HashMap::new()),
properties: Box::new(HashMap::new()),
sym_properties: Box::new(HashMap::new()),
};
obj.internal_slots
.insert("NumberData".to_string(), argument.clone());
obj
}
/// Return a new String object whose [[StringData]] internal slot is set to argument.
fn from_string(argument: &Value) -> Self {
let mut obj = Object {
kind: ObjectKind::String,
internal_slots: Box::new(HashMap::new()),
properties: Box::new(HashMap::new()),
sym_properties: Box::new(HashMap::new()),
};
obj.internal_slots
.insert("StringData".to_string(), argument.clone());
obj
}
// https://tc39.es/ecma262/#sec-toobject
pub fn from(value: &Value) -> Result<Self, ()> {
match *value.deref().borrow() {
ValueData::Boolean(_) => Ok(Self::from_boolean(value)),
ValueData::Number(_) => Ok(Self::from_number(value)),
ValueData::String(_) => Ok(Self::from_string(value)),
_ => Err(()),
}
}
}
/// The set of built-in object kinds an [`Object`] can represent.
#[derive(Trace, Finalize, Clone, Debug)]
pub enum ObjectKind {
Function,
Array,
String,
Symbol,
Error,
/// A plain object with no special internal behavior.
Ordinary,
Boolean,
Number,
}
/// A Javascript Property AKA The Property Descriptor
/// [[SPEC] - The Property Descriptor Specification Type](https://tc39.github.io/ecma262/#sec-property-descriptor-specification-type)
/// [[SPEC] - Default Attribute Values](https://tc39.github.io/ecma262/#table-4)
#[derive(Trace, Finalize, Clone, Debug)]
pub struct Property {
/// If the type of this can be changed and this can be deleted
pub configurable: bool,
/// If the property shows up in enumeration of the object
pub enumerable: bool,
/// If this property can be changed with an assignment
pub writable: bool,
/// The value associated with the property
pub value: Value,
/// The function serving as getter
pub get: Value,
/// The function serving as setter
pub set: Value,
}
impl Property {
/// Checks if the provided Value can be used as a property key.
pub fn is_property_key(value: &Value) -> bool {
    value.is_string() // || value.is_symbol() // Uncomment this when we are handeling symbols.
}
/// Make a new property with the given value.
/// All attribute flags default to `false`, matching the spec's default
/// attribute values (table-4); getter and setter default to `undefined`.
pub fn new(value: Value) -> Self {
    Self {
        configurable: false,
        enumerable: false,
        writable: false,
        value,
        get: Gc::new(ValueData::Undefined),
        set: Gc::new(ValueData::Undefined),
    }
}
}
impl ToValue for Property {
/// Serializes the descriptor into a fresh object with one field per
/// attribute (configurable/enumerable/writable/value/get/set).
fn to_value(&self) -> Value {
    let prop = ValueData::new_obj(None);
    prop.set_field_slice("configurable", to_value(self.configurable));
    prop.set_field_slice("enumerable", to_value(self.enumerable));
    prop.set_field_slice("writable", to_value(self.writable));
    prop.set_field_slice("value", self.value.clone());
    prop.set_field_slice("get", self.get.clone());
    prop.set_field_slice("set", self.set.clone());
    prop
}
}
impl FromValue for Property {
    /// Reconstructs a `Property` from a property-descriptor-shaped object.
    /// Propagates a conversion error via `?` instead of panicking (the
    /// previous `.unwrap()` calls made the `Result` return type pointless).
    fn from_value(v: Value) -> Result<Self, &'static str> {
        Ok(Self {
            configurable: from_value(v.get_field_slice("configurable"))?,
            enumerable: from_value(v.get_field_slice("enumerable"))?,
            writable: from_value(v.get_field_slice("writable"))?,
            value: v.get_field_slice("value"),
            get: v.get_field_slice("get"),
            set: v.get_field_slice("set"),
        })
    }
}
/// Create a new object
/// NOTE(review): currently a stub — ignores its arguments and returns
/// `undefined` rather than an object.
pub fn make_object(_: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {
    Ok(Gc::new(ValueData::Undefined))
}
/// Get the prototype of an object
/// NOTE(review): panics if called with no arguments (`args.get(0).unwrap()`).
pub fn get_proto_of(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
    let obj = args.get(0).unwrap();
    Ok(obj.get_field_slice(INSTANCE_PROTOTYPE))
}
/// Set the prototype of an object
/// Returns the object whose prototype was set.
/// NOTE(review): panics if fewer than two arguments are supplied.
pub fn set_proto_of(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
    let obj = args.get(0).unwrap().clone();
    let proto = args.get(1).unwrap().clone();
    obj.set_internal_slot(INSTANCE_PROTOTYPE, proto);
    Ok(obj)
}
/// Define a property in an object
/// Expects (object, property name, descriptor object); returns `undefined`.
/// NOTE(review): panics on missing or ill-typed arguments.
pub fn define_prop(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
    let obj = args.get(0).unwrap();
    let prop = from_value::<String>(args.get(1).unwrap().clone()).unwrap();
    let desc = from_value::<Property>(args.get(2).unwrap().clone()).unwrap();
    obj.set_prop(prop, desc);
    Ok(Gc::new(ValueData::Undefined))
}
/// To string
/// Renders `this` via its `ToString` implementation and wraps the result.
pub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {
    Ok(to_value(this.to_string()))
}
/// Check if it has a property
/// A missing or non-string argument yields `false` instead of a panic;
/// `map_or` replaces the previous `is_some() && …unwrap()` construction.
pub fn has_own_prop(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
    let prop = args
        .get(0)
        .and_then(|v| from_value::<String>(v.clone()).ok());
    Ok(to_value(
        prop.map_or(false, |name| this.get_prop(&name).is_some()),
    ))
}
/// Create a new `Object` object
/// Builds the `Object` constructor function, its prototype object (with
/// `hasOwnProperty` and `toString`), and the static helpers
/// (`setPrototypeOf`, `getPrototypeOf`, `defineProperty`).
pub fn _create(global: &Value) -> Value {
    let object = to_value(make_object as NativeFunctionData);
    let prototype = ValueData::new_obj(Some(global));
    prototype.set_field_slice(
        "hasOwnProperty",
        to_value(has_own_prop as NativeFunctionData),
    );
    prototype.set_field_slice("toString", to_value(to_string as NativeFunctionData));
    object.set_field_slice("length", to_value(1_i32));
    object.set_field_slice(PROTOTYPE, prototype);
    object.set_field_slice(
        "setPrototypeOf",
        to_value(set_proto_of as NativeFunctionData),
    );
    object.set_field_slice(
        "getPrototypeOf",
        to_value(get_proto_of as NativeFunctionData),
    );
    object.set_field_slice(
        "defineProperty",
        to_value(define_prop as NativeFunctionData),
    );
    object
}
/// Initialise the `Object` object on the global object
pub fn init(global: &Value) {
    global.set_field_slice("Object", _create(global));
}
#[cfg(test)]
mod test {
    use super::*;
    /// Strings are valid property keys; booleans are not (symbols pending).
    #[test]
    fn is_property_key_test() {
        let v = Value::new(ValueData::String(String::from("Boop")));
        assert!(Property::is_property_key(&v));
        let v = Value::new(ValueData::Boolean(true));
        assert!(!Property::is_property_key(&v));
    }
}
|
#![no_std]
use bootloader::boot_info::{FrameBufferInfo, PixelFormat};
use conquer_once::spin::OnceCell;
use spin::Mutex;
use core::{
fmt::{self},
ptr,
};
use font8x8::UnicodeFonts;
/// The global Writer instance used for the `log` crate.
///
/// A [`Writer`] instance protected by a spinlock.
pub static WRITER: OnceCell<Mutex<Writer>> = OnceCell::uninit();
/// Additional vertical space between lines
const LINE_SPACING: usize = 0;
/// Allows logging text to a pixel-based framebuffer.
pub struct Writer {
    // Raw byte slice backing the framebuffer.
    framebuffer: &'static mut [u8],
    // Resolution, stride, and pixel format of the framebuffer.
    pub info: FrameBufferInfo,
    // Cursor position in pixels (top-left origin).
    x_pos: usize,
    y_pos: usize,
}
impl Writer {
    /// Creates a new Writer that uses the given framebuffer.
    pub fn new(framebuffer: &'static mut [u8], info: FrameBufferInfo) -> Writer {
        let mut writer = Self {
            framebuffer,
            info,
            x_pos: 0,
            y_pos: 0,
        };
        // Start from a blank screen so stale framebuffer contents never show.
        writer.clear();
        writer
    }
    /// Advances one line (glyphs are 8 px tall) and returns to column 0.
    fn newline(&mut self) {
        self.y_pos += 8 + LINE_SPACING;
        self.carriage_return()
    }
    /// Moves the cursor back to the start of the current line.
    fn carriage_return(&mut self) {
        self.x_pos = 0;
    }
    /// Erases all text on the screen.
    pub fn clear(&mut self) {
        self.x_pos = 0;
        self.y_pos = 0;
        self.framebuffer.fill(0);
    }
    /// Screen width in pixels.
    fn width(&self) -> usize {
        self.info.horizontal_resolution
    }
    /// Screen height in pixels.
    fn height(&self) -> usize {
        self.info.vertical_resolution
    }
    /// Writes one character, handling `\n`/`\r`, line wrapping, and a
    /// full-screen clear when the bottom is reached (no scrolling).
    fn write_char(&mut self, c: char) {
        match c {
            '\n' => self.newline(),
            '\r' => self.carriage_return(),
            c => {
                if self.x_pos >= self.width() {
                    self.newline();
                }
                if self.y_pos >= (self.height() - 8) {
                    self.clear();
                }
                let rendered = font8x8::BASIC_FONTS
                    .get(c)
                    .expect("character not found in basic font");
                self.write_rendered_char(rendered);
            }
        }
    }
    /// Blits an 8x8 font bitmap at the cursor and advances it one glyph.
    fn write_rendered_char(&mut self, rendered_char: [u8; 8]) {
        for (y, byte) in rendered_char.iter().enumerate() {
            // Bit x of each row byte is the pixel at column x, so the bit
            // index doubles as the x offset (the old `(0..8).enumerate()`
            // produced the same value twice).
            for x in 0..8 {
                let alpha = if *byte & (1 << x) == 0 { 0 } else { 255 };
                self.write_pixel(self.x_pos + x, self.y_pos + y, alpha);
            }
        }
        self.x_pos += 8;
    }
    /// Writes one grayscale pixel at (x, y), translating `intensity` into
    /// the framebuffer's native pixel format.
    pub fn write_pixel(&mut self, x: usize, y: usize, intensity: u8) {
        let pixel_offset = y * self.info.stride + x;
        let color = match self.info.pixel_format {
            PixelFormat::RGB => [intensity, intensity, intensity / 2, 0],
            PixelFormat::BGR => [intensity / 2, intensity, intensity, 0],
            PixelFormat::U8 => [if intensity > 200 { 0xf } else { 0 }, 0, 0, 0],
            _ => panic!("Unresolved pixel format")
        };
        let bytes_per_pixel = self.info.bytes_per_pixel;
        let byte_offset = pixel_offset * bytes_per_pixel;
        self.framebuffer[byte_offset..(byte_offset + bytes_per_pixel)]
            .copy_from_slice(&color[..bytes_per_pixel]);
        // Volatile read keeps the write from being optimized away on the
        // memory-mapped framebuffer.
        let _ = unsafe { ptr::read_volatile(&self.framebuffer[byte_offset]) };
    }
}
/// Formats the arguments and forwards them to the crate's `_print` function.
// NOTE(review): `#[allow(unreachable_code)]` on a macro definition looks like
// a leftover; confirm it is still needed before removing.
#[allow(unreachable_code)]
#[macro_export]
macro_rules! print {
    ($($arg:tt)*) => ($crate::_print(format_args!($($arg)*)));
}
/// Appends a newline to the formatted arguments and delegates to `print!`.
#[macro_export]
macro_rules! println {
    () => (print!("\n"));
    ($($arg:tt)*) => (print!("{}\n", format_args!($($arg)*)));
}
/// Print function that the macros are derived from
#[doc(hidden)]
pub fn _print(args: fmt::Arguments) {
    use core::fmt::Write;
    use x86_64::instructions::interrupts;
    // Keep interrupts disabled while the writer lock is held, so a handler
    // that also prints cannot contend for (and deadlock on) the spinlock.
    interrupts::without_interrupts(|| {
        let mut writer = WRITER.get().unwrap().lock();
        writer.write_fmt(args).unwrap();
    });
}
// SAFETY(review): `Writer` holds a `&'static mut [u8]` framebuffer. These
// impls assume all access is serialized through the `Mutex` in `WRITER` and
// that no other path hands out the framebuffer slice — confirm.
unsafe impl Send for Writer {}
unsafe impl Sync for Writer {}
impl fmt::Write for Writer {
fn write_str(&mut self, s: &str) -> fmt::Result {
for c in s.chars() {
self.write_char(c);
}
Ok(())
}
} |
fn main() {
    // Part 1: gamma/epsilon are derived from the most/least common bit
    // in each column.
    let gamma_bits = gamma(&input());
    let gamma_rate = to_dec(&gamma_bits);
    let epsilon_rate = to_dec(&invert(&gamma_bits));
    println!(
        "First solution: gamma={}, epsilon={}, product={}",
        gamma_rate,
        epsilon_rate,
        gamma_rate * epsilon_rate
    );

    // Part 2: filter candidates column by column using the bit criteria.
    let oxygen = to_dec(&life_support(input(), false));
    let co2 = to_dec(&life_support(input(), true));
    println!(
        "Second solution: oxygen={}, co2={}, product={}",
        oxygen,
        co2,
        oxygen * co2
    );
}
type Binary = Vec<i32>;
/// Repeatedly filters the candidates on successive bit positions until a
/// single reading remains. With `co2 == false` the most common bit is kept
/// (oxygen rating); with `co2 == true` the criterion is flipped (CO2 rating).
fn life_support(mut candidates: Vec<Binary>, co2: bool) -> Binary {
    let mut pos = 0;
    while candidates.len() > 1 {
        let pattern = gamma(&candidates);
        // XOR-ing with the flag flips the bit criterion for the CO2 rating.
        let wanted = pattern[pos] ^ co2 as i32;
        candidates.retain(|b| b[pos] == wanted);
        pos += 1;
    }
    candidates.remove(0)
}
/// Interprets the bit vector (most significant digit first) as an integer.
fn to_dec(b: &Binary) -> i32 {
    b.iter()
        .rev()
        .enumerate()
        .fold(0, |acc, (i, &bit)| acc | (bit << i))
}
/// Flips every bit (0 <-> 1) of the pattern.
fn invert(b: &Binary) -> Binary {
    let mut flipped = Vec::with_capacity(b.len());
    for &bit in b {
        flipped.push(bit ^ 1);
    }
    flipped
}
/// gamma returns a pattern of the most common bits for each position.
///
/// Each column is tallied as +1 per one-bit and -1 per zero-bit; a
/// non-negative tally yields 1. Ties therefore resolve to 1, matching the
/// "equally common" rule of the life-support rating (`invert` then yields 0
/// for the CO2 criterion).
///
/// The previous version seeded the tally with the first row's raw bits
/// (0/1) instead of -1/+1, which made tie results depend on input order.
///
/// # Panics
/// Panics if `it` is empty (as before).
fn gamma(it: &Vec<Binary>) -> Binary {
    let width = it[0].len();
    let mut tally = vec![0i32; width];
    for row in it {
        for (t, &bit) in tally.iter_mut().zip(row) {
            // +1 for a one, -1 for a zero.
            *t += 2 * bit - 1;
        }
    }
    tally.into_iter().map(|t| (t >= 0) as i32).collect()
}
/// Parses the embedded puzzle input: one line per reading, each line a
/// string of binary digits.
///
/// # Panics
/// Panics if any character is not a binary digit.
fn input() -> Vec<Binary> {
    include_str!("../../input/03.txt")
        .lines()
        .map(|l| l.chars().map(|c| c.to_digit(2).unwrap() as i32).collect())
        .collect()
}
|
mod lib;
fn main() {
    let first = "abcd";
    let second = "becd";
    // Print the lcs result for the two sample strings.
    println!("{}", lib::lcs(first, second));
}
|
use crate::Vec2;
use super::Window;
use glium::{
Display,
glutin::{
self,
dpi::PhysicalSize,
window::WindowId
}
};
static mut WINDOWS: Vec <Display> = Vec::new();
/// Fluent builder for creating a [`Window`].
pub struct WindowBuilder {
    /// Requested inner size; `None` keeps the platform default.
    size: Option<Vec2<u32>>,
    /// Window title shown by the platform.
    title: String
}
impl Default for WindowBuilder {
fn default() -> Self {
Self {
size: None,
title: String::from("qqx")
}
}
}
impl WindowBuilder {
    /// Sets the desired inner size of the window.
    #[inline]
    pub fn size(mut self, size: Vec2<u32>) -> Self {
        self.size = Some(size);
        self
    }

    /// Sets the window title.
    #[inline]
    pub fn title<S>(mut self, title: S) -> Self
    where
        S: ToString,
    {
        self.title = title.to_string();
        self
    }

    /// Consumes the builder, opens the window and registers its display in
    /// the global window list, returning a handle to it.
    pub fn build(self) -> Window {
        let mut glutin_builder = glutin::window::WindowBuilder::new().with_title(self.title);
        if let Some(size) = self.size {
            glutin_builder = glutin_builder.with_inner_size(PhysicalSize::from(size));
        }
        let display = Display::new(glutin_builder, glutin::ContextBuilder::new(), crate::Stt::eventloop()).unwrap();
        unsafe {
            WINDOWS.push(display);
            Window(WINDOWS.len() - 1)
        }
    }
}
impl Window {
    /// Returns the `Display` backing this window handle.
    // SAFETY(review): hands out a `&'static mut` into the `static mut
    // WINDOWS` vec. Two live handles would alias mutably, and a later
    // `push` may reallocate and invalidate the reference. Appears to rely
    // on strictly single-threaded, one-at-a-time use — confirm.
    #[inline]
    pub(crate) fn dpy(self) -> &'static mut Display {
        unsafe { &mut WINDOWS[self.0] }
    }
    /// Finds the registered window whose glutin id matches `id`.
    ///
    /// Reaches `unreachable!()` (panics) if no registered window has that id.
    pub(crate) fn by_id(id: WindowId) -> Self {
        let mut i = 0;
        unsafe {
            while i < WINDOWS.len() {
                if WINDOWS[i].gl_window().window().id() == id { return Window(i) }
                i += 1
            }
        }
        unreachable!()
    }
}
|
#[macro_use]
use std::ops::{ Add, RangeInclusive };
use std::any::{ TypeId, Any };
use std::rc::Rc;
use std::cell::RefCell;
use log::{ info, debug, trace };
use crate::instructions::InstrThumb16;
/*
* ARMv7-M THUMB ENCODING
*
* The Thumb instruction stream is a sequence of halfword-aligned halfwords. Each Thumb instruction is either a
* single 16-bit halfword in that stream, or a 32-bit instruction consisting of two consecutive halfwords in that stream.
*
* If bits [15:11] of the halfword being decoded take any of the following values, the halfword is the first halfword of
* a 32-bit instruction:
* - 0b11101
* - 0b11110
* - 0b11111
*
*
*
* 16 BIT THUMB INSTRUCTION ENCODING
* =================================================
* |15 14 13 12 11 10|09 08 07 06 05 04 03 02 01 00|
* |opcode | |
* =================================================
*
*
* SHIFT (imm) ADD, SUBTRACT, MOVE, COMPARE
* =================================================
* |15 14|13 12 11 10 09|08 07 06 05 04 03 02 01 00|
* |0 0 |opcode | |
* =================================================
*
*
* DATA PROCESSING INSTRUCTION ENCODING
* =================================================
* |15 14 13 12 11 10|09 08 07 06|05 04 03 02 01 00|
* |0 1 0 0 0 0 |opcode | |
* =================================================
*
*
* SPECIAL DATA INSTRUCTIONS AND BRANCH AND EXCHANGE
* =================================================
* |15 14 13 12 11 10|09 08 07 06|05 04 03 02 01 00|
* |0 1 0 0 0 1 |opcode | |
* =================================================
*
*
* LOAD/STORE SINGLE DATA ITEM
* =================================================
* |15 14 13 12|11 10 09|08 07 06 05 04 03 02 01 00|
* |opA |opB | |
* =================================================
*
* NOTE:
* These instructions have one of the following values in opA:
* - 0b0101
* - 0b011x
* - 0b100x
*
*
* MISCELLANEOUS 16-BIT INSTRUCTIONS
* =================================================
* |15 14 13 12|11 10 09 08 07 06 05|04 03 02 01 00|
* |1 0 1 1 |opcode | |
* =================================================
*
*
* IF/THEN AND HINTS
* =================================================
* |15 14 13 12|11 10 09 08|07 06 05 04|03 02 01 00|
* |1 0 1 1 |1 1 1 1 |opA |opB |
* =================================================
*
* NOTE:
* Other encodings in this space are unallocated hints. They execute as NOPs, but software must not use them.
*
*
* CONDITIONAL BRANCH AND SUPERVISOR CALL
* =================================================
* |15 14 13 12|11 10 09 08|07 06 05 04 03 02 01 00|
* |1 1 0 1 |opcode | |
* =================================================
*
*
* 32-BIT THUMB INSTRUCTION ENCODING
* =================================================================================================
* |15 14 13|12 11|10 09 08 07 06 05 04|03 02 01 00|15|14 13 12 11 10 09 08 07 06 05 04 03 02 01 00|
* |1 1 1 |op1 |op2 | |op| |
* =================================================================================================
*
*
*
*/
/// What do we need to capture to fully describe an instruction?
/// * Name, long name, and optional description
/// * Family, thumb or thumb2
/// * Arity, discovered implicitly
/// * Invariant, the value which defines the instruction as being itself
/// * The algebraic variant of the instruction
/// * A description of each operand
/// * Individual encodings
///
/// What do we need to capture to fully describe an operand?
/// * Name, optional long name, and optional description
/// * The bit width of the operand
/// * The number of bits shifted from the right to the left of each operand
/// * The language representation of each operand, including whether it is signed or unsigned
/// * Whether the operand is a composite of bit sub-slices of the instruction
///
/// How do we want to represent that captured data?
/// * Verbose, immutable, structured data
///
/// How can we simplify the definitions of operands?
/// * Templates for commonly used operands?
///
/// Builds a boxed closure that writes a decoded operand value into the
/// matching field of an `InstrThumb16` variant.
///
/// `$instr` names the enum variant, `$op` the field to update and `$repr`
/// the field's concrete type. The incoming `&dyn Any` value is expected to
/// carry an `i64` intermediary, which is narrowed to `$repr`.
#[allow(unused_macros)]
macro_rules! map_operand {
    ($instr:path, $op:ident, $repr:ident) => {
        #[allow(unused_variables)]
        {
            Box::new(|_s, mut _i, _o| {
                match _i {
                    $instr { ref mut $op, .. } => {
                        let _temp_downcasted = *_o
                            .downcast_ref::<i64>()
                            .expect("invalid signed operand intermediary downcast");
                        *$op = _temp_downcasted as $repr;
                    }
                    // `panic!` formats its arguments directly; wrapping the
                    // message in `format!` first is deprecated and a hard
                    // error in the 2021 edition.
                    m => panic!("invalid instruction operand field map: {:?}", m),
                }
            })
        }
    };
}
//#[allow(unused_macros)]
//macro_rules! map_operand {
// ($instr:path, $op:ident, $repr:ident) => {
// #[allow(unused_variables)]
// {
// Box::new(|_s, mut _i, _o| {
// trace!("Mapping field...");
// trace!(" {:?}", _i);
// trace!(" Operand {{ name: {:?}, repr: {:?} }}", _s.name, _s.repr);
// trace!(" Prior state: {:?}", _i);
// match _i {
// $instr{ ref mut $op, .. } => {
// trace!(" Performing operand downcast");
// match _s.repr {
// OperandRepr::SignedByte |
// OperandRepr::SignedShort |
// OperandRepr::SignedWord => {
// let _temp = *_o.downcast_ref::<i64>().expect("invalid signed operand intermediary downcast");
// *$op = _temp as $repr;
// },
//
// OperandRepr::UnsignedByte |
// OperandRepr::UnsignedShort |
// OperandRepr::UnsignedWord => {
// let _temp = *_o.downcast_ref::<u64>().expect("invalid unsigned operand intermediary downcast");
// *$op = _temp as $repr;
// },
// }
//
// },
// m => {
// panic!("invalid instruction operand field map: {:?}", m);
// }
// }
// trace!(" Resulting state: {:?}", _i);
// })
// }
// };
//}
#[cfg(test)]
mod test {
    use super::*;

    /// Configures `env_logger` so tests print debug-level, colored logs.
    fn test_env() {
        // Setup the test environment to print trace logs by default
        let log_filter = "debug";
        let write_style = "always";
        let env = env_logger::Env::default()
            .default_filter_or(log_filter)
            .default_write_style_or(write_style);
        // NOTE(review): `env_logger::from_env` is deprecated in newer
        // env_logger releases in favour of `Builder::from_env` — confirm the
        // pinned version before upgrading.
        let mut builder = env_logger::from_env(env);
        builder.is_test(true);
        builder.init();
        // needed to avoid jumbled output on first line of test stdout
        println!();
    }

    /// Exercises the builder API end to end: two instruction descriptions
    /// are assembled and their decode tables generated.
    #[test]
    fn instruction_description_builder() {
        test_env();
        // Branch: conditional (invariant 0xD000) and unconditional (0xE000)
        // encodings, each mapping its operands into the matching variant.
        let branch = InstrDesc::new()
            .name("Branch")
            .desc("Conditional and unconditional branching")
            .encoding(Encoding::new()
                .name("E1")
                .desc("Performs a conditional branch")
                .invariant(0xD000)
                .ctor(Box::new(|| InstrThumb16::BranchE1{ cond: 0u8, imm: 0i8 }))
                .operand(Operand::new()
                    .name("cond")
                    .width(4)
                    .shift(8)
                    .repr(OperandRepr::UnsignedByte)
                    .map(map_operand!(InstrThumb16::BranchE1, cond, u8))
                    .build())
                .operand(Operand::new()
                    .name("imm")
                    .width(8)
                    .repr(OperandRepr::SignedByte)
                    .map(map_operand!(InstrThumb16::BranchE1, imm, i8))
                    .build())
                .build())
            .encoding(Encoding::new()
                .name("E2")
                .desc("Performs an unconditional branch")
                .invariant(0xE000)
                .ctor(Box::new(|| InstrThumb16::BranchE2{ imm: 0i16 }))
                .operand(Operand::new()
                    .name("imm")
                    .width(11)
                    .repr(OperandRepr::SignedShort)
                    .map(map_operand!(InstrThumb16::BranchE2, imm, i16))
                    .build())
                .build())
            .build();
        // SimpleTest: a single encoding with three 2-bit unsigned operands.
        let test_desc = InstrDesc::new()
            .name("SimpleTest")
            .desc("Simple unit testing instruction")
            .encoding(Encoding::new()
                .name("E1")
                .desc("First Encoding")
                .invariant(0xF000)
                .ctor(Box::new(|| InstrThumb16::Test{ a: 0u8, b: 0, c: 0u8} ))
                .operand(Operand::new()
                    .name("a")
                    .width(2)
                    .shift(6)
                    .repr(OperandRepr::UnsignedByte)
                    .map(map_operand!(InstrThumb16::Test, a, u8))
                    .build())
                .operand(Operand::new()
                    .name("b")
                    .width(2)
                    .shift(3)
                    .repr(OperandRepr::UnsignedByte)
                    .map(map_operand!(InstrThumb16::Test, b, u8))
                    .build())
                .operand(Operand::new()
                    .name("c")
                    .width(2)
                    .shift(0)
                    .repr(OperandRepr::UnsignedByte)
                    .map(map_operand!(InstrThumb16::Test, c, u8))
                    .build())
                .build())
            .build();
        trace!("{:#?}", branch);
        // No assertions: the test only checks that building and generating
        // the decode tables does not panic.
        for enc in branch.encodings {
            enc.generate_decode_table();
        }
        for enc in test_desc.encodings {
            enc.generate_decode_table();
        }
    }
}
/// Full description of one instruction: its name, a human-readable
/// description and every encoding it supports.
#[derive(Debug)]
pub struct InstrDesc {
    name: String,
    desc: String,
    encodings: Vec<Encoding>,
}
impl InstrDesc {
    /// Starts building a new, empty instruction description.
    pub fn new() -> InstrDescBuilder {
        trace!("New instruction description");
        let inner = InstrDesc {
            name: String::new(),
            desc: String::new(),
            encodings: Vec::new(),
        };
        InstrDescBuilder { name: None, inner }
    }
}
/// Builder for [`InstrDesc`].
#[derive(Debug)]
pub struct InstrDescBuilder {
    /// Set by `name()`; must be present before encodings are added or
    /// `build()` is called.
    name: Option<String>,
    inner: InstrDesc
}
impl InstrDescBuilder {
    /// Sets the instruction name. Must be called before `encoding`.
    pub fn name(mut self, name: &str) -> Self {
        self.name = Some(name.to_string());
        self
    }

    /// Sets the long-form description.
    pub fn desc(mut self, desc: &str) -> Self {
        self.inner.desc = desc.to_string();
        self
    }

    /// Appends an encoding, stamping it with the instruction name so debug
    /// output can identify its parent.
    ///
    /// # Panics
    /// Panics if `name` has not been set yet.
    pub fn encoding(mut self, encoding: Encoding) -> Self {
        trace!("Adding instruction encoding");
        let parent = match &self.name {
            Some(n) => n.clone(),
            None => panic!("Instructions must be named before encodings can be added"),
        };
        let mut enc = encoding;
        enc.parent = parent;
        self.inner.encodings.push(enc);
        self
    }

    /// Finalizes and returns the instruction description.
    ///
    /// # Panics
    /// Panics if `name` was never set.
    pub fn build(mut self) -> InstrDesc {
        self.inner.name = self.name.expect("Attempted to build an unnamed instruction");
        debug!("Building instruction description {:?} with {:?} encoding(s)", self.inner.name, self.inner.encodings.len());
        self.inner
    }
}
/// One concrete encoding of an instruction.
#[derive(Debug)]
pub struct Encoding {
    /// Name of the owning instruction (filled in by `InstrDescBuilder`).
    parent: String,
    name: String,
    desc: String,
    /// Bit pattern that identifies this encoding.
    invariant: usize,
    operands: Vec<Operand>,
    /// Constructor producing a zero-initialized instruction variant.
    ctor: Option<VariantCtorFn>
}
impl Encoding {
    /// Starts building a new, empty encoding.
    pub fn new() -> EncodingBuilder {
        trace!("New encoding...");
        let inner = Encoding {
            parent: String::new(),
            name: String::new(),
            desc: String::new(),
            invariant: 0,
            operands: Vec::new(),
            ctor: None,
        };
        EncodingBuilder { inner }
    }

    /// Number of operands this encoding takes.
    pub fn arity(&self) -> usize {
        self.operands.len()
    }

    /// Builds the decode table for this encoding and prints each entry.
    pub fn generate_decode_table(&self) {
        let builder = RecursiveDecoderBuilder::new(self);
        for entry in builder.build_decode_table() {
            println!("{:?}", entry);
        }
    }
}
/// Pairs an encoded instruction halfword with its decoded form.
#[derive(Debug, Clone, Copy)]
pub struct InstructionCodecPair {
    encoded: u16,
    decoded: InstrThumb16
}
impl InstructionCodecPair {
fn new(encoded: u16, decoded: InstrThumb16) -> InstructionCodecPair {
InstructionCodecPair {
encoded, decoded
}
}
}
pub type DecodeTable = Vec<InstructionCodecPair>;
/// Walks an encoding's operands recursively to enumerate every legal
/// instruction permutation.
struct RecursiveDecoderBuilder<'a> {
    encoding: &'a Encoding,
    /// The resulting decode table
    // NOTE(review): nothing in this file ever writes to `dectab`; see
    // `build_decode_table` below.
    dectab: RefCell<DecodeTable>,
    // this holds the current state of the instruction we are processing
    //state: Rc<RefCell<InstrThumb16>>
}
impl<'a> RecursiveDecoderBuilder<'a> {
    /// Wraps an encoding together with an (empty) output table.
    pub fn new(encoding: &'a Encoding) -> RecursiveDecoderBuilder<'a> {
        RecursiveDecoderBuilder {
            encoding: encoding,
            dectab: RefCell::new(Vec::new()),
        }
    }
    /// Enumerates every operand-value permutation of the encoding.
    ///
    /// NOTE(review): the recursion accumulates permutations in the local
    /// `table` (a `Vec<InstrThumb16>`), but the function returns
    /// `self.dectab` — which is never written — so callers currently always
    /// receive an EMPTY table. `table` also has a different element type
    /// than `DecodeTable`; pairing each state with its encoded halfword is
    /// presumably still to be implemented. Confirm intended behavior.
    pub fn build_decode_table(self) -> DecodeTable {
        let variant_constructor = self.encoding.ctor.as_ref().expect("no variant ctor");
        let variant = (*variant_constructor)();
        let mut table = Vec::new();
        let mut state = variant;
        self.build_decode_table_recursive(0, &mut state, &mut table);
        // NOTE(review): dead loop — the body is commented out and `i`/`item`
        // are unused.
        for (i, item) in table.iter().enumerate() {
            //println!("{:04}: {:?}", i + 1, item);
        }
        self.dectab.into_inner()
    }
    /// Depth-first walk over the operands: at depth `idx` every legal value
    /// of operand `idx` is applied to `state` before recursing; at the leaf
    /// (no operand left) the fully-populated `state` is pushed to `output`.
    fn build_decode_table_recursive(&self, idx: usize, state: &mut InstrThumb16, output: &mut Vec<InstrThumb16>) {
        trace!("Building decode table from instruction operand set (recursion level: {})", idx);
        // Map all permutations of a given instruction into a list of decoded instructions
        // If we have an operand
        if let Some(operand) = self.encoding.operands.get(idx) {
            let field_map = operand.map.as_ref().unwrap();
            // Calculate the permutation range of the operand
            let (low, high) = operand.permutations();
            // For each permutation
            for p in RangeInclusive::new(low, high) {
                // Apply the field mapping function to update the value of the state
                (*field_map)(&operand, state, &p);
                // try and go one level deeper
                self.build_decode_table_recursive(idx.add(1), state, output);
            }
        } else {
            // Terminal, copy our current state into the decode table as a legal permutation
            output.push(state.clone());
        }
        // if we are out of the recursion stack and about to return
        if idx == 0 {
            debug!("Built decode table for encoding {:?} of {:?} with {} permutation(s)", self.encoding.name, self.encoding.parent, output.len());
        }
    }
    #[allow(dead_code)]
    fn build_decoder(&self) {
        unimplemented!()
    }
}
/// Builder for [`Encoding`].
#[derive(Debug)]
pub struct EncodingBuilder {
    inner: Encoding
}
impl EncodingBuilder {
    /// Sets the encoding's short name (e.g. "E1").
    pub fn name(mut self, name: &str) -> Self {
        trace!("Setting encoding name: {:?}", name);
        self.inner.name = name.to_string();
        self
    }

    /// Sets the human-readable description.
    pub fn desc(mut self, desc: &str) -> Self {
        trace!("Setting encoding description: {:?}", desc);
        self.inner.desc = desc.to_string();
        self
    }

    /// Appends an operand definition.
    pub fn operand(mut self, op: Operand) -> Self {
        trace!("Adding encoding operand {:?}, width {:?}, shifted {:?} bits to the left", op.name, op.width, op.shift);
        self.inner.operands.push(op);
        self
    }

    /// Sets the bit pattern identifying this encoding.
    pub fn invariant(mut self, invariant: usize) -> Self {
        trace!("Setting encoding invariant: {:#06X}", invariant);
        self.inner.invariant = invariant;
        self
    }

    /// Sets the constructor that produces a zeroed instruction variant.
    pub fn ctor(mut self, ctor_func: VariantCtorFn) -> Self {
        trace!("Setting encoding variant constructor method");
        self.inner.ctor = Some(ctor_func);
        self
    }

    /// Finalizes and returns the encoding.
    pub fn build(self) -> Encoding {
        trace!("Building encoding {:?} with {:?} operands", self.inner.name, self.inner.operands.len());
        self.inner
    }
}
/// A single operand of an instruction encoding.
#[derive(Debug)]
pub struct Operand {
    name: String,
    /// Field width in bits.
    width: usize,
    /// Left shift of the field within the instruction word.
    shift: usize,
    typeid: TypeId,
    /// Language-level representation (signedness and size).
    repr: OperandRepr,
    /// Closure mapping a decoded value into the instruction variant's field.
    map: Option<OperatorMapFn>
}
impl Operand {
    /// Starts building a new operand (unsigned byte representation by
    /// default, zero width/shift, no mapping function).
    pub fn new() -> OperandBuilder {
        OperandBuilder {
            inner: Operand {
                name: Default::default(),
                width: Default::default(),
                shift: Default::default(),
                repr: OperandRepr::UnsignedByte,
                typeid: std::any::TypeId::of::<Self>(),
                map: None,
            },
        }
    }

    /// Returns the inclusive `(low, high)` value range this operand can
    /// take, derived from its bit width and representation: a signed field
    /// of `w` bits spans `[-2^(w-1), 2^(w-1) - 1]`, an unsigned one spans
    /// `[0, 2^w - 1]` (and `[0, 0]` for width 0).
    pub fn permutations(&self) -> (i64, i64) {
        // 2^width overflows u64 for widths >= 64 (`pow` panics in debug
        // builds); fail early with a clearer message.
        debug_assert!(self.width < 64, "operand width too large for u64 range");
        let range = std::cmp::max(1u64, 2u64.pow(self.width as u32));
        match self.repr {
            OperandRepr::SignedByte
            | OperandRepr::SignedShort
            | OperandRepr::SignedWord => {
                // NOTE: a zero-width signed operand would underflow
                // `range / 2 - 1` (pre-existing behavior, unchanged).
                let low = -((range / 2) as i64);
                let high = ((range / 2) - 1) as i64;
                (low, high)
            }
            OperandRepr::UnsignedByte
            | OperandRepr::UnsignedShort
            | OperandRepr::UnsignedWord => (0i64, (range - 1) as i64),
        }
    }
}
/// Builder for [`Operand`].
#[derive(Debug)]
pub struct OperandBuilder {
    inner: Operand
}
impl OperandBuilder {
    /// Sets the operand's name.
    pub fn name(mut self, name: &str) -> Self {
        trace!("Setting operand name {:?}", name);
        self.inner.name = name.to_string();
        self
    }

    /// Sets the field width in bits.
    pub fn width(mut self, width: usize) -> Self {
        trace!("Setting operand width {:?}", width);
        self.inner.width = width;
        self
    }

    /// Sets the left shift of the field within the instruction word.
    pub fn shift(mut self, shift: usize) -> Self {
        trace!("Setting operand shift {:?}", shift);
        self.inner.shift = shift;
        self
    }

    /// Sets the language-level representation.
    pub fn repr(mut self, repr: OperandRepr) -> Self {
        trace!("Setting operand repr {:?}", repr);
        self.inner.repr = repr;
        self
    }

    /// Sets the field-mapping closure.
    pub fn map(mut self, map_func: OperatorMapFn) -> Self {
        trace!("Setting operand to decoded instruction mapping method");
        self.inner.map = Some(map_func);
        self
    }

    /// Finalizes and returns the operand.
    pub fn build(self) -> Operand {
        trace!("Building operand {:?}", self.inner.name);
        self.inner
    }
}
/// Language-level representation of an operand: signedness plus storage
/// size (byte / short / word).
#[derive(Debug, Clone)]
pub enum OperandRepr {
    SignedByte,
    SignedShort,
    SignedWord,
    UnsignedByte,
    UnsignedShort,
    UnsignedWord,
}
/// A closure that writes a decoded operand value (passed as `&dyn Any`)
/// into the matching field of an `InstrThumb16` variant.
pub trait OperandMap: Fn(&Operand, &mut InstrThumb16, &dyn Any) { }
// Blanket impl for any matching closure. The previous `F: Clone` bound was
// stricter than the trait requires and needlessly excluded non-cloneable
// closures; removing it is strictly more permissive (backward compatible).
impl<F> OperandMap for F where F: Fn(&Operand, &mut InstrThumb16, &dyn Any) { }
/// Boxed, type-erased operand mapping function.
pub type OperatorMapFn = Box<dyn OperandMap>;
impl std::fmt::Debug for OperatorMapFn {
    /// Closures are opaque; print a fixed placeholder.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "[Operand Mapping Function]")
    }
}
/// A closure that constructs a zero-initialized `InstrThumb16` variant.
pub trait VariantCtor: Fn() -> InstrThumb16 { }
// Blanket impl: any zero-argument closure returning an instruction qualifies.
impl<F> VariantCtor for F where F: Fn() -> InstrThumb16 { }
/// Boxed, type-erased variant constructor.
pub type VariantCtorFn = Box<dyn VariantCtor>;
impl std::fmt::Debug for VariantCtorFn {
    /// Closures are opaque; print a fixed placeholder.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "[Variant Constructor Function]")
    }
}
|
use schemars::JsonSchema;
use std::collections::HashMap;
/// Target dimensions for video scaling. A `None` axis is rendered as `-1`
/// in the generated filter parameters (see the `HashMap` conversion below).
#[derive(Clone, Debug, Deserialize, JsonSchema, PartialEq, Serialize)]
#[cfg_attr(feature = "python", derive(FromPyObject, IntoPyObject))]
pub struct VideoScaling {
    pub width: Option<u32>,
    pub height: Option<u32>,
}
impl Into<HashMap<String, String>> for &VideoScaling {
fn into(self) -> HashMap<String, String> {
let width = self.width.map_or((-1).to_string(), |w| w.to_string());
let height = self.height.map_or((-1).to_string(), |h| h.to_string());
[("width", width), ("height", height)]
.iter()
.map(|(key, value)| (key.to_string(), value.clone()))
.collect()
}
}
/// Exercises every combination of set/unset width and height.
#[test]
pub fn test_video_scaling_get_filter_parameters() {
    let check = |scaling: &VideoScaling, width: &str, height: &str| {
        let parameters: HashMap<String, String> = scaling.into();
        assert_eq!(width, parameters.get("width").unwrap().as_str());
        assert_eq!(height, parameters.get("height").unwrap().as_str());
    };
    // Unset dimensions are rendered as "-1".
    check(&VideoScaling { width: None, height: None }, "-1", "-1");
    check(&VideoScaling { width: Some(1234), height: None }, "1234", "-1");
    check(&VideoScaling { width: None, height: Some(1234) }, "-1", "1234");
    check(&VideoScaling { width: Some(1234), height: Some(5678) }, "1234", "5678");
}
|
use crate::name::{ NamePtr, NamesPtr, StringPtr };
use crate::level::LevelsPtr;
use crate::expr::ExprPtr;
use crate::tc::infer::InferFlag::*;
use crate::quot::add_quot;
use crate::inductive::IndBlock;
use crate::utils::{
Tc,
Ptr,
Env,
Live,
IsCtx,
IsLiveCtx,
LiveZst,
ListPtr,
List::*,
Store,
HasNanodaDbg
};
use Notation::*;
use ReducibilityHint::*;
use Declar::*;
use DeclarSpec::*;
/// Pointer to a single recursor rule.
pub type RecRulePtr<'a> = Ptr<'a, RecRule<'a>>;
/// Pointer to a list of recursor rules.
pub type RecRulesPtr<'a> = ListPtr<'a, RecRule<'a>>;
/// Parser notation attached to a declaration: operator text plus priority,
/// in prefix, infix or postfix position.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Notation<'a> {
    Prefix { name : NamePtr<'a>, priority : usize, oper : StringPtr<'a> },
    Infix { name : NamePtr<'a>, priority : usize, oper : StringPtr<'a> },
    Postfix { name : NamePtr<'a>, priority : usize, oper : StringPtr<'a> },
}
impl<'a> Notation<'a> {
pub fn new_prefix(name : NamePtr<'a>, priority : usize, oper : StringPtr<'a>) -> Self {
Prefix { name, priority, oper }
}
pub fn new_infix(name : NamePtr<'a>, priority : usize, oper : StringPtr<'a>) -> Self {
Infix { name, priority, oper }
}
pub fn new_postfix(name : NamePtr<'a>, priority : usize, oper : StringPtr<'a>) -> Self {
Postfix { name, priority, oper }
}
}
/// A declaration as specified by the caller, before compilation/checking
/// turns it into a concrete [`Declar`] admitted to the environment.
#[derive(Debug)]
pub enum DeclarSpec<'a> {
    /// An axiom: a type with no value.
    AxiomSpec {
        name : NamePtr<'a>,
        type_ : ExprPtr<'a>,
        uparams : LevelsPtr<'a>,
        is_unsafe : bool,
    },
    /// A definition: a type together with its defining value.
    DefinitionSpec {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        val : ExprPtr<'a>,
        is_unsafe : bool,
    },
    /// A theorem (compilation currently unimplemented).
    TheoremSpec {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        val : ExprPtr<'a>,
    },
    /// An opaque definition (compilation currently unimplemented).
    OpaqueSpec {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        val : ExprPtr<'a>,
    },
    /// The built-in quotient declarations.
    QuotSpec,
    /// A block of (possibly mutual) inductive types.
    InductiveSpec(IndBlock<'a>),
}
impl<'l, 'e : 'l> DeclarSpec<'e> {
    /// New axiom spec; every component must already live in the environment.
    pub fn new_axiom(name : NamePtr<'e>,
                     uparams : LevelsPtr<'e>,
                     type_ : ExprPtr<'e>,
                     is_unsafe : bool) -> Self {
        assert!(name.in_env() && uparams.in_env() && type_.in_env());
        AxiomSpec {
            name,
            uparams,
            type_,
            is_unsafe,
        }
    }
    /// New definition spec; every component must already live in the
    /// environment.
    pub fn new_def(name : NamePtr<'e>,
                   uparams : LevelsPtr<'e>,
                   type_ : ExprPtr<'e>,
                   val : ExprPtr<'e>,
                   is_unsafe : bool) -> Self {
        assert!(name.in_env() && uparams.in_env() && type_.in_env() && val.in_env());
        DefinitionSpec {
            name,
            uparams,
            type_,
            val,
            is_unsafe,
        }
    }
    /// Spec for the built-in quotient declarations.
    pub fn new_quot() -> Self {
        QuotSpec
    }
    /// Spec wrapping a whole block of inductive types.
    pub fn new_inductive(indblock : IndBlock<'e>) -> Self {
        InductiveSpec(indblock)
    }
    /// Compiles this spec into concrete declaration(s) admitted to
    /// `compiler`. Theorem and Opaque specs are unimplemented and panic.
    pub fn compile(self, compiler : &mut Live<'l, 'e>) {
        match self {
            AxiomSpec { name, uparams, type_, is_unsafe } => {
                if compiler.debug_mode() {
                    println!("compiling axiom: {}", name.nanoda_dbg(compiler));
                }
                let d = Axiom {
                    name,
                    uparams,
                    type_,
                    is_unsafe,
                };
                compiler.admit_declar(d);
            },
            DefinitionSpec { name, uparams, type_, val, is_unsafe } => {
                if compiler.debug_mode() {
                    println!("compiling def : {}", name.nanoda_dbg(compiler));
                }
                let d = Definition {
                    name,
                    uparams,
                    type_,
                    val,
                    // Reducibility hint derived from the value's height.
                    hint : Reg(val.calc_height(compiler)),
                    is_unsafe,
                };
                compiler.admit_declar(d);
            }
            TheoremSpec {..} => {
                unimplemented!("Theorem not yet implemented")
            }
            OpaqueSpec {..} => {
                unimplemented!("Opaque not yet implemented")
            },
            QuotSpec => {
                if compiler.debug_mode() {
                    println!("compiling quot")
                }
                add_quot(compiler)
            }
            // Right now, the compilation step for inductive also includes
            // all of their checks. Breaking them out would require making
            // annoying changes to the inductive module, and as things are now
            // (it might change with mutuals) they're extremely cheap to check
            // compared to definitions, so I'm just going to let it rock.
            InductiveSpec(mut indblock) => {
                if compiler.debug_mode() {
                    println!("compiling inductive: {}", indblock.ind_names.nanoda_dbg(compiler));
                }
                // NOTE(review): this call sequence appears order-dependent
                // (each step seems to build state the next one reads) —
                // do not reorder without checking the inductive module.
                indblock.declare_ind_types(compiler);
                indblock.mk_local_indices(compiler);
                indblock.declare_cnstrs(compiler);
                indblock.mk_elim_level(compiler);
                indblock.init_k_target(compiler);
                indblock.mk_majors_wrapper(compiler);
                indblock.mk_motives_wrapper(compiler);
                indblock.mk_minors_wrapper(compiler);
                indblock.declare_rec_rules(compiler);
                indblock.declare_recursors(compiler);
            }
        }
    }
}
/// One computation rule of a recursor: how to reduce an application whose
/// major premise is built from `cnstr_name`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct RecRule<'a> {
    pub cnstr_name : NamePtr<'a>,
    /// Number of constructor fields (non-parameter arguments).
    pub num_fields : u16,
    /// The rule's right-hand side.
    pub val : ExprPtr<'a>
}
impl<'a> RecRule<'a> {
    /// Bundles a constructor name, its field count and the rule body.
    pub fn new(cnstr_name: NamePtr<'a>, num_fields: u16, val: ExprPtr<'a>) -> Self {
        RecRule { cnstr_name, num_fields, val }
    }

    /// Re-inserts this rule's components into the environment and allocates
    /// the resulting rule there, returning an environment-lifetime pointer.
    pub fn insert_env<'e>(
        self,
        env: &mut Env<'e>,
        live: &Store<LiveZst>,
    ) -> RecRulePtr<'e> {
        let cnstr_name = self.cnstr_name.insert_env(env, live);
        let val = self.val.insert_env(env, live);
        RecRule { cnstr_name, num_fields: self.num_fields, val }.alloc(env)
    }
}
/// Scans the rule list for the rule whose constructor name is `c_name`.
fn get_rec_rule_aux<'a>(
    rem_rules: ListPtr<'a, RecRule<'a>>,
    c_name: NamePtr<'a>,
    ctx: &impl IsLiveCtx<'a>,
) -> Option<RecRulePtr<'a>> {
    // Iterative walk of the cons list (same traversal as the former
    // tail recursion).
    let mut rules = rem_rules;
    loop {
        match rules.read(ctx) {
            Cons(hd, _) if hd.read(ctx).cnstr_name == c_name => return Some(hd),
            Cons(_, tl) => rules = tl,
            Nil => return None,
        }
    }
}
impl<'a> HasNanodaDbg<'a> for RecRule<'a> {
    /// Human-readable debug rendering of the rule.
    fn nanoda_dbg(self, ctx: &impl IsCtx<'a>) -> String {
        let cnstr = self.cnstr_name.nanoda_dbg(ctx);
        let val = self.val.nanoda_dbg(ctx);
        format!("RecRule( cnstr_name : {}, num_fields : {}, val : {})",
                cnstr,
                self.num_fields,
                val)
    }
}
/// A fully-elaborated declaration as admitted to the environment.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Declar<'a> {
    /// A type with no defining value.
    Axiom {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        is_unsafe : bool,
    },
    /// A definition, carrying its value and a reducibility hint.
    Definition {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        val : ExprPtr<'a>,
        hint : ReducibilityHint,
        is_unsafe : bool,
    },
    /// A theorem: a proved proposition with its proof term.
    Theorem {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        val : ExprPtr<'a>,
    },
    /// An opaque definition.
    Opaque {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        val : ExprPtr<'a>,
    },
    /// One of the built-in quotient declarations.
    Quot {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
    },
    /// An inductive type, linked to its sibling types and constructors.
    Inductive {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        num_params : u16,
        all_ind_names : NamesPtr<'a>,
        all_cnstr_names : NamesPtr<'a>,
        //pub is_rec : bool,
        //pub is_reflexive : bool,
        is_unsafe : bool,
    },
    /// A constructor of an inductive type.
    Constructor {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        parent_name : NamePtr<'a>,
        num_fields : u16,
        num_params : u16,
        is_unsafe : bool,
    },
    /// A recursor (eliminator) with its computation rules.
    Recursor {
        name : NamePtr<'a>,
        uparams : LevelsPtr<'a>,
        type_ : ExprPtr<'a>,
        all_names : NamesPtr<'a>,
        num_params : u16,
        num_indices : u16,
        num_motives : u16,
        num_minors : u16,
        /// Position of the major premise among the recursor's arguments.
        major_idx : u16,
        rec_rules : ListPtr<'a, RecRule<'a>>,
        /// Whether K-like reduction applies.
        is_k : bool,
        is_unsafe : bool,
    }
}
impl<'a> Declar<'a> {
pub fn get_hint(self) -> ReducibilityHint {
match self {
Definition { hint, .. } => hint,
owise => unreachable!("Only Definition declars have a reducibility hint! found {:#?}", owise)
}
}
pub fn rec_num_params(&self) -> Option<u16> {
match self {
Recursor { num_params, .. } => Some(*num_params),
_ => None
}
}
pub fn rec_num_motives(&self) -> Option<u16> {
match self {
Recursor { num_motives, .. } => Some(*num_motives),
_ => None
}
}
pub fn rec_num_minors(&self) -> Option<u16> {
match self {
Recursor { num_minors, .. } => Some(*num_minors),
_ => None
}
}
pub fn rec_major_idx(&self) -> Option<u16> {
match self {
Recursor { major_idx, .. } => Some(*major_idx),
_ => None
}
}
pub fn rec_is_k(&self) -> Option<bool> {
match self {
Recursor { is_k, .. } => Some(*is_k),
_ => None
}
}
pub fn get_rec_rule(
self,
major : ExprPtr<'a>,
ctx : &impl IsLiveCtx<'a>
) -> Option<RecRulePtr<'a>> {
match self {
Recursor { rec_rules, .. } => {
let (c_name, _) = major.unfold_apps_fun(ctx).try_const_info(ctx)?;
get_rec_rule_aux(rec_rules, c_name, ctx)
},
_ => None
}
}
pub fn new_axiom(
name : NamePtr<'a>,
uparams : LevelsPtr<'a>,
type_ : ExprPtr<'a>,
is_unsafe : bool
) -> Self {
Axiom {
name,
uparams,
type_,
is_unsafe
}
}
pub fn new_definition(
name : NamePtr<'a>,
uparams : LevelsPtr<'a>,
type_ : ExprPtr<'a>,
val : ExprPtr<'a>,
is_unsafe : bool,
live : &mut Live<'a, '_>
) -> Self {
Definition {
name,
uparams,
type_,
val,
hint : Reg(val.calc_height(live)),
is_unsafe
}
}
pub fn name(&self) -> NamePtr<'a> {
match self {
| Axiom { name, .. }
| Definition { name, .. }
| Theorem { name, .. }
| Opaque { name, .. }
| Quot { name, .. }
| Inductive { name, .. }
| Constructor { name, .. }
| Recursor { name, .. } => *name,
}
}
pub fn uparams(&self) -> LevelsPtr<'a> {
match self {
| Axiom { uparams, .. }
| Definition { uparams, .. }
| Theorem { uparams, .. }
| Opaque { uparams, .. }
| Quot { uparams, .. }
| Inductive { uparams, .. }
| Constructor { uparams, .. }
| Recursor { uparams, .. } => *uparams,
}
}
pub fn type_(&self) -> ExprPtr<'a> {
match self {
| Axiom { type_, .. }
| Definition { type_, .. }
| Theorem { type_, .. }
| Opaque { type_, .. }
| Quot { type_, .. }
| Inductive { type_, .. }
| Constructor { type_, .. }
| Recursor { type_, .. } => *type_,
}
}
pub fn is_unsafe(&self) -> bool {
match self {
Theorem {..}
| Opaque {..}
| Quot {..} => false,
| Axiom { is_unsafe, .. }
| Definition { is_unsafe, .. }
| Inductive { is_unsafe, .. }
| Constructor { is_unsafe, .. }
| Recursor { is_unsafe, .. } => *is_unsafe,
}
}
pub fn new_inductive(
name : NamePtr<'a>,
uparams : LevelsPtr<'a>,
type_ : ExprPtr<'a>,
num_params : u16,
all_ind_names : NamesPtr<'a>,
all_cnstr_names : NamesPtr<'a>,
//is_rec : bool,
//is_reflexive : bool,
is_unsafe : bool
) -> Self {
Inductive {
name,
uparams,
type_,
num_params,
all_ind_names,
all_cnstr_names,
//is_rec,
//is_reflexive,
is_unsafe
}
}
pub fn new_cnstr<'e>(
name : NamePtr<'a>,
uparams : LevelsPtr<'a>,
type_ : ExprPtr<'a>,
parent_name : NamePtr<'a>,
num_params : u16,
is_unsafe : bool,
ctx : &mut Live<'a, 'e>
) -> Self {
Constructor {
name,
uparams,
type_,
parent_name,
num_fields : type_.telescope_size(ctx) - num_params,
num_params,
is_unsafe
}
}
pub fn new_recursor(
name : NamePtr<'a>,
uparams : LevelsPtr<'a>,
type_ : ExprPtr<'a>,
all_names : NamesPtr<'a>,
num_params : u16,
num_indices : u16,
num_motives : u16,
num_minors : u16,
major_idx : u16,
rec_rules : RecRulesPtr<'a>,
is_k : bool,
is_unsafe : bool
) -> Self {
Recursor {
name,
uparams,
type_,
all_names,
num_params,
num_indices,
num_motives,
num_minors,
major_idx,
rec_rules,
is_k,
is_unsafe,
}
}
/// Migrate this declaration from the short-lived `live` store into the
/// long-lived environment, re-inserting every arena-backed field
/// (names, levels, exprs, rec rules) so the returned `Declar` holds
/// only environment pointers. Plain data fields (counts, flags,
/// hints) are copied through unchanged.
pub fn insert_env<'e>(
    self,
    env : &mut Env<'e>,
    live : &Store<'a, LiveZst>
) -> Declar<'e> {
    // The three fields shared by every variant are migrated up front.
    let name = self.name().insert_env(env, live);
    let uparams = self.uparams().insert_env(env, live);
    let type_ = self.type_().insert_env(env, live);
    match self {
        Axiom { is_unsafe, .. } => {
            Axiom {
                name,
                uparams,
                type_,
                is_unsafe
            }
        },
        Definition { val, hint, is_unsafe, .. } => {
            Definition {
                name,
                uparams,
                type_,
                val : val.insert_env(env, live),
                hint,
                is_unsafe,
            }
        },
        Theorem { val, .. } => {
            Theorem {
                name,
                uparams,
                type_,
                val : val.insert_env(env, live),
            }
        },
        Opaque { val, .. } => {
            Opaque {
                name,
                uparams,
                type_,
                val : val.insert_env(env, live),
            }
        },
        Quot { .. } => {
            Quot {
                name,
                uparams,
                type_
            }
        },
        Inductive { num_params, all_ind_names, all_cnstr_names, is_unsafe, .. } => {
            Inductive {
                name,
                uparams,
                type_,
                num_params,
                all_ind_names : all_ind_names.insert_env(env, live),
                all_cnstr_names : all_cnstr_names.insert_env(env, live),
                is_unsafe
            }
        },
        Constructor { parent_name, num_fields, num_params, is_unsafe, .. } => {
            Constructor {
                name,
                uparams,
                type_,
                parent_name : parent_name.insert_env(env, live),
                num_fields,
                num_params,
                is_unsafe
            }
        },
        Recursor { all_names, num_params, num_indices, num_motives, num_minors, major_idx, rec_rules, is_k, is_unsafe, .. } => {
            Recursor {
                name,
                uparams,
                type_,
                all_names : all_names.insert_env(env, live),
                num_params,
                num_indices,
                num_motives,
                num_minors,
                major_idx,
                rec_rules : rec_rules.insert_env(env, live),
                is_k,
                is_unsafe
            }
        }
    }
}
/// Typecheck a declaration before admission. Only `Axiom` and safe
/// `Definition` are actually checked here; the inductive-related
/// variants are validated during `compile` (see the comment below),
/// and `Theorem` / `Opaque` do not occur in lean3 exports.
pub fn check<'l>(self, _should_check : bool, live : &mut Live<'l, 'a>) {
    match self {
        Axiom { name, uparams, type_, .. } => {
            if live.debug_mode() {
                println!("checking axiom: {}", name.nanoda_dbg(live));
            }
            // An axiom has no value; only its vitals need checking.
            check_vitals(name, uparams, type_, live);
        },
        Definition { name, uparams, type_, val, is_unsafe, .. } if !is_unsafe => {
            if live.debug_mode() {
                println!("checking def: {}", name.nanoda_dbg(live));
            }
            {
                // One typechecker is shared by both steps so the
                // inference of `type_` below is a cache hit.
                let mut tc = live.as_tc(Some(uparams), None);
                check_vitals_w_tc(name, uparams, type_, &mut tc);
                // The value's inferred type must be definitionally
                // equal to the declared type.
                let val_type = val.infer(Check, &mut tc);
                val_type.assert_def_eq(type_, &mut tc);
            }
        },
        // Unsafe definitions are not supported yet; the sketch of the
        // intended handling is kept below.
        Definition {..} => {
            unimplemented!("unsafe declar");
            //assert!(is_unsafe);
            //if should_check {
            //    // FIXME handle safe_only flag properly
            //    let mut tc = live.get_tc(Some(uparams));
            //    check_vitals_w_tc(name, uparams, type_, &mut tc);
            //}
            //let declar = Declar::new_definition(name, uparams, type_, val, is_unsafe, live);
            //live.admit_declar(declar);
            //if should_check {
            //    let mut tc = live.get_tc(Some(uparams));
            //    let val_type = val.infer(Check, &mut tc);
            //    assert!(val_type.def_eq(type_, &mut tc).is_eq_short());
            //}
        },
        // All of these are done in `compile` right now.
        // See the comment in that method.
        Quot {..} => (),
        Inductive {..} => (),
        Constructor {..} => (),
        Recursor {..} => (),
        Theorem {..} => unimplemented!("Theorem not implemented in lean3!"),
        Opaque {..} => unimplemented!("Opaque not implemented in lean3!")
    }
}
}
impl<'a> HasNanodaDbg<'a> for Declar<'a> {
    /// Debug pretty-printing for whole declarations is not implemented;
    /// calling this panics unconditionally.
    fn nanoda_dbg(self, _ctx : &impl IsCtx<'a>) -> String {
        unimplemented!()
    }
}
/// Unfolding hint attached to definitions; consulted by definitional
/// equality checking to decide which side to unfold.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ReducibilityHint {
    /// Opaque: not unfolded automatically.
    Opaq,
    /// Regular definition carrying a height value (presumably the
    /// definitional height used to order unfolding -- TODO confirm).
    Reg(u16),
    /// Abbreviation: unfolded eagerly.
    Abbrev,
}
// We reuse the typechecker for definitions, so the inference
// will be a cache hit.
/// Check the invariants every declaration must satisfy: name, uparams
/// and type already live in the environment; uparams contain no
/// duplicates; the type is closed (no free locals); and the type's
/// inferred type is a sort (i.e. the declared type is actually a type).
pub fn check_vitals_w_tc<'t, 'l : 't, 'e : 'l>(
    name : NamePtr<'l>,
    uparams : LevelsPtr<'l>,
    type_ : ExprPtr<'l>,
    tc : &mut Tc<'t, 'l, 'e>
) {
    assert!(name.in_env());
    assert!(uparams.in_env());
    assert!(type_.in_env());
    assert!(uparams.no_dupes(tc));
    assert!(!type_.has_locals(tc));
    let inferred_type = type_.infer(Check, tc);
    inferred_type.ensure_sort(tc);
}
/// Convenience wrapper around [`check_vitals_w_tc`] that builds a
/// fresh typechecker from `live` for the given universe parameters.
pub fn check_vitals<'t, 'l : 't, 'e : 'l>(
    name : NamePtr<'l>,
    uparams : LevelsPtr<'l>,
    type_ : ExprPtr<'l>,
    /*safe_only : Option<bool>,*/
    live : &mut Live<'l, 'e>
) {
    let mut tc = live.as_tc(Some(uparams), None);
    check_vitals_w_tc(name, uparams, type_, &mut tc);
}
extern crate clap;
use self::clap::{App, Arg};
use config::Config;
/// Parse command-line arguments and assemble the runtime `Config`.
/// `source_port` and `endpoint` both declare clap `default_value`s, so
/// their `value_of` lookups below always yield `Some`.
pub fn get_opts() -> Config {
    let matches = App::new("dowser - ike-scan convenience facade")
        .version(env!("CARGO_PKG_VERSION"))
        .author("eHealth Experts GmbH https://www.ehealthexperts.de")
        .about("Convenience wrapper tool that utilizes ike-scan and interprets the output to analyze VPN connectivity from a certain environment.")
        .arg(Arg::with_name("verbose")
            .short("v")
            .help("Activate verbose log output")
        )
        .arg(Arg::with_name("source_port")
            .short("s")
            .long("sport")
            .value_name("ike-scan source port")
            .default_value("4242")
            .help("Local source port to establish connection from (only non-NAT-T case)")
            .takes_value(true)
        )
        // Positional argument (no short/long flag).
        .arg(Arg::with_name("endpoint")
            .value_name("VPN endpoint IP")
            .default_value("146.185.113.4")
            .help("IP of the VPN endpoint to attempt to connect to")
            .takes_value(true)
        )
        .get_matches();
    let mut cfg_bldr = Config::builder();
    // Both branches are always taken thanks to the default values.
    if let Some(endpoint) = matches.value_of("endpoint") {
        cfg_bldr.vpn_endpoint_ip(endpoint);
    }
    if let Some(port_arg) = matches.value_of("source_port") {
        cfg_bldr.source_port(port_arg);
    }
    if matches.is_present("verbose") {
        cfg_bldr.verbose();
    }
    cfg_bldr.build()
}
|
#![allow(unused)]
use std::process::Command;
use std::io;
#[cfg(windows)] use winres::WindowsResource;
/// Build script: bundles the `app` folder into a resource archive via
/// the external `packfolder` tool and, on Windows, embeds the program
/// icon through `winres`.
fn main() {
    #[cfg(target_os = "windows")]
    fn pack_resource() {
        Command::new("packfolder")
            .args(&["app", "target/assets.rc", "-binary"])
            .output()
            .expect("Unable to run packfolder.exe!");
        WindowsResource::new()
            .set_icon("icon.ico")
            .compile()
            // `compile` returns io::Result; failing silently here left
            // binaries without resources — fail the build instead.
            .expect("Unable to compile Windows resources!");
    }
    #[cfg(target_os = "linux")]
    fn pack_resource() {
        Command::new("packfolder")
            .args(&["app", "target/assets.rc", "-binary"])
            .output()
            .expect("Unable to run packfolder!");
    }
    // Fallback so the build script still compiles on other targets
    // (e.g. macOS), where no `pack_resource` was previously defined
    // and the call below was a compile error.
    #[cfg(not(any(target_os = "windows", target_os = "linux")))]
    fn pack_resource() {}
    pack_resource();
}
|
//! This crate provides generic escaping of characters without requiring allocations. It leverages
//! `fast_fmt` crate to do this.
//!
//! # Examples
//!
//! Escaping whole writer
//!
//! ```
//! #[macro_use]
//! extern crate fast_fmt;
//! extern crate fast_escape;
//! extern crate void;
//!
//! use fast_escape::Escaper;
//! use fast_fmt::Write;
//! use void::ResultVoidExt;
//!
//! fn main() {
//! let mut s = String::new();
//! {
//! let s = &mut s;
//! let mut tr = s.transform(Escaper::new('\\', '$'));
//!
//! fwrite!(&mut tr, "abcd$efgh").void_unwrap();
//! }
//!
//! assert_eq!(s, "abcd\\$efgh");
//! }
//! ```
//!
//! Escaping part of formatted text
//!
//! ```
//! #[macro_use]
//! extern crate fast_fmt;
//! extern crate fast_escape;
//! extern crate void;
//!
//! use fast_escape::Escaper;
//! use void::ResultVoidExt;
//!
//! fn main() {
//! let mut s = String::new();
//! let special_chars = ['$', '"'];
//! let escaper: Escaper<&[char]> = Escaper::new('\\', &special_chars);
//! let value = "$Hello \"world\"!";
//! fwrite!(&mut s, "$foo=\"", value.transformed(escaper), "\"").void_unwrap();
//!
//! assert_eq!(s, "$foo=\"\\$Hello \\\"world\\\"!\"");
//! }
//! ```
#![no_std]
#[cfg_attr(test, macro_use)]
extern crate fast_fmt;
#[cfg(feature = "std")]
extern crate std;
/// Represents set of chars used for configuring `Escaper`.
pub trait ContainsChar {
    /// Returns true if the set represented by the type contains `c`.
    fn contains_char(&self, c: char) -> bool;
    /// Combinator for creating unions of the sets: the resulting set
    /// contains a char iff either operand does.
    fn union<T: ContainsChar>(self, other: T) -> Union<Self, T> where Self: Sized {
        Union::new(self, other)
    }
}
/// Blanket impl: a reference to a set is the same set. UFCS makes it
/// explicit that we delegate to `T`'s impl, not recurse into this one.
impl<'a, T: ContainsChar + ?Sized> ContainsChar for &'a T {
    fn contains_char(&self, c: char) -> bool {
        <T as ContainsChar>::contains_char(*self, c)
    }
}
/// A single `char` acts as the one-element set containing itself.
impl ContainsChar for char {
    fn contains_char(&self, c: char) -> bool {
        *self == c
    }
}
/// A slice of chars is the finite set of its elements (linear scan).
impl ContainsChar for [char] {
    fn contains_char(&self, c: char) -> bool {
        self.iter().any(|&special| special == c)
    }
}
/// Half-open interval of chars: `start <= c < end`.
impl ContainsChar for core::ops::Range<char> {
    fn contains_char(&self, c: char) -> bool {
        // `Range::contains` performs exactly `start <= c && c < end`.
        self.contains(&c)
    }
}
/// Lower-bounded interval of chars: `c >= start`.
impl ContainsChar for core::ops::RangeFrom<char> {
    fn contains_char(&self, c: char) -> bool {
        // `RangeFrom::contains` performs exactly `start <= c`.
        self.contains(&c)
    }
}
/// Upper-bounded (exclusive) interval of chars: `c < end`.
impl ContainsChar for core::ops::RangeTo<char> {
    fn contains_char(&self, c: char) -> bool {
        // `RangeTo::contains` performs exactly `c < end`.
        self.contains(&c)
    }
}
/// `..` is the universal set: every char is a member.
impl ContainsChar for core::ops::RangeFull {
    fn contains_char(&self, _: char) -> bool {
        true
    }
}
// Only available with the `std` feature (the crate is no_std by default).
#[cfg(feature = "std")]
impl<S: std::hash::BuildHasher> ContainsChar for std::collections::HashSet<char, S> {
    fn contains_char(&self, c: char) -> bool {
        self.contains(&c)
    }
}
// Only available with the `std` feature (the crate is no_std by default).
#[cfg(feature = "std")]
impl ContainsChar for std::collections::BTreeSet<char> {
    fn contains_char(&self, c: char) -> bool {
        self.contains(&c)
    }
}
/// Union of two sets of chars.
/// Constructed via [`ContainsChar::union`]; contains a char iff either
/// `a` or `b` does.
pub struct Union<A: ContainsChar, B: ContainsChar> {
    a: A,
    b: B,
}
impl<A: ContainsChar, B: ContainsChar> Union<A, B> {
    // Private: users build unions through `ContainsChar::union`.
    fn new(a: A, b: B) -> Self {
        Union {
            a,
            b
        }
    }
}
impl<A: ContainsChar, B: ContainsChar> ContainsChar for Union<A, B> {
    /// Membership in either operand set; `b` is consulted only when
    /// `a` does not match (short-circuit, same as `||`).
    fn contains_char(&self, c: char) -> bool {
        if self.a.contains_char(c) {
            true
        } else {
            self.b.contains_char(c)
        }
    }
}
/// Set defined by given predicate (function).
pub struct Predicate<F: Fn(char) -> bool>(pub F);

impl<F: Fn(char) -> bool> ContainsChar for Predicate<F> {
    /// Membership is exactly the wrapped predicate's verdict for `c`.
    fn contains_char(&self, c: char) -> bool {
        (self.0)(c)
    }
}
/// This struct provides escaping of characters.
pub struct Escaper<C: ContainsChar> {
    // The set of characters that must be escaped.
    chars: C,
    // The character written in front of each special character.
    escape: char,
}
impl<C: ContainsChar> Escaper<C> {
    /// Creates the escaper.
    /// `escape_char` is the char which is used for escaping (e.g. '\\');
    /// `special_chars` is the set of chars that should be escaped.
    pub fn new(escape_char: char, special_chars: C) -> Self {
        Escaper {
            escape: escape_char,
            chars: special_chars,
        }
    }
}
impl<C: ContainsChar> fast_fmt::transform::Transform for Escaper<C> {
    /// Writes `c`, preceded by the escape char when `c` is special.
    fn transform_char<W: fast_fmt::Write>(&self, writer: &mut W, c: char) -> Result<(), W::Error> {
        if self.chars.contains_char(c) {
            writer.write_char(self.escape)?;
        }
        writer.write_char(c)
    }
    /// Worst-case size of the transformed output: every input char may
    /// be special, adding one escape char per input char, so the bound
    /// is `bytes + bytes * escape_len`. The previous
    /// `bytes * escape_len` underestimated (with a 1-byte escape such
    /// as '\\' it returned `bytes`, although output can be 2x that).
    fn transform_size_hint(&self, bytes: usize) -> usize {
        bytes + bytes * self.escape.len_utf8()
    }
}
#[cfg(test)]
mod tests {
    use ::Escaper;
    use fast_fmt::Write;
    use ::std::string::String;
    // Escaping a single special char: '$' gets a '\' prepended.
    #[test]
    fn single_char() {
        let mut s = String::new();
        {
            let s = &mut s;
            let mut tr = s.transform(Escaper::new('\\', '$'));
            fwrite!(&mut tr, "abcd$efgh").unwrap();
        }
        assert_eq!(s, "abcd\\$efgh");
    }
    // Escaping a half-open char range: 'a' and 'b' match 'a'..'c', 'c' does not.
    #[test]
    fn range() {
        let mut s = String::new();
        {
            let s = &mut s;
            let mut tr = s.transform(Escaper::new('\\', 'a'..'c'));
            fwrite!(&mut tr, "abcd$efgh").unwrap();
        }
        assert_eq!(s, "\\a\\bcd$efgh");
    }
    // Escaping the union of two ranges combined via `ContainsChar::union`.
    #[test]
    fn union() {
        use ::ContainsChar;
        let mut s = String::new();
        {
            let s = &mut s;
            let mut tr = s.transform(Escaper::new('\\', ('a'..'c').union('e'..'g')));
            fwrite!(&mut tr, "abcd$efgh").unwrap();
        }
        assert_eq!(s, "\\a\\bcd$\\e\\fgh");
    }
}
|
extern crate env_logger;
extern crate clap;
#[macro_use] extern crate log;
use env_logger::{
Builder,
Env
};
use clap::{App, Arg};
use std::net::SocketAddr;
use pcap::{Device, Capture};
use pnet::packet::{
ethernet::{EtherTypes, EthernetPacket},
ipv4::Ipv4Packet,
Packet,
};
mod handlers;
/// Copy `name`/`desc` of the capture device called `requested_device_s`
/// out of `vec_devices` into `requested_device`. Leaves
/// `requested_device` untouched when nothing matches; the caller
/// detects that by comparing the name afterwards.
fn get_requested_device<'a> (requested_device_s : &str, requested_device : &'a mut Device, vec_devices : &'a Vec<Device>) {
    for device in vec_devices {
        if device.name == requested_device_s {
            requested_device.name = device.name.clone();
            requested_device.desc = device.desc.clone();
            println!("-{} device has been captured!", requested_device_s);
            // Device names are unique per host; stop scanning once
            // found (the old code kept iterating — and would have
            // printed once per duplicate).
            break;
        }
    }
}
/// Entry point: parse CLI flags, open the requested interface with
/// libpcap and dump/dispatch every captured Ethernet frame.
fn main() {
    let flags = App::new("traffic-by-ip-exporter")
        .version("0.1.0")
        .about("Prometheus exporter for traffic accounting by IP")
        .author("Luis Felipe Domínguez Vega <ldominguezvega@gmail.com>")
        .arg(Arg::with_name("interface")
            .short("i")
            .long("interface")
            .help("Interface for listen")
            .required(true)
            .takes_value(true)
        )
        .arg(Arg::with_name("port")
            .short("p")
            .long("port")
            .help("Host port to expose http server")
            .required(false)
            .takes_value(true)
            .default_value("9185")
        )
        .arg(Arg::with_name("host")
            .short("h")
            .long("host")
            .help("Address where to expose http server")
            .required(false)
            .takes_value(true)
            .default_value("0.0.0.0")
        )
        .get_matches();
    // `unwrap` is safe: `interface` is required and the others default.
    let iface = flags.value_of("interface").unwrap();
    let expose_port = flags.value_of("port").unwrap();
    let expose_host = flags.value_of("host").unwrap();
    Builder::from_env(Env::default().default_filter_or("info")).init();
    info!("Using interface: {}", iface);
    // Parse address used to bind exporter to.
    let addr_raw = expose_host.to_owned() + ":" + expose_port;
    let addr: SocketAddr = addr_raw.parse().expect("can not parse listen addr");
    // Start exporter.
    /* let (request_receiver, finished_sender) = PrometheusExporter::run_and_notify(addr);
    let label_vector = [
        "direction",
        "src_ip",
        "dst_ip",
        "src_port",
        "dst_port"
    ];
    let traffic_by_ip_bits_opts = Opts::new("traffic_by_ip_bits", "Traffic by IP");
    traffic_by_ip_bits_opts.variable_labels(label_vector);
    let traffic_by_ip_bits = Gauge::with_opts(traffic_by_ip_bits_opts)
        .expect("Can't create gauge traffic_by_ip__bits");
    let r = Registry::new();
    r.register(Box::new(traffic_by_ip_bits.clone())).unwrap(); */
    let devices = Device::list();
    let mut main_device : Device = Device::lookup().unwrap();
    match devices {
        Ok(vec_devices) => {
            get_requested_device(&iface, &mut main_device, &vec_devices);
        }
        Err(_) => {
            error!("No devices found...");
            std::process::exit(1);
        }
    }
    // The requested interface was not among the enumerated devices.
    if main_device.name != iface {
        std::process::exit(1);
    }
    let mut cap = Capture::from_device(main_device).unwrap()
        .promisc(false)
        .snaplen(5000)
        .open().unwrap();
    while let Ok(packet) = cap.next() {
        let ethernet = EthernetPacket::new(packet.data).unwrap();
        match ethernet.get_ethertype() {
            EtherTypes::Ipv4 => {
                // Fixed mojibake: the source read `ðernet` — an
                // HTML-entity-decoded `&eth;` — where `&ethernet` was
                // clearly intended (no `ðernet` binding exists).
                handlers::handle_ipv4_packet(iface, &ethernet);
            }
            _ => println!("unhandled packet: {:?}", ethernet),
        }
    }
}
|
#[doc = "Register `AHB1RSTR` reader"]
pub type R = crate::R<AHB1RSTR_SPEC>;
#[doc = "Register `AHB1RSTR` writer"]
pub type W = crate::W<AHB1RSTR_SPEC>;
#[doc = "Field `GPIOARST` reader - IO port A reset"]
pub type GPIOARST_R = crate::BitReader<GPIOARST_A>;
#[doc = "IO port A reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GPIOARST_A {
    #[doc = "1: Reset the selected module"]
    Reset = 1,
}
// Only the `1` state is enumerated in the SVD; `0` has no named
// variant, which is why the reader's `variant()` returns an `Option`.
impl From<GPIOARST_A> for bool {
    #[inline(always)]
    fn from(variant: GPIOARST_A) -> Self {
        variant as u8 != 0
    }
}
impl GPIOARST_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<GPIOARST_A> {
        match self.bits {
            true => Some(GPIOARST_A::Reset),
            // `false` has no enumerated value in the SVD.
            _ => None,
        }
    }
    #[doc = "Reset the selected module"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == GPIOARST_A::Reset
    }
}
#[doc = "Field `GPIOARST` writer - IO port A reset"]
pub type GPIOARST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, GPIOARST_A>;
// Writer proxy for the GPIOARST bit; `O` is the bit offset within the register.
impl<'a, REG, const O: u8> GPIOARST_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Reset the selected module"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOARST_A::Reset)
    }
}
// All fields of AHB1RSTR are single reset bits with identical shape,
// so the remaining reader/writer types are re-exports of GPIOARST's.
#[doc = "Field `GPIOBRST` reader - IO port B reset"]
pub use GPIOARST_R as GPIOBRST_R;
#[doc = "Field `GPIOCRST` reader - IO port C reset"]
pub use GPIOARST_R as GPIOCRST_R;
#[doc = "Field `GPIODRST` reader - IO port D reset"]
pub use GPIOARST_R as GPIODRST_R;
#[doc = "Field `GPIOERST` reader - IO port E reset"]
pub use GPIOARST_R as GPIOERST_R;
#[doc = "Field `GPIOFRST` reader - IO port F reset"]
pub use GPIOARST_R as GPIOFRST_R;
#[doc = "Field `GPIOGRST` reader - IO port G reset"]
pub use GPIOARST_R as GPIOGRST_R;
#[doc = "Field `GPIOHRST` reader - IO port H reset"]
pub use GPIOARST_R as GPIOHRST_R;
#[doc = "Field `GPIOIRST` reader - IO port I reset"]
pub use GPIOARST_R as GPIOIRST_R;
#[doc = "Field `CRCRST` reader - CRC reset"]
pub use GPIOARST_R as CRCRST_R;
// NOTE(review): upstream doc said "DMA2 reset" for DMA1RST (SVD
// copy-paste error); corrected to DMA1 here.
#[doc = "Field `DMA1RST` reader - DMA1 reset"]
pub use GPIOARST_R as DMA1RST_R;
#[doc = "Field `DMA2RST` reader - DMA2 reset"]
pub use GPIOARST_R as DMA2RST_R;
#[doc = "Field `ETHMACRST` reader - Ethernet MAC reset"]
pub use GPIOARST_R as ETHMACRST_R;
#[doc = "Field `OTGHSRST` reader - USB OTG HS module reset"]
pub use GPIOARST_R as OTGHSRST_R;
#[doc = "Field `GPIOBRST` writer - IO port B reset"]
pub use GPIOARST_W as GPIOBRST_W;
#[doc = "Field `GPIOCRST` writer - IO port C reset"]
pub use GPIOARST_W as GPIOCRST_W;
#[doc = "Field `GPIODRST` writer - IO port D reset"]
pub use GPIOARST_W as GPIODRST_W;
#[doc = "Field `GPIOERST` writer - IO port E reset"]
pub use GPIOARST_W as GPIOERST_W;
#[doc = "Field `GPIOFRST` writer - IO port F reset"]
pub use GPIOARST_W as GPIOFRST_W;
#[doc = "Field `GPIOGRST` writer - IO port G reset"]
pub use GPIOARST_W as GPIOGRST_W;
#[doc = "Field `GPIOHRST` writer - IO port H reset"]
pub use GPIOARST_W as GPIOHRST_W;
#[doc = "Field `GPIOIRST` writer - IO port I reset"]
pub use GPIOARST_W as GPIOIRST_W;
#[doc = "Field `CRCRST` writer - CRC reset"]
pub use GPIOARST_W as CRCRST_W;
#[doc = "Field `DMA1RST` writer - DMA1 reset"]
pub use GPIOARST_W as DMA1RST_W;
#[doc = "Field `DMA2RST` writer - DMA2 reset"]
pub use GPIOARST_W as DMA2RST_W;
#[doc = "Field `ETHMACRST` writer - Ethernet MAC reset"]
pub use GPIOARST_W as ETHMACRST_W;
#[doc = "Field `OTGHSRST` writer - USB OTG HS module reset"]
pub use GPIOARST_W as OTGHSRST_W;
// Generated reader: one accessor per field, each extracting its bit.
impl R {
    #[doc = "Bit 0 - IO port A reset"]
    #[inline(always)]
    pub fn gpioarst(&self) -> GPIOARST_R {
        GPIOARST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - IO port B reset"]
    #[inline(always)]
    pub fn gpiobrst(&self) -> GPIOBRST_R {
        GPIOBRST_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - IO port C reset"]
    #[inline(always)]
    pub fn gpiocrst(&self) -> GPIOCRST_R {
        GPIOCRST_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - IO port D reset"]
    #[inline(always)]
    pub fn gpiodrst(&self) -> GPIODRST_R {
        GPIODRST_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - IO port E reset"]
    #[inline(always)]
    pub fn gpioerst(&self) -> GPIOERST_R {
        GPIOERST_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - IO port F reset"]
    #[inline(always)]
    pub fn gpiofrst(&self) -> GPIOFRST_R {
        GPIOFRST_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - IO port G reset"]
    #[inline(always)]
    pub fn gpiogrst(&self) -> GPIOGRST_R {
        GPIOGRST_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - IO port H reset"]
    #[inline(always)]
    pub fn gpiohrst(&self) -> GPIOHRST_R {
        GPIOHRST_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - IO port I reset"]
    #[inline(always)]
    pub fn gpioirst(&self) -> GPIOIRST_R {
        GPIOIRST_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 12 - CRC reset"]
    #[inline(always)]
    pub fn crcrst(&self) -> CRCRST_R {
        CRCRST_R::new(((self.bits >> 12) & 1) != 0)
    }
    // NOTE(review): doc corrected from "DMA2 reset" — bit 21 is DMA1.
    #[doc = "Bit 21 - DMA1 reset"]
    #[inline(always)]
    pub fn dma1rst(&self) -> DMA1RST_R {
        DMA1RST_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - DMA2 reset"]
    #[inline(always)]
    pub fn dma2rst(&self) -> DMA2RST_R {
        DMA2RST_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 25 - Ethernet MAC reset"]
    #[inline(always)]
    pub fn ethmacrst(&self) -> ETHMACRST_R {
        ETHMACRST_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 29 - USB OTG HS module reset"]
    #[inline(always)]
    pub fn otghsrst(&self) -> OTGHSRST_R {
        OTGHSRST_R::new(((self.bits >> 29) & 1) != 0)
    }
}
// Generated writer: one proxy constructor per field; the const generic
// on each `_W` type carries the field's bit offset.
impl W {
    #[doc = "Bit 0 - IO port A reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpioarst(&mut self) -> GPIOARST_W<AHB1RSTR_SPEC, 0> {
        GPIOARST_W::new(self)
    }
    #[doc = "Bit 1 - IO port B reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiobrst(&mut self) -> GPIOBRST_W<AHB1RSTR_SPEC, 1> {
        GPIOBRST_W::new(self)
    }
    #[doc = "Bit 2 - IO port C reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiocrst(&mut self) -> GPIOCRST_W<AHB1RSTR_SPEC, 2> {
        GPIOCRST_W::new(self)
    }
    #[doc = "Bit 3 - IO port D reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiodrst(&mut self) -> GPIODRST_W<AHB1RSTR_SPEC, 3> {
        GPIODRST_W::new(self)
    }
    #[doc = "Bit 4 - IO port E reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpioerst(&mut self) -> GPIOERST_W<AHB1RSTR_SPEC, 4> {
        GPIOERST_W::new(self)
    }
    #[doc = "Bit 5 - IO port F reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiofrst(&mut self) -> GPIOFRST_W<AHB1RSTR_SPEC, 5> {
        GPIOFRST_W::new(self)
    }
    #[doc = "Bit 6 - IO port G reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiogrst(&mut self) -> GPIOGRST_W<AHB1RSTR_SPEC, 6> {
        GPIOGRST_W::new(self)
    }
    #[doc = "Bit 7 - IO port H reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiohrst(&mut self) -> GPIOHRST_W<AHB1RSTR_SPEC, 7> {
        GPIOHRST_W::new(self)
    }
    #[doc = "Bit 8 - IO port I reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpioirst(&mut self) -> GPIOIRST_W<AHB1RSTR_SPEC, 8> {
        GPIOIRST_W::new(self)
    }
    #[doc = "Bit 12 - CRC reset"]
    #[inline(always)]
    #[must_use]
    pub fn crcrst(&mut self) -> CRCRST_W<AHB1RSTR_SPEC, 12> {
        CRCRST_W::new(self)
    }
    // NOTE(review): doc corrected from "DMA2 reset" — bit 21 is DMA1.
    #[doc = "Bit 21 - DMA1 reset"]
    #[inline(always)]
    #[must_use]
    pub fn dma1rst(&mut self) -> DMA1RST_W<AHB1RSTR_SPEC, 21> {
        DMA1RST_W::new(self)
    }
    #[doc = "Bit 22 - DMA2 reset"]
    #[inline(always)]
    #[must_use]
    pub fn dma2rst(&mut self) -> DMA2RST_W<AHB1RSTR_SPEC, 22> {
        DMA2RST_W::new(self)
    }
    #[doc = "Bit 25 - Ethernet MAC reset"]
    #[inline(always)]
    #[must_use]
    pub fn ethmacrst(&mut self) -> ETHMACRST_W<AHB1RSTR_SPEC, 25> {
        ETHMACRST_W::new(self)
    }
    #[doc = "Bit 29 - USB OTG HS module reset"]
    #[inline(always)]
    #[must_use]
    pub fn otghsrst(&mut self) -> OTGHSRST_W<AHB1RSTR_SPEC, 29> {
        OTGHSRST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe because the raw value may set reserved bits — presumably
    // with undefined hardware effects; confirm against the reference manual.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "AHB1 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb1rstr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb1rstr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AHB1RSTR_SPEC;
impl crate::RegisterSpec for AHB1RSTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ahb1rstr::R`](R) reader structure"]
impl crate::Readable for AHB1RSTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ahb1rstr::W`](W) writer structure"]
impl crate::Writable for AHB1RSTR_SPEC {
    // Zero bitmaps: no field of this register uses write-1-to-clear /
    // write-0-to-clear semantics (plain read-modify-write register).
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets AHB1RSTR to value 0"]
impl crate::Resettable for AHB1RSTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::io::{BufRead, BufReader, Error};
use std::fs::File;
use std::iter::FromIterator;
use std::collections::VecDeque;
// One cell of the coordinate grid shared by both puzzle parts.
#[derive(Clone, Debug, Copy, Eq, PartialEq)]
enum GridEntry {
    // Not yet classified.
    Empty,
    // Part 1: index of the unique closest point.
    // Part 2 (`advent6b`): total distance to all points.
    Near(usize),
    // Equidistant from two or more points — no unique owner (part 1).
    Neutral,
    // Belongs to a region marked infinite / flood-filled away.
    Infinity,
}
/// Day 6, part 2: count the grid cells whose total Manhattan distance
/// to every input point is below 10 000. Qualifying cells are marked
/// `Infinity` via `flood_fill` and then tallied.
pub fn advent6b() -> Result<usize, Error> {
    let f = File::open("input6.txt")?;
    let mut points: Vec<(usize, usize)> = Vec::new();
    let (max_x, max_y) = load_points(&f, &mut points)?;
    // Flat backing buffer viewed as (max_x+1) rows of (max_y+1) cells.
    let mut grid_raw = vec![GridEntry::Empty; (max_x + 1) * (max_y + 1)];
    let mut grid_base: Vec<_> = grid_raw.as_mut_slice().chunks_mut(max_y + 1).collect();
    let grid: &mut [&mut [_]] = grid_base.as_mut_slice();
    // Store in every cell the summed distance to all points.
    for (px, row) in grid.iter_mut().enumerate() {
        for (py, cell) in row.iter_mut().enumerate() {
            let total: usize = points.iter().map(|&p| manhattan_distance(p, (px, py))).sum();
            *cell = GridEntry::Near(total);
        }
    }
    let predicate = |entry| match entry {
        GridEntry::Near(total) => total < 10000,
        _ => false,
    };
    // Repurpose `Infinity` as the "inside the safe region" marker.
    for px in 0..grid.len() {
        for py in 0..grid[px].len() {
            flood_fill(grid, (px, py), predicate, GridEntry::Infinity);
        }
    }
    let count = grid
        .iter()
        .flat_map(|row| row.iter())
        .filter(|&&cell| cell == GridEntry::Infinity)
        .count();
    Ok(count)
}
/// Day 6, part 1: size of the largest finite region, where each cell
/// is owned by the unique nearest input point (Manhattan distance) and
/// any region touching the grid border is considered infinite.
pub fn advent6a() -> Result<usize, Error> {
    let f = File::open("input6.txt")?;
    let mut points: Vec<(usize, usize)> = Vec::new();
    let (max_x, max_y) = load_points(&f, &mut points)?;
    // Flat backing buffer viewed as (max_x+1) rows of (max_y+1) cells.
    let mut grid_raw = vec![GridEntry::Empty; (max_x + 1) * (max_y + 1)];
    let mut grid_base: Vec<_> = grid_raw.as_mut_slice().chunks_mut(max_y + 1).collect();
    let mut grid: &mut [&mut [_]] = grid_base.as_mut_slice();
    for px in 0..grid.len() {
        for py in 0..grid[0].len() {
            // Distance to every point, tagged with that point's index.
            let mut distances = Vec::from_iter(
                points.iter().enumerate()
                    .map(|x| (manhattan_distance((px, py), ((x.1).0, (x.1).1)), x.0)));
            distances.sort_by(|a, b| a.0.cmp(&b.0));
            // A tie for the minimum means no unique owner.
            let entry = if distances[0].0 == distances[1].0 {
                GridEntry::Neutral
            } else {
                GridEntry::Near(distances[0].1)
            };
            grid[px][py] = entry;
        }
    }
    // A region touching any border extends forever: flood-fill each
    // border cell's region with Infinity so it is excluded below.
    // Top edge (y == 0):
    for px in 0..grid.len() {
        let target = grid[px][0];
        if target != GridEntry::Infinity {
            flood_fill(&mut grid, (px, 0), |x| x == target, GridEntry::Infinity);
        }
    }
    // Bottom edge (y == last):
    for px in 0..grid.len() {
        let last_y = grid[px].len() - 1;
        let target = grid[px][last_y];
        if target != GridEntry::Infinity {
            flood_fill(&mut grid, (px, last_y), |x| x == target, GridEntry::Infinity);
        }
    }
    // Left edge (x == 0):
    for py in 0..grid[0].len() {
        let target = grid[0][py];
        if target != GridEntry::Infinity {
            flood_fill(&mut grid, (0, py), |x| x == target, GridEntry::Infinity);
        }
    }
    // Right edge (x == last):
    let x_last = grid.len() - 1;
    for py in 0..grid[x_last].len() {
        let target = grid[x_last][py];
        if target != GridEntry::Infinity {
            flood_fill(&mut grid, (x_last, py), |x| x == target, GridEntry::Infinity);
        }
    }
    // Tally surviving (finite) cells per owning point index.
    let mut counts: Vec<usize> = vec![0; points.len()];
    for px in 0..grid.len() {
        for py in 0..grid[0].len() {
            match grid[px][py] {
                GridEntry::Near(n) => counts[n] += 1,
                _ => {}
            }
        }
    }
    return Ok(*counts.iter().max().unwrap_or(&0));
}
/// Parse "x, y" coordinate pairs, one per line, from `f` into `points`,
/// returning the maximum x and y seen (used to size the grid).
fn load_points(f: &File, points: &mut Vec<(usize, usize)>) -> Result<(usize, usize), Error> {
    let mut max_x = 0;
    let mut max_y = 0;
    for buffer in BufReader::new(f).lines() {
        let line = buffer?;
        let x: usize;
        let y: usize;
        // `scan!` comes from a text-parsing macro crate imported
        // elsewhere in this crate (presumably `text_io`) — it panics
        // on malformed lines. TODO confirm which crate provides it.
        scan!(line.bytes() => "{}, {}", x, y);
        max_x = max_x.max(x);
        max_y = max_y.max(y);
        points.push((x, y));
    }
    return Ok((max_x, max_y));
}
/// Manhattan (L1) distance between two grid coordinates, computed
/// with max/min so the unsigned subtraction can never underflow.
fn manhattan_distance(p: (usize, usize), q: (usize, usize)) -> usize {
    let dx = p.0.max(q.0) - p.0.min(q.0);
    let dy = p.1.max(q.1) - p.1.min(q.1);
    dx + dy
}
/// BFS flood fill: starting at `node`, replaces every 4-connected cell
/// matching `predicate` with `replacement`. Cells are painted when
/// enqueued (not when dequeued) so no cell is queued twice.
fn flood_fill<P>(grid: &mut [&mut [GridEntry]], node: (usize, usize), predicate: P, replacement: GridEntry) where
    P: Fn(GridEntry) -> bool {
    // Nothing to do when the seed itself does not match.
    if !predicate(grid[node.0][node.1]) {
        return;
    }
    grid[node.0][node.1] = replacement;
    let mut node_q: VecDeque<(usize, usize)> = VecDeque::new();
    node_q.push_back(node);
    // `while let` replaces the former manual pop / is_none / unwrap loop.
    while let Some(n) = node_q.pop_front() {
        // West, east, north, south neighbours, each guarded against
        // stepping off the grid edge.
        if 1 <= n.0 {
            let west = (n.0 - 1, n.1);
            check_paint_queue_node(grid, west, &predicate, replacement, &mut node_q)
        }
        if n.0 < (grid.len() - 1) {
            let east = (n.0 + 1, n.1);
            check_paint_queue_node(grid, east, &predicate, replacement, &mut node_q)
        }
        if 1 <= n.1 {
            let north = (n.0, n.1 - 1);
            check_paint_queue_node(grid, north, &predicate, replacement, &mut node_q)
        }
        if n.1 < (grid[n.0].len() - 1) {
            let south = (n.0, n.1 + 1);
            check_paint_queue_node(grid, south, &predicate, replacement, &mut node_q)
        }
    }
}
fn check_paint_queue_node<P>(
grid: &mut [&mut [GridEntry]],
candidate: (usize, usize),
predicate: &P, replacement: GridEntry,
node_q: &mut VecDeque<(usize, usize)>) where
P: Fn(GridEntry) -> bool {
if predicate(grid[candidate.0][candidate.1]) {
grid[candidate.0][candidate.1] = replacement;
node_q.push_back(candidate);
}
} |
extern crate cgmath;
use cgmath::Vector3;
/// A ray in 3-D space: a starting point plus a travel direction.
/// Nothing here requires `direction` to be normalized.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Ray {
    /// Starting point of the ray.
    pub origin: Vector3<f64>,
    /// Direction of travel, scaled by `t` in [`Ray::point`].
    pub direction: Vector3<f64>,
}
impl Ray {
    /// The point reached after travelling `t` units of `direction`
    /// from `origin`: p(t) = origin + t * direction. Negative `t`
    /// walks backwards along the ray.
    pub fn point(&self, t: f64) -> Vector3<f64> {
        self.origin + self.direction * t
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Construction stores both fields unchanged.
    #[test]
    fn has_origin_and_direction() {
        let origin = Vector3::new(0.0, 0.0, 2.0);
        let direction = Vector3::new(0.0, 1.0, -1.0);
        // Field-init shorthand (was the redundant `origin: origin, ...`).
        let r = Ray { origin, direction };
        assert_eq!(r.origin, origin);
        assert_eq!(r.direction, direction);
    }
    // `point(t)` walks along the direction vector; t = 0 yields the
    // origin, negative t walks backwards.
    #[test]
    fn has_point_at_position() {
        let origin = Vector3::new(0.0, 0.0, 0.0);
        let direction = Vector3::new(1.0, 1.0, -1.0);
        let r = Ray { origin, direction };
        assert_eq!(r.point(0.0), r.origin);
        // Stray trailing commas inside Vector3::new(...) removed.
        assert_eq!(r.point(1.0), Vector3::new(1.0, 1.0, -1.0));
        assert_eq!(r.point(-15.0), Vector3::new(-15.0, -15.0, 15.0));
        assert_eq!(r.point(0.5), Vector3::new(0.5, 0.5, -0.5));
    }
}
|
use near_contract_standards::fungible_token;
use near_sdk::borsh::{self, BorshDeserialize, BorshSerialize};
use near_sdk::env::log;
use near_sdk::serde_json::{json, from_slice};
use near_sdk::{AccountId, Balance, PromiseOrValue, PublicKey, env, near_bindgen, setup_alloc, ext_contract, log};
use near_sdk::collections::{LookupMap, UnorderedMap};
use std::collections::HashMap;
use std::convert::TryInto;
use std::fmt::Debug;
use std::hash::Hash;
use std::primitive;
use near_sdk::serde::{Serialize, Deserialize};
use near_sdk::{BlockHeight, Gas, PanicOnDefault, Promise, PromiseResult};
use near_sdk::json_types::{Base58PublicKey, U128, U64, ValidAccountId};
use near_contract_standards::fungible_token::metadata::{FungibleTokenMetadata};
use near_contract_standards::fungible_token::receiver::FungibleTokenReceiver;
setup_alloc!();
// Contract state. NOTE(review): everything stored here — including
// `private_key` — lives in on-chain storage, which is publicly
// readable; confirm this key is not expected to remain secret.
#[near_bindgen]
#[derive(PanicOnDefault, BorshDeserialize, BorshSerialize)]
pub struct Airdrop {
    // Key material set by `set_info` and returned by `get_info`.
    private_key: String,
    // Claim counter / access-key nonce seed (see `claim` / `set_info`).
    nonce: u64
}
// JSON view of the contract state returned by `get_info`; mirrors the
// fields of `Airdrop` one-to-one.
#[derive(PanicOnDefault, Serialize, Deserialize)]
#[serde(crate = "near_sdk::serde")]
pub struct Info {
    private_key: String,
    nonce: u64
}
#[near_bindgen]
impl Airdrop {
    /// Initialize with empty key material and a zero nonce.
    #[init]
    pub fn new() -> Self {
        Self {
            private_key: "".to_string(),
            nonce: 0
        }
    }
    /// Claim tokens for specific account that are attached to the public key this tx is signed with.
    /// Transfers a fixed 10^24 yoctoNEAR (1 NEAR) to `receiver` and
    /// bumps the nonce. Restricted to calls signed by this contract's
    /// own account (i.e. via an access key added in `set_info`).
    pub fn claim(&mut self, receiver: AccountId) {
        log!("{}, {}", env::predecessor_account_id(), env::current_account_id());
        assert_eq!(
            env::predecessor_account_id(),
            env::current_account_id(),
            "Claim only can come from this account"
        );
        Promise::new(receiver).transfer(1000000000000000000000000);
        self.nonce += 1;
    }
    /// Store new key material and register `public_key` as an access
    /// key on this account, limited to the `claim` method with a
    /// 10^24 yoctoNEAR allowance.
    pub fn set_info(&mut self, private_key: String, public_key: PublicKey, nonce: U64) {
        let nonce: u64 = nonce.into();
        self.private_key = private_key;
        // Derives a fresh stored nonce from the block height —
        // presumably to keep it ahead of previously issued access-key
        // nonces; TODO confirm the intent of this formula.
        self.nonce = nonce + env::block_index() * 1000000 + 1;
        Promise::new(env::current_account_id()).add_access_key_with_nonce(public_key, 1000000000000000000000000, env::current_account_id(), b"claim".to_vec(), nonce as u64);
    }
    /// Return the stored state.
    /// NOTE(review): exposes `private_key` to any caller. On-chain
    /// state is publicly readable anyway, but confirm this is intended.
    pub fn get_info(&self) -> Info {
        Info {
            private_key: self.private_key.clone(),
            nonce: self.nonce.into()
        }
    }
    // pub fn create_account_and_claim(&mut self, creator: AccountId, index: u32, receiver: AccountId, pbk: PublicKey) {
    //     assert_eq!(
    //         env::predecessor_account_id(),
    //         env::current_account_id(),
    //         "Claim only can come from this account"
    //     );
    //     let task = self.users.get_mut(&creator).unwrap().tasks.get_mut(index as usize).unwrap();
    //     assert!(task.claimed_account.get(&receiver).is_none(), "Already claimed");
    //     Promise::new(receiver.clone()).create_account().add_full_access_key(pbk);
    //     ext_fungible_token::ft_transfer(receiver.clone(), U128::from(task.amount_per_account), None, &task.token_id, 0, env::prepaid_gas() / 2);
    //     task.claimed_account.insert(receiver, task.amount_per_account);
    //     task.deposit_near -= env::prepaid_gas() as u128;
    //     task.deposit_token -= task.amount_per_account;
    // }
    // #[result_serializer(borsh)]
    // #[private]
    // pub fn on_add_token(
    //     &mut self,
    //     token_id: AccountId,
    //     #[callback_vec]
    //     #[serializer(borsh)]
    //     metadata: Result<u8, PromiseError>,
    // ) {
    //     self.tokens.insert(&token_id, &metadata);
    // }
}
/*
* The rest of this file holds the inline tests for the code above
* Learn more about Rust tests: https://doc.rust-lang.org/book/ch11-01-writing-tests.html
*
* To run from contract directory:
* cargo test -- --nocapture
*
* From project root, to run in combination with frontend tests:
* yarn test
*
*/
#[cfg(test)]
mod tests {
    use super::*;
    use near_sdk::MockedBlockchain;
    use near_sdk::{testing_env, VMContext};
    // mock the context for testing, notice "signer_account_id" that was accessed above from env::
    fn get_context(input: Vec<u8>, is_view: bool) -> VMContext {
        VMContext {
            current_account_id: "alice_near".to_string(),
            signer_account_id: "bob_near".to_string(),
            signer_account_pk: vec![0, 1, 2],
            predecessor_account_id: "carol_near".to_string(),
            input,
            block_index: 0,
            block_timestamp: 0,
            account_balance: 0,
            account_locked_balance: 0,
            storage_usage: 0,
            attached_deposit: 0,
            prepaid_gas: 10u64.pow(18),
            random_seed: vec![0, 1, 2],
            is_view,
            output_data_receivers: vec![],
            epoch_height: 19,
        }
    }
    // Placeholder test: only installs the mocked blockchain context;
    // the contract calls themselves are still commented out.
    #[test]
    fn set_then_get_greeting() {
        let context = get_context(vec![], false);
        testing_env!(context);
        //let mut contract = Airdrop::new();
        //contract.add_token("dev-1632295283892-86288685865300".to_string());
    }
    // Empty placeholder kept so the suite structure matches the template.
    #[test]
    fn get_default_greeting() {
    }
}
|
// Exactly one of these two definitions is compiled into the binary,
// selected at build time by the target OS.
#[cfg(target_os = "linux")]
fn are_you_on_linux() {
    println!("You are running linux!");
}
#[cfg(not(target_os = "linux"))]
fn are_you_on_linux() {
    println!("You are *not* running linux!");
}
/// Compiled only when targeting macOS (see the `not(...)` twin below).
#[cfg(target_os = "macos")]
fn are_you_on_macos() {
    println!("You are running macos!");
}
/// Fallback definition compiled on every non-macOS target.
#[cfg(not(target_os = "macos"))]
fn are_you_on_macos() {
    println!("You are *not* running macos!");
}
fn main() {
are_you_on_macos();
are_you_on_linux();
println!("Are you sure?");
if cfg!(target_os = "linux") {
println!("Yes. It's definitely linux!");
} else {
println!("Yes. It's definitely *not* linux!");
}
} |
#[cfg(test)]
mod tests {
    use avi_rs::bytes::{BigEndian, LittleEndian};
    /// Round-trips an unsigned 32-bit value through both byte orders.
    /// Note this crate's API takes (value, buffer, offset) — argument order
    /// differs from the `byteorder` crate.
    #[test]
    fn byteorder() {
        let mut buf = [0u8;4];
        let u = 43608830 as u32;
        BigEndian::write_u32(u, &mut buf, 0);
        let r = BigEndian::read_u32(&buf, 0);
        println!("Write BigEndian: {:?}", buf);
        println!("Read BigEndian: {}", r);
        LittleEndian::write_u32(u, &mut buf, 0);
        let r1 = LittleEndian::read_u32(&buf, 0);
        println!("Write LittleEndian: {:?}", buf);
        println!("Read LittleEndian: {}", r1);
        // Both byte orders must reproduce the original value.
        assert_eq!(u, r);
        assert_eq!(u, r1);
    }
    /// Parses ASCII digit bytes (a textual stream index) into a number.
    #[test]
    fn parse_stream_index() {
        let buf = b"19";
        let str = std::str::from_utf8(buf).expect("Failed to parse Utf8!");
        let n: u8 = str.parse().expect("Not a number!");
        println!("The number is: {}", n);
        assert_eq!(n, 19);
    }
    /// Same round-trip as `byteorder`, but for signed 32-bit values.
    #[test]
    fn signed() {
        let mut buf = [0u8;4];
        let i = -43608830;
        BigEndian::write_i32(i, &mut buf, 0);
        let r = BigEndian::read_i32(&buf, 0);
        println!("Write BigEndian: {:?}", buf);
        println!("Read BigEndian: {}", r);
        LittleEndian::write_i32(i, &mut buf, 0);
        let r1 = LittleEndian::read_i32(&buf, 0);
        println!("Write LittleEndian: {:?}", buf);
        println!("Read LittleEndian: {}", r1);
        assert_eq!(i, r);
        assert_eq!(i, r1);
    }
}
|
use std::fmt;
use std::fmt::{Display, Formatter};
use std::str::FromStr;
/// One instruction of the small accumulator VM below.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Instruction {
    /// Add the operand to the accumulator, then advance `ip` by one.
    Acc(isize),
    /// Jump relative: add the operand to the instruction pointer.
    Jmp(isize),
    /// No operation; the operand is parsed and displayed but ignored.
    Nop(isize),
}
impl Instruction {
    /// Apply this instruction to the register file `m`: `Acc` updates the
    /// accumulator and steps forward, `Jmp` moves `ip` relatively, `Nop`
    /// only steps forward.
    pub fn exec(&self, m: &mut RegisterFile) {
        match *self {
            Instruction::Acc(x) => {
                m.acc += x;
                m.ip += 1;
            }
            Instruction::Jmp(x) => m.ip = (m.ip as isize + x) as usize,
            Instruction::Nop(_) => m.ip += 1,
        }
    }
    /// Parse one instruction per line of `input`.
    ///
    /// Panics (via `expect`) on the first line that is not a valid
    /// instruction.
    pub fn parse_prog(input: &str) -> Vec<Self> {
        let mut prog = Vec::new();
        for line in input.lines() {
            prog.push(line.parse().expect("Could not parse instruction"));
        }
        prog
    }
}
impl Display for Instruction {
    /// Render as the assembly form `"<mnemonic> <operand>"`, e.g. `acc 1`.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let (mnemonic, operand) = match self {
            Instruction::Acc(x) => ("acc", x),
            Instruction::Jmp(x) => ("jmp", x),
            Instruction::Nop(x) => ("nop", x),
        };
        write!(f, "{} {}", mnemonic, operand)
    }
}
impl FromStr for Instruction {
    type Err = anyhow::Error;
    /// Parse a line of the form `"<op> <arg>"`, e.g. `"acc +1"`.
    ///
    /// # Errors
    /// Returns an error when the opcode is unknown, the argument is missing,
    /// or the argument is not a valid signed integer. (Previously an input
    /// with no argument, e.g. `"acc"`, panicked via `unwrap`.)
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.splitn(2, " ");
        let op = parts
            .next()
            .ok_or_else(|| Self::Err::msg("Missing instruction"))?;
        // Fix: report a missing argument as an error instead of panicking.
        let arg = parts
            .next()
            .ok_or_else(|| Self::Err::msg("Missing argument"))?
            .parse()?;
        match op {
            "acc" => Ok(Instruction::Acc(arg)),
            "jmp" => Ok(Instruction::Jmp(arg)),
            "nop" => Ok(Instruction::Nop(arg)),
            s => Err(Self::Err::msg(format!("Bad input {}", s))),
        }
    }
}
/// The VM's registers: instruction pointer and accumulator.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct RegisterFile {
    /// Index of the next instruction to execute.
    pub ip: usize,
    /// Signed accumulator modified by `acc` instructions.
    pub acc: isize,
}
impl RegisterFile {
pub fn new() -> Self {
Self { acc: 0, ip: 0 }
}
}
/// A virtual machine: just a register file; the program is supplied to
/// `run`/`run_debug` as a slice of instructions.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Machine {
    // Current register state (ip + acc).
    reg: RegisterFile,
}
/// Hook interface for observing (and optionally aborting) program execution.
pub trait Watcher {
    /// Called for each instruction just before it executes.
    fn log(&mut self, ins: &Instruction, reg: &RegisterFile);
    /// Return `true` to stop execution before `ins` runs.
    fn check_abort(&self, ins: &Instruction, reg: &RegisterFile) -> bool;
    /// Convenience helper: print each instruction of a recorded log,
    /// one per line, using its `Display` form.
    fn dump_log<'a, L: IntoIterator<Item = &'a Instruction>>(log: L) {
        for ins in log {
            println!("{}", ins);
        }
    }
}
impl Machine {
pub fn new() -> Self {
Self {
reg: RegisterFile::new(),
}
}
pub fn run_debug<W: Watcher>(
&mut self,
prog: &[Instruction],
watcher: &mut W,
) -> Result<RegisterFile, RegisterFile> {
while let Some(ins) = prog.get(self.reg.ip) {
if watcher.check_abort(ins, &self.reg) {
return Err(self.reg);
}
watcher.log(ins, &self.reg);
ins.exec(&mut self.reg);
}
if self.reg.ip == prog.len() {
Ok(self.reg)
} else {
Err(self.reg)
}
}
pub fn run(&mut self, prog: &[Instruction]) -> Result<RegisterFile, RegisterFile> {
while let Some(ins) = prog.get(self.reg.ip) {
ins.exec(&mut self.reg);
}
if self.reg.ip == prog.len() {
Ok(self.reg)
} else {
Err(self.reg)
}
}
}
#[cfg(test)]
mod tests {
    use crate::{Instruction, Machine, RegisterFile, Watcher};
    // Sample program that terminates: the `nop -4` at index 7 (instead of a
    // backwards jump) lets execution fall off the end with acc == 8.
    const PROG: &str = r#"nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
nop -4
acc +6"#;
    /// Parses PROG, runs it to completion, and checks the final accumulator.
    #[test]
    fn test_parse_run() {
        let prog = Instruction::parse_prog(PROG);
        let mut m = Machine::new();
        if let Ok(reg) = m.run(&prog) {
            assert_eq!(8, reg.acc);
        } else {
            panic!("Failed to run program");
        }
    }
    /// Runs PROG under a counting watcher: same final accumulator as `run`,
    /// and exactly 6 instructions must have been executed/logged.
    #[test]
    fn test_run_debug() {
        let prog = Instruction::parse_prog(PROG);
        let mut m = Machine::new();
        // Watcher that counts executed instructions and never aborts.
        struct CountWatch(i32);
        impl Watcher for CountWatch {
            fn log(&mut self, _: &Instruction, _: &RegisterFile) {
                self.0 += 1
            }
            fn check_abort(&self, _: &Instruction, _: &RegisterFile) -> bool {
                false
            }
        }
        let mut w = CountWatch(0);
        if let Ok(reg) = m.run_debug(&prog, &mut w) {
            assert_eq!(8, reg.acc);
            assert_eq!(6, w.0);
        } else {
            panic!("Failed to run program");
        }
    }
}
|
//! Convert the radix (base) of digits stored in a vector.
//!
//! * pure rust, no bigint deps or intermediate conversions
//! * designed around vectors of unsigned integer types, not strings
//! * very fast on large vectors when bases are aligned
//! (see performance section below)
//!
//! # examples
//!
//! convert base 4 data stored in a `Vec<u8>` to base 500 data stored in a
//! `Vec<u16>`:
//!
//! ``` rust
//! use convert_base::Convert;
//!
//! fn main () {
//! let mut base = Convert::new(4,500);
//! let output = base.convert::<u8,u16>(&vec![1,1,1,1,2,2,1,0,2,2,0,0,2,1]);
//! assert_eq!{output, vec![397, 150, 405]};
//! }
//! ```
//!
//! or convert a `Vec<u32>` of base `4_000_000_000` to a `Vec<u16>` of base 700:
//!
//! ``` rust
//! use convert_base::Convert;
//!
//! fn main () {
//! let mut base = Convert::new(4_000_000_000,700);
//! let output = base.convert::<u32,u16>(&vec![
//! 3_900_000_000, 3_500_004_500, 3_000_000_000, 2_500_000_000,
//! 2_000_000_000, 1_500_000_000, 1_000_003_300, 2_500_000_000,
//! 3_000_000_000, 3_700_050_000, 2_400_000_000, 1_250_000_052
//! ]);
//! assert_eq![output, vec!{
//! 300, 71, 255, 325, 23, 591, 267, 188, 488, 553, 124, 54, 422, 411, 116,
//! 411, 85, 558, 4, 498, 384, 106, 465, 635, 75, 120, 226, 18, 634, 631,
//! 116, 464, 111, 679, 17, 382, 67, 99, 208, 164, 8
//! }];
//! }
//! ```
//!
//! For input and output vectors, the least significant digit is at the
//! beginning of the array.
//!
//! Internally, a u64 is used to hold intermediate calculations such as adding
//! two digits or performing carries. You will probably run out of precision if
//! you have an input or output base that is close to the maximum u64 value.
//!
//! # performance
//!
//! When the bases are "aligned" the base conversion can be very fast. But
//! converting long vectors between unaligned bases can be very slow.
//!
//! Two bases are "aligned" when two integers `a` and `b` satisfy the equation
//! `base1.pow(a) == base2.pow(b)`. This ratio of `a:b` describes how bases can
//! cleanly overlap. For example 3 digits in base 256 corresponds exactly to 4
//! digits in base 64. Or 2 digits in base 243 corresponds exactly to 10 digits
//! in base 3 (because `243.pow(2) == 3.pow(10)`).
//!
//! On this old 2014 laptop, converting `5_000` digits:
//!
//! * from base 243 to base 9 in 0.00234 seconds
//! * from base 243 to base 10 in 1.26 seconds
//!
//! and converting `50_000` digits:
//!
//! * from base 243 to base 9 in 0.0149 seconds
//! * from base 243 to base 10 in 125.3 seconds
use std::ops::{Add,Div,Rem};
/// Convert the radix (base) of digits stored in a vector.
pub struct Convert {
  /// Base of the input digits.
  from: u64,
  /// Base of the output digits.
  to: u64,
  /// Alignment ratio `(a, b)` with `from.pow(a) == to.pow(b)`;
  /// `(0, 0)` means the bases are unaligned and the slow path is used.
  ratio: (usize,usize)
}
impl Convert {
  /// Create a new converter with `from` and `to` bases.
  ///
  /// When one base divides the other, search for the smallest exponents
  /// `(j, k)` with `from^j == to^k` and record them in `ratio`, enabling the
  /// fast aligned path in `convert`. Otherwise `ratio` stays `(0, 0)`.
  pub fn new (from: u64, to: u64) -> Self {
    let mut ratio = (0,0);
    if from % to == 0 || to % from == 0 {
      // Largest exponent whose power still fits in the u128 scratch values.
      let max_i = 128 / ulog2(to.max(from));
      let mut j = 0;
      let mut k = 0;
      let f = from as u128;
      let t = to as u128;
      for i in 0..max_i {
        let f_j = f.pow(j);
        let t_k = t.pow(k);
        // i == 0 is skipped: both powers are trivially 1 there.
        if i > 0 && f_j == t_k {
          ratio.0 = j as usize;
          ratio.1 = k as usize;
          break
        } else if f_j < t_k || (i == 0 && from > to) {
          j += 1
        } else { k+=1 }
      }
    }
    Convert { from, to, ratio }
  }
  /// Create a new converter but don't test for alignment.
  pub fn new_unaligned (from: u64, to: u64) -> Self {
    Convert { from, to, ratio: (0,0) }
  }
  /// Perform the conversion on `input` which contains digits in base
  /// `self.from`. You should specify the `Output` type so that the target base
  /// (`self.to`) fits. There are no checks to ensure the `Output` type has
  /// room.
  ///
  /// For input and output vectors, the least significant digit is at the
  /// beginning of the array.
  pub fn convert<Input,Output> (&mut self, input: &Vec<Input>) -> Vec<Output>
  where Output: Copy+Into<u64>+From<u8>+FromU64
  +Add<Output,Output=Output>+Div<Output,Output=Output>+Rem<Output,Output=Output>,
  Input: Copy+Into<u64> {
    let len = input.len();
    // Estimated number of output digits, used only to preallocate.
    let cap = len*ulog2(self.from)/ulog2(self.to);
    let mut output: Vec<Output> = Vec::with_capacity(cap);
    // `base` holds from^i expressed as digits of the output base.
    let mut base: Vec<Output> = vec![1u8.into()];
    // Scratch buffer reused each iteration for digit * base.
    let mut v0: Vec<Output> = vec![];
    let step = self.ratio.0;
    let mut offset = 0;
    for (i,x) in input.iter().enumerate() {
      // output += x * from^i, all arithmetic carried out in base `to`.
      Self::copy(&mut v0, &base);
      self.multiply_scalar_into(&mut v0, (*x).into());
      self.add_into(&mut output, &v0, offset);
      if i+1 < input.len() {
        self.multiply_scalar_into(&mut base, self.from);
      }
      // Aligned fast path: every `step` input digits, from^step equals
      // to^(ratio.1) exactly, so reset `base` to 1 and shift future adds by
      // `offset` output digits instead of growing the multiplier.
      if step > 0 && i%step == step-1 {
        base.clear();
        base.push(1u8.into());
        offset += self.ratio.1;
      }
    }
    output
  }
  // Clear `dst` and copy `src` into it, reusing dst's allocation.
  fn copy<T> (dst: &mut Vec<T>, src: &Vec<T>) -> () where T: Copy {
    dst.clear();
    for x in src.iter() {
      dst.push(*x);
    }
  }
  // In-place multiply of a digit vector (digits in base `self.to`) by the
  // scalar `x`, propagating carries and appending new high digits.
  fn multiply_scalar_into<T> (&self, dst: &mut Vec<T>, x: u64) -> ()
  where T: Copy+Into<u64>+FromU64 {
    let mut carry = 0u64;
    for i in 0..dst.len() {
      let res = dst[i].into() * x + carry;
      carry = res / self.to;
      dst[i] = FromU64::from(res % (self.to as u64));
    }
    while carry > 0 {
      dst.push(FromU64::from(carry % self.to));
      carry /= self.to;
    }
  }
  // dst += src * to^offset, digit-wise in base `self.to`.
  fn add_into<T> (&self, dst: &mut Vec<T>, src: &Vec<T>, offset: usize) -> ()
  where T: Copy+Into<u64>+FromU64
  +Add<T,Output=T>+Div<T,Output=T>+Rem<T,Output=T> {
    let mut carry = 0u64;
    let mut i = 0;
    // Pad dst with zero digits so the shifted src fits within it.
    while dst.len().max(offset)-offset < src.len() {
      dst.push(FromU64::from(0));
    }
    loop {
      let j = i + offset;
      if i < src.len() && j < dst.len() {
        // Both operands present: full add with carry.
        let res = src[i].into() + dst[j].into() + carry;
        carry = res / self.to;
        dst[j] = FromU64::from(res % self.to);
      } else if j < dst.len() {
        // src exhausted: keep propagating the carry through dst.
        let res = dst[j].into() + carry;
        carry = res / self.to;
        dst[j] = FromU64::from(res % self.to);
      } else if i < src.len() {
        // dst exhausted: append src digits plus carry.
        let res = src[i].into() + carry;
        carry = res / self.to;
        dst.push(FromU64::from(res % self.to));
      } else if carry > 0 {
        // Trailing carry becomes the new most-significant digit(s).
        let res = carry;
        carry = res / self.to;
        dst.push(FromU64::from(res % self.to));
      } else {
        break;
      }
      i += 1;
    }
  }
}
/// Floor of log2(x): zero-based index of the highest set bit.
/// Precondition: `x != 0` (zero underflows the subtraction, as before).
fn ulog2 (x: u64) -> usize { (u64::BITS - 1 - x.leading_zeros()) as usize }
// custom trait because TryFrom is difficult:
#[doc(hidden)]
pub trait FromU64 { fn from (n: u64) -> Self; }
// Truncating conversions from u64 to each unsigned width, generated in one
// place so all impls stay in sync.
macro_rules! impl_from_u64 {
    ($($ty:ty),+) => {
        $(impl FromU64 for $ty {
            fn from (n: u64) -> Self { n as $ty }
        })+
    };
}
impl_from_u64!(u8, u16, u32, u64);
|
// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
// TODO: add diagram
#![allow(dead_code)]
// Phases of the decoupled execution pipeline; see each module's file for
// details. NOTE(review): names below are taken at face value — confirm
// against the module files, which are outside this view.
pub mod buffer_manager;
pub mod commit_phase;
pub mod errors;
pub mod execution_phase;
pub mod ordering_state_computer;
pub mod pipeline_phase;
pub mod signing_phase;
#[cfg(test)]
mod tests;
|
use solana_program::{
account_info::{next_account_info, AccountInfo},
entrypoint::ProgramResult,
msg,
program::{invoke, invoke_signed},
program_error::ProgramError,
program_pack::{IsInitialized, Pack},
pubkey::Pubkey,
rent::Rent,
sysvar::Sysvar,
};
use borsh::{BorshDeserialize, BorshSerialize};
use spl_token::state::Account;
use std::time::{SystemTime, UNIX_EPOCH};
use crate::{
error::PerpetualSwapError, instruction::PerpetualSwapInstruction, state::PerpetualSwap,
};
pub struct Processor;
impl Processor {
/// Unpacks a spl_token `Account`.
pub fn unpack_token_account(
account_info: &AccountInfo,
token_program_id: &Pubkey,
) -> Result<spl_token::state::Account, PerpetualSwapError> {
if account_info.owner != token_program_id {
Err(PerpetualSwapError::IncorrectTokenProgramId)
} else {
spl_token::state::Account::unpack(&account_info.data.borrow())
.map_err(|_| PerpetualSwapError::ExpectedAccount)
}
}
/// Unpacks a spl_token `Mint`.
pub fn unpack_mint(
account_info: &AccountInfo,
token_program_id: &Pubkey,
) -> Result<spl_token::state::Mint, PerpetualSwapError> {
if account_info.owner != token_program_id {
Err(PerpetualSwapError::IncorrectTokenProgramId)
} else {
spl_token::state::Mint::unpack(&account_info.data.borrow())
.map_err(|_| PerpetualSwapError::ExpectedMint)
}
}
/// Calculates the authority id by generating a program address.
pub fn authority_id(
program_id: &Pubkey,
my_info: &Pubkey,
nonce: u8,
) -> Result<Pubkey, PerpetualSwapError> {
Pubkey::create_program_address(&[&my_info.to_bytes()[..32], &[nonce]], program_id)
.or(Err(PerpetualSwapError::InvalidProgramAddress))
}
    /// Issue a spl_token `MintTo` instruction.
    ///
    /// Signs with the swap's program-derived authority: the seeds are the
    /// swap account's 32 key bytes followed by the `nonce` byte.
    ///
    /// * `swap` - swap state account key used to derive the signer seeds
    /// * `amount` - number of tokens to mint into `destination`
    pub fn token_mint_to<'a>(
        swap: &Pubkey,
        token_program: AccountInfo<'a>,
        mint: AccountInfo<'a>,
        destination: AccountInfo<'a>,
        authority: AccountInfo<'a>,
        nonce: u8,
        amount: u64,
    ) -> Result<(), ProgramError> {
        let swap_bytes = swap.to_bytes();
        // PDA signer seeds: [swap key bytes, nonce].
        let authority_signature_seeds = [&swap_bytes[..32], &[nonce]];
        let signers = &[&authority_signature_seeds[..]];
        let ix = spl_token::instruction::mint_to(
            token_program.key,
            mint.key,
            destination.key,
            authority.key,
            &[],
            amount,
        )?;
        invoke_signed(&ix, &[mint, destination, authority, token_program], signers)
    }
    /// Issue a spl_token `Transfer` instruction.
    ///
    /// Signs with the swap's program-derived authority (seeds: swap key bytes
    /// plus `nonce`) and moves `amount` tokens from `source` to `destination`.
    pub fn token_transfer<'a>(
        swap: &Pubkey,
        token_program: AccountInfo<'a>,
        source: AccountInfo<'a>,
        destination: AccountInfo<'a>,
        authority: AccountInfo<'a>,
        nonce: u8,
        amount: u64,
    ) -> Result<(), ProgramError> {
        let swap_bytes = swap.to_bytes();
        // PDA signer seeds: [swap key bytes, nonce].
        let authority_signature_seeds = [&swap_bytes[..32], &[nonce]];
        let signers = &[&authority_signature_seeds[..]];
        let ix = spl_token::instruction::transfer(
            token_program.key,
            source.key,
            destination.key,
            authority.key,
            &[],
            amount,
        )?;
        invoke_signed(
            &ix,
            &[source, destination, authority, token_program],
            signers,
        )
    }
    /// Issue a spl_token `InitializeAccount` instruction for `account`,
    /// binding it to `mint` with `owner` as its authority.
    ///
    /// Uses a plain `invoke` (no PDA signature) — `InitializeAccount` does
    /// not require the owner to sign.
    pub fn initialize_account<'a>(
        account: AccountInfo<'a>,
        mint: AccountInfo<'a>,
        owner: AccountInfo<'a>,
        rent: AccountInfo<'a>,
        token_program: AccountInfo<'a>,
    ) -> Result<(), ProgramError> {
        let ix = spl_token::instruction::initialize_account(
            token_program.key,
            account.key,
            mint.key,
            owner.key,
        )?;
        invoke(&ix, &[account, mint, owner, rent, token_program])
    }
    /// Program entrypoint dispatcher: deserializes `instruction_data` into a
    /// `PerpetualSwapInstruction` and routes it to the matching handler.
    pub fn process(
        program_id: &Pubkey,
        accounts: &[AccountInfo],
        instruction_data: &[u8],
    ) -> ProgramResult {
        let instruction = PerpetualSwapInstruction::unpack(instruction_data)?;
        match instruction {
            PerpetualSwapInstruction::InitializePerpetualSwap {
                nonce,
                funding_rate,
                minimum_margin,
                liquidation_bounty,
                minimum_funding_period,
            } => {
                msg!("Instruction: InitializePerpetualSwap");
                Self::process_initialize_perpetual_swap(
                    program_id,
                    nonce,
                    funding_rate,
                    minimum_margin,
                    liquidation_bounty,
                    minimum_funding_period,
                    accounts,
                )
            }
            PerpetualSwapInstruction::InitializeSide { amount_to_deposit } => {
                msg!("Instruction: InitializeSide");
                Self::process_initialize_side(program_id, amount_to_deposit, accounts)
            }
            PerpetualSwapInstruction::DepositToMargin { amount_to_deposit } => {
                msg!("Instruction: DepositToMargin");
                Self::process_deposit_to_margin(program_id, amount_to_deposit, accounts)
            }
            PerpetualSwapInstruction::WithdrawFromMargin { amount_to_withdraw } => {
                msg!("Instruction: WithdrawFromMargin");
                Self::process_withdraw_from_margin(program_id, amount_to_withdraw, accounts)
            }
            PerpetualSwapInstruction::TransferLong { amount } => {
                msg!("Instruction: TransferLong");
                Self::process_transfer_long(program_id, amount, accounts)
            }
            PerpetualSwapInstruction::TransferShort { amount } => {
                msg!("Instruction: TransferShort");
                Self::process_transfer_short(program_id, amount, accounts)
            }
            PerpetualSwapInstruction::TryToLiquidate { collateral } => {
                msg!("Instruction: TryToLiquidate");
                Self::process_try_to_liquidate(program_id, collateral, accounts)
            }
            PerpetualSwapInstruction::TransferFunds {} => {
                msg!("Instruction: TransferFunds");
                Self::process_transfer_funds(program_id, accounts)
            }
            PerpetualSwapInstruction::UpdatePrices {
                mark_price,
            } => {
                msg!("Instruction: UpdatePrices");
                Self::process_update_prices(program_id, mark_price, accounts)
            }
            PerpetualSwapInstruction::OracleUpdateIndex { } => {
                msg!("Instruction: oracleUpdateindex");
                Self::process_oracle_update_index(accounts)
            }
        }
    }
    /// Initializes the swap state plus the long and short margin token
    /// accounts, and stores the configured risk parameters.
    ///
    /// Accounts: [swap state, swap authority (PDA), long margin, short
    /// margin, pool mint, rent sysvar, token program].
    ///
    /// Errors when any account is already initialized, the state account is
    /// not rent-exempt, or `authority_info` does not match the PDA derived
    /// from (swap key, nonce).
    pub fn process_initialize_perpetual_swap(
        program_id: &Pubkey,
        nonce: u8,
        funding_rate: f64,
        minimum_margin: f64,
        liquidation_bounty: f64,
        minimum_funding_period: u128,
        accounts: &[AccountInfo],
    ) -> ProgramResult {
        let account_info_iter = &mut accounts.iter();
        let perpetual_swap_info = next_account_info(account_info_iter)?;
        let authority_info = next_account_info(account_info_iter)?;
        let margin_long_info = next_account_info(account_info_iter)?;
        let margin_short_info = next_account_info(account_info_iter)?;
        let pool_mint_info = next_account_info(account_info_iter)?;
        let rent_info = next_account_info(account_info_iter)?;
        let token_program_info = next_account_info(account_info_iter)?;
        let token_program_id = *token_program_info.key;
        let mut perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
        // Check if the perpetual swap is already initialized
        if perpetual_swap.is_initialized() {
            return Err(PerpetualSwapError::AlreadyInUse.into());
        }
        // Check if pool account is rent-exempt
        let rent = &Rent::from_account_info(rent_info)?;
        if !rent.is_exempt(
            perpetual_swap_info.lamports(),
            perpetual_swap_info.data_len(),
        ) {
            return Err(PerpetualSwapError::NotRentExempt.into());
        }
        // Check if the long margin account is already initialized
        let long_margin_account = Account::unpack_unchecked(&margin_long_info.data.borrow())?;
        if long_margin_account.is_initialized() {
            return Err(PerpetualSwapError::AlreadyInUse.into());
        }
        // Check if the short margin account is already initialized
        let short_margin_account = Account::unpack_unchecked(&margin_short_info.data.borrow())?;
        if short_margin_account.is_initialized() {
            return Err(PerpetualSwapError::AlreadyInUse.into());
        }
        // The supplied authority must equal the PDA for (swap key, nonce).
        let authority_pubkey = Self::authority_id(program_id, perpetual_swap_info.key, nonce)?;
        if *authority_info.key != authority_pubkey {
            return Err(PerpetualSwapError::InvalidAuthorityAccount.into());
        }
        // NOTE(review): this initializes the swap *state* account itself as an
        // SPL token account of `pool_mint` — confirm that is intended and does
        // not conflict with the Borsh state serialized into it below.
        Self::initialize_account(
            perpetual_swap_info.clone(),
            pool_mint_info.clone(),
            authority_info.clone(),
            rent_info.clone(),
            token_program_info.clone(),
        )?;
        Self::initialize_account(
            margin_long_info.clone(),
            pool_mint_info.clone(),
            authority_info.clone(),
            rent_info.clone(),
            token_program_info.clone(),
        )?;
        Self::initialize_account(
            margin_short_info.clone(),
            pool_mint_info.clone(),
            authority_info.clone(),
            rent_info.clone(),
            token_program_info.clone(),
        )?;
        // Record configuration; the two sides are funded later via
        // InitializeSide. Prices are not set here.
        perpetual_swap.is_long_initialized = false;
        perpetual_swap.is_short_initialized = false;
        perpetual_swap.nonce = nonce;
        perpetual_swap.token_program_id = token_program_id;
        perpetual_swap.long_margin_pubkey = *margin_long_info.key;
        perpetual_swap.short_margin_pubkey = *margin_short_info.key;
        perpetual_swap.minimum_margin = minimum_margin;
        perpetual_swap.liquidation_bounty = liquidation_bounty;
        perpetual_swap.funding_rate = funding_rate;
        perpetual_swap.minimum_funding_period = minimum_funding_period;
        // Persist the state into the account's data.
        perpetual_swap
            .serialize(&mut *perpetual_swap_info.data.borrow_mut())
            .map_err(|e| e.into())
    }
pub fn process_initialize_side(
program_id: &Pubkey,
amount_to_deposit: u64,
accounts: &[AccountInfo],
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
let perpetual_swap_info = next_account_info(account_info_iter)?;
let authority_info = next_account_info(account_info_iter)?;
let user_transfer_authority_info = next_account_info(account_info_iter)?;
let source_info = next_account_info(account_info_iter)?;
let margin_info = next_account_info(account_info_iter)?;
let token_program_info = next_account_info(account_info_iter)?;
let mut perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
let source_account =
Self::unpack_token_account(margin_info, &perpetual_swap.token_program_id)?;
// TODO Add all the data checks
if perpetual_swap_info.owner != program_id {
return Err(ProgramError::IncorrectProgramId);
}
if *authority_info.key
!= Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
{
return Err(PerpetualSwapError::InvalidProgramAddress.into());
}
if *token_program_info.key != perpetual_swap.token_program_id {
return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
}
let is_long = *margin_info.key == perpetual_swap.long_margin_pubkey;
let is_short = *margin_info.key == perpetual_swap.short_margin_pubkey;
if !is_long && !is_short {
return Err(PerpetualSwapError::InvalidAccountKeys.into());
}
if 1.0 - ((amount_to_deposit as f64) / perpetual_swap.mark_price)
< perpetual_swap.minimum_margin
{
return Err(PerpetualSwapError::WouldBeLiquidated.into());
}
if source_account.amount < amount_to_deposit {
return Err(PerpetualSwapError::InsufficientFunds.into());
}
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
source_info.clone(),
margin_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
amount_to_deposit,
)?;
if is_long {
perpetual_swap.long_account_pubkey = *source_info.key;
perpetual_swap.is_long_initialized = true;
} else {
perpetual_swap.short_account_pubkey = *source_info.key;
perpetual_swap.is_short_initialized = true;
}
// Start the funding rate interval only when both parties have been set
if perpetual_swap.is_initialized() {
// This is number of milliseconds since the epoch
perpetual_swap.reference_time = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis();
}
Ok(())
}
pub fn process_deposit_to_margin(
program_id: &Pubkey,
amount_to_deposit: u64,
accounts: &[AccountInfo],
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
let perpetual_swap_info = next_account_info(account_info_iter)?;
let authority_info = next_account_info(account_info_iter)?;
let user_transfer_authority_info = next_account_info(account_info_iter)?;
let source_info = next_account_info(account_info_iter)?;
let margin_info = next_account_info(account_info_iter)?;
let token_program_info = next_account_info(account_info_iter)?;
let perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
let source_account =
Self::unpack_token_account(margin_info, &perpetual_swap.token_program_id)?;
// TODO Add all the data checks
if perpetual_swap_info.owner != program_id {
return Err(ProgramError::IncorrectProgramId);
}
if *authority_info.key
!= Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
{
return Err(PerpetualSwapError::InvalidProgramAddress.into());
}
if *token_program_info.key != perpetual_swap.token_program_id {
return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
}
let is_long = *margin_info.key == perpetual_swap.long_margin_pubkey
&& *source_info.key == perpetual_swap.long_account_pubkey;
let is_short = *margin_info.key == perpetual_swap.short_margin_pubkey
&& *source_info.key == perpetual_swap.short_account_pubkey;
if !is_long && !is_short {
return Err(PerpetualSwapError::InvalidAccountKeys.into());
}
if source_account.amount < amount_to_deposit {
return Err(PerpetualSwapError::InsufficientFunds.into());
}
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
source_info.clone(),
margin_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
amount_to_deposit,
)?;
Ok(())
}
pub fn process_withdraw_from_margin(
program_id: &Pubkey,
amount_to_withdraw: u64,
accounts: &[AccountInfo],
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
let perpetual_swap_info = next_account_info(account_info_iter)?;
let authority_info = next_account_info(account_info_iter)?;
let user_transfer_authority_info = next_account_info(account_info_iter)?;
let margin_info = next_account_info(account_info_iter)?;
let dest_info = next_account_info(account_info_iter)?;
let token_program_info = next_account_info(account_info_iter)?;
let perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
let source_account =
Self::unpack_token_account(margin_info, &perpetual_swap.token_program_id)?;
// TODO add all the data checks
if perpetual_swap_info.owner != program_id {
return Err(ProgramError::IncorrectProgramId);
}
if *authority_info.key
!= Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
{
return Err(PerpetualSwapError::InvalidProgramAddress.into());
}
if *token_program_info.key != perpetual_swap.token_program_id {
return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
}
let is_long = *margin_info.key == perpetual_swap.long_margin_pubkey
&& *dest_info.key == perpetual_swap.long_account_pubkey;
let is_short = *margin_info.key == perpetual_swap.short_margin_pubkey
&& *dest_info.key == perpetual_swap.short_account_pubkey;
if !is_long && !is_short {
return Err(PerpetualSwapError::InvalidAccountKeys.into());
}
if 1.0 - (((source_account.amount - amount_to_withdraw) as f64) / perpetual_swap.mark_price)
< perpetual_swap.minimum_margin
{
return Err(PerpetualSwapError::WouldBeLiquidated.into());
}
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
margin_info.clone(),
dest_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
amount_to_withdraw,
)?;
Ok(())
}
pub fn process_transfer_long(
program_id: &Pubkey,
margin_amount: u64,
accounts: &[AccountInfo],
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
let perpetual_swap_info = next_account_info(account_info_iter)?;
let authority_info = next_account_info(account_info_iter)?;
let user_transfer_authority_info = next_account_info(account_info_iter)?;
let long_margin_info = next_account_info(account_info_iter)?;
let long_account_info = next_account_info(account_info_iter)?;
let new_account_info = next_account_info(account_info_iter)?;
let token_program_info = next_account_info(account_info_iter)?;
let mut perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
let long_margin =
Self::unpack_token_account(long_margin_info, &perpetual_swap.token_program_id)?;
let long_account =
Self::unpack_token_account(long_account_info, &perpetual_swap.token_program_id)?;
let new_account =
Self::unpack_token_account(new_account_info, &perpetual_swap.token_program_id)?;
// TODO add more checks
if perpetual_swap_info.owner != program_id {
return Err(ProgramError::IncorrectProgramId);
}
if *authority_info.key
!= Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
{
return Err(PerpetualSwapError::InvalidProgramAddress.into());
}
if *token_program_info.key != perpetual_swap.token_program_id {
return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
}
if perpetual_swap.long_margin_pubkey != *long_margin_info.key
|| perpetual_swap.long_account_pubkey != *long_account_info.key
{
return Err(PerpetualSwapError::InvalidAccountKeys.into());
}
if long_account.mint != new_account.mint {
return Err(PerpetualSwapError::InvalidMints.into());
}
if 1.0 - (margin_amount as f64 / perpetual_swap.mark_price) < perpetual_swap.minimum_margin
{
return Err(PerpetualSwapError::InsufficientMargin.into());
}
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
long_margin_info.clone(),
long_account_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
long_margin.amount,
)?;
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
new_account_info.clone(),
long_margin_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
margin_amount,
)?;
perpetual_swap.long_account_pubkey = *new_account_info.key;
Ok(())
}
pub fn process_transfer_short(
program_id: &Pubkey,
margin_amount: u64,
accounts: &[AccountInfo],
) -> ProgramResult {
let account_info_iter = &mut accounts.iter();
let perpetual_swap_info = next_account_info(account_info_iter)?;
let authority_info = next_account_info(account_info_iter)?;
let user_transfer_authority_info = next_account_info(account_info_iter)?;
let short_margin_info = next_account_info(account_info_iter)?;
let short_account_info = next_account_info(account_info_iter)?;
let new_account_info = next_account_info(account_info_iter)?;
let token_program_info = next_account_info(account_info_iter)?;
let mut perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
let short_margin_account =
Self::unpack_token_account(short_margin_info, &perpetual_swap.token_program_id)?;
// TODO add all the checks
if perpetual_swap_info.owner != program_id {
return Err(ProgramError::IncorrectProgramId);
}
if *authority_info.key
!= Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
{
return Err(PerpetualSwapError::InvalidProgramAddress.into());
}
if *token_program_info.key != perpetual_swap.token_program_id {
return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
}
if perpetual_swap.short_margin_pubkey != *short_margin_info.key
|| perpetual_swap.short_account_pubkey != *short_account_info.key
{
return Err(PerpetualSwapError::InvalidAccountKeys.into());
}
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
short_margin_info.clone(),
short_account_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
short_margin_account.amount,
)?;
Self::token_transfer(
perpetual_swap_info.key,
token_program_info.clone(),
new_account_info.clone(),
short_margin_info.clone(),
user_transfer_authority_info.clone(),
perpetual_swap.nonce,
margin_amount,
)?;
perpetual_swap.short_account_pubkey = *new_account_info.key;
Ok(())
}
    /// Performs a periodic funding transfer between the long and short margin
    /// accounts of a perpetual swap.
    ///
    /// Expected accounts, in order: swap state, swap authority (PDA), user
    /// transfer authority, long margin token account, short margin token
    /// account, SPL token program.
    ///
    /// # Errors
    /// * `AccountNotInitialized` if the swap state is not initialized.
    /// * `IncorrectProgramId` if the state account is not owned by this program.
    /// * `InvalidProgramAddress` if the derived authority does not match.
    /// * `IncorrectTokenProgramId` on a token-program mismatch.
    /// * `InvalidTransferTime` if the clock is behind the stored reference
    ///   time, or the minimum funding period has not yet elapsed.
    pub fn process_transfer_funds(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult {
        let account_info_iter = &mut accounts.iter();
        let perpetual_swap_info = next_account_info(account_info_iter)?;
        let authority_info = next_account_info(account_info_iter)?;
        let user_transfer_authority_info = next_account_info(account_info_iter)?;
        let long_margin_info = next_account_info(account_info_iter)?;
        let short_margin_info = next_account_info(account_info_iter)?;
        let token_program_info = next_account_info(account_info_iter)?;
        // Deserialize the swap state (Borsh) from the state account's data.
        let mut perpetual_swap: PerpetualSwap =
            PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
        if !perpetual_swap.is_initialized() {
            return Err(PerpetualSwapError::AccountNotInitialized.into());
        }
        if perpetual_swap_info.owner != program_id {
            return Err(ProgramError::IncorrectProgramId);
        }
        if *authority_info.key
            != Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
        {
            return Err(PerpetualSwapError::InvalidProgramAddress.into());
        }
        if *token_program_info.key != perpetual_swap.token_program_id {
            return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
        }
        // NOTE(review): SystemTime is host wall-clock time; on-chain Solana
        // programs normally read the `Clock` sysvar instead — confirm where
        // this code is intended to execute.
        let now = SystemTime::now().duration_since(UNIX_EPOCH);
        // This is number of milliseconds since the epoch
        let transfer_time = now.unwrap().as_millis();
        if perpetual_swap.reference_time > transfer_time {
            return Err(PerpetualSwapError::InvalidTransferTime.into());
        }
        let time_since_last_transfer = transfer_time - perpetual_swap.reference_time;
        if time_since_last_transfer < perpetual_swap.minimum_funding_period {
            return Err(PerpetualSwapError::InvalidTransferTime.into());
        }
        // funding_rate = base_funding rate * (amount of time since last transfer) / (# of ms in 1 day)
        let funding_interval = time_since_last_transfer as f64 / (24. * 60. * 60. * 1000.) as f64;
        let funding_rate = perpetual_swap.funding_rate * funding_interval;
        // TODO check for liquidation
        // Longs pay shorts when mark > index; otherwise shorts pay longs.
        if perpetual_swap.mark_price - perpetual_swap.index_price > 0.0 {
            // This is subject to some rounding error
            let funds_to_transfer =
                ((perpetual_swap.mark_price - perpetual_swap.index_price) * funding_rate) as u64;
            Self::token_transfer(
                perpetual_swap_info.key,
                token_program_info.clone(),
                long_margin_info.clone(),
                short_margin_info.clone(),
                user_transfer_authority_info.clone(),
                perpetual_swap.nonce,
                funds_to_transfer,
            )?;
        } else {
            // This is subject to some rounding error
            let funds_to_transfer =
                ((perpetual_swap.index_price - perpetual_swap.mark_price) * funding_rate) as u64;
            Self::token_transfer(
                perpetual_swap_info.key,
                token_program_info.clone(),
                short_margin_info.clone(),
                long_margin_info.clone(),
                user_transfer_authority_info.clone(),
                perpetual_swap.nonce,
                funds_to_transfer,
            )?;
        }
        // NOTE(review): this mutates only the in-memory copy; the updated
        // `reference_time` is never serialized back into
        // `perpetual_swap_info.data` in this function. If persistence does not
        // happen elsewhere, funding could be applied repeatedly — confirm.
        perpetual_swap.reference_time = transfer_time;
        Ok(())
    }
    /// Liquidates a margin account that has fallen below the minimum margin:
    /// pays a bounty to the liquidator, sends the remainder of the liquidated
    /// balance to the insurance account, and has the liquidator post
    /// `collateral` to take over the position.
    ///
    /// Expected accounts, in order: swap state, swap authority (PDA), user
    /// transfer authority, the margin account being liquidated, the
    /// liquidator's token account, the insurance token account, and the SPL
    /// token program.
    pub fn process_try_to_liquidate(
        program_id: &Pubkey,
        collateral: u64,
        accounts: &[AccountInfo],
    ) -> ProgramResult {
        let account_info_iter = &mut accounts.iter();
        let perpetual_swap_info = next_account_info(account_info_iter)?;
        let authority_info = next_account_info(account_info_iter)?;
        let user_transfer_authority_info = next_account_info(account_info_iter)?;
        let liquidated_margin_info = next_account_info(account_info_iter)?;
        let liquidator_account_info = next_account_info(account_info_iter)?;
        let insurance_account_info = next_account_info(account_info_iter)?;
        let token_program_info = next_account_info(account_info_iter)?;
        let perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
        if !perpetual_swap.is_initialized() {
            return Err(PerpetualSwapError::AccountNotInitialized.into());
        }
        if perpetual_swap_info.owner != program_id {
            return Err(ProgramError::IncorrectProgramId);
        }
        if *authority_info.key
            != Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
        {
            return Err(PerpetualSwapError::InvalidProgramAddress.into());
        }
        if *token_program_info.key != perpetual_swap.token_program_id {
            return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
        }
        let liquidated_margin =
            Self::unpack_token_account(liquidated_margin_info, &perpetual_swap.token_program_id)?;
        let liquidator_account =
            Self::unpack_token_account(liquidator_account_info, &perpetual_swap.token_program_id)?;
        // Only liquidate when 1 - margin/mark_price is at or below the
        // minimum margin. NOTE(review): presumably this measures the margin
        // shortfall against the mark price — confirm the intended
        // maintenance-margin formula.
        if 1.0 - ((liquidated_margin.amount as f64) / perpetual_swap.mark_price)
            > perpetual_swap.minimum_margin
        {
            return Err(PerpetualSwapError::DoesNotNeedLiquidation.into());
        }
        // The liquidated account must be one of the swap's two margin accounts.
        if !(*liquidated_margin_info.key == perpetual_swap.long_margin_pubkey
            || *liquidated_margin_info.key == perpetual_swap.short_margin_pubkey)
        {
            return Err(PerpetualSwapError::InvalidAccountKeys.into());
        }
        let bounty = (perpetual_swap.liquidation_bounty * liquidated_margin.amount as f64) as u64;
        // The liquidator must remain above the minimum margin even after
        // receiving the bounty.
        if (1.0 - ((liquidator_account.amount + bounty) as f64) / perpetual_swap.mark_price)
            < perpetual_swap.minimum_margin
        {
            return Err(PerpetualSwapError::InsufficientFunds.into());
        }
        // NOTE(review): this underflows (debug panic / release wrap) if
        // `liquidation_bounty > 1.0` makes `bounty` exceed the margin balance
        // — confirm the bounty fraction is validated at initialization.
        let remaining_balance = liquidated_margin.amount - bounty;
        // Liquidate the user who is past margin
        Self::token_transfer(
            perpetual_swap_info.key,
            token_program_info.clone(),
            liquidated_margin_info.clone(),
            liquidator_account_info.clone(),
            user_transfer_authority_info.clone(),
            perpetual_swap.nonce,
            bounty,
        )?;
        // Pay a liquidation fee to the insurance account
        if remaining_balance > 0 {
            Self::token_transfer(
                perpetual_swap_info.key,
                token_program_info.clone(),
                liquidated_margin_info.clone(),
                insurance_account_info.clone(),
                user_transfer_authority_info.clone(),
                perpetual_swap.nonce,
                remaining_balance,
            )?;
        }
        // Liquidator takes on the busted account position
        Self::token_transfer(
            perpetual_swap_info.key,
            token_program_info.clone(),
            liquidator_account_info.clone(),
            liquidated_margin_info.clone(),
            user_transfer_authority_info.clone(),
            perpetual_swap.nonce,
            collateral,
        )?;
        Ok(())
    }
    /// Updates the swap's mark price from the caller and its index price from
    /// the supplied Pyth product/price account pair.
    ///
    /// Expected accounts, in order: swap state, swap authority (PDA), SPL
    /// token program, Pyth product account, Pyth price account.
    pub fn process_update_prices(
        program_id: &Pubkey,
        mark_price: f64,
        accounts: &[AccountInfo],
    ) -> ProgramResult {
        let account_info_iter = &mut accounts.iter();
        let perpetual_swap_info = next_account_info(account_info_iter)?;
        let authority_info = next_account_info(account_info_iter)?;
        let token_program_info = next_account_info(account_info_iter)?;
        let pyth_product_info = next_account_info(account_info_iter)?;
        let pyth_price_info = next_account_info(account_info_iter)?;
        let mut perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
        if !perpetual_swap.is_initialized() {
            return Err(PerpetualSwapError::AccountNotInitialized.into());
        }
        if perpetual_swap_info.owner != program_id {
            return Err(ProgramError::IncorrectProgramId);
        }
        if *authority_info.key
            != Self::authority_id(program_id, perpetual_swap_info.key, perpetual_swap.nonce)?
        {
            return Err(PerpetualSwapError::InvalidProgramAddress.into());
        }
        if *token_program_info.key != perpetual_swap.token_program_id {
            return Err(PerpetualSwapError::IncorrectTokenProgramId.into());
        }
        // Validate the Pyth accounts and fetch the aggregate price.
        let index_price = Self::pyth_handle_prices(
            pyth_product_info,
            pyth_price_info,
        )?;
        // NOTE(review): these writes hit only the local deserialized copy;
        // nothing is serialized back into `perpetual_swap_info.data` here.
        // Confirm persistence happens elsewhere, otherwise this handler does
        // not change on-chain state.
        perpetual_swap.mark_price = mark_price;
        perpetual_swap.index_price = index_price;
        Ok(())
    }
fn pyth_handle_prices(
pyth_product_info: &AccountInfo,
pyth_price_info: &AccountInfo,
) -> Result<f64, ProgramError> {
let pyth_product_data = &pyth_product_info.try_borrow_data()?;
let pyth_product = pyth_client::cast::<pyth_client::Product>(pyth_product_data);
if pyth_product.magic != pyth_client::MAGIC {
msg!("Pyth product account provided is not a valid Pyth account");
return Err(ProgramError::InvalidArgument.into());
}
if pyth_product.atype != pyth_client::AccountType::Product as u32 {
msg!("Pyth product account provided is not a valid Pyth product account");
return Err(ProgramError::InvalidArgument.into());
}
if pyth_product.ver != pyth_client::VERSION_1 {
msg!("Pyth product account provided has a different version than the Pyth client");
return Err(ProgramError::InvalidArgument.into());
}
if !pyth_product.px_acc.is_valid() {
msg!("Pyth product price account is invalid");
return Err(ProgramError::InvalidArgument.into());
}
let pyth_price_pubkey = Pubkey::new(&pyth_product.px_acc.val);
if &pyth_price_pubkey != pyth_price_info.key {
msg!("Pyth product price account does not match the Pyth price provided");
return Err(ProgramError::InvalidArgument.into());
}
let pyth_price_data = &pyth_price_info.try_borrow_data()?;
let pyth_price = pyth_client::cast::<pyth_client::Price>(pyth_price_data);
let price = pyth_price.agg.price as f64;
Ok(price)
}
    /// Refreshes the swap's index price from a Pyth product/price account pair.
    ///
    /// Expected accounts, in order: swap state, Pyth product, Pyth price.
    ///
    /// NOTE(review): unlike the sibling handlers, this performs no
    /// owner/initialization/authority checks on the swap account, and the
    /// updated `index_price` is never serialized back into the account data —
    /// confirm both are intentional.
    pub fn process_oracle_update_index(
        accounts: &[AccountInfo],
    ) -> ProgramResult {
        let account_info_iter = &mut accounts.iter();
        let perpetual_swap_info = next_account_info(account_info_iter)?;
        let pyth_product_info = next_account_info(account_info_iter)?;
        let pyth_price_info = next_account_info(account_info_iter)?;
        let index_price = Self::pyth_handle_prices(
            pyth_product_info,
            pyth_price_info,
        )?;
        let mut perpetual_swap = PerpetualSwap::try_from_slice(&perpetual_swap_info.data.borrow())?;
        perpetual_swap.index_price = index_price;
        Ok(())
    }
}
|
extern crate gbl;
#[macro_use]
extern crate criterion;
use criterion::{Bencher, Benchmark, Criterion, Throughput};
use gbl::{AesKey, AppImage, Gbl, P256KeyPair};
/// Includes a binary or text file from the test data directory.
/// Includes a binary or text file from the test data directory.
macro_rules! test_data {
    // `bytes $file` — embed the file as a `&'static [u8]` slice.
    ( bytes $file:tt ) => {
        &include_bytes!(concat!("../test-data/", $file))[..]
    };
    // `str $file` — embed the file as a `&'static str`.
    ( str $file:tt ) => {
        include_str!(concat!("../test-data/", $file))
    };
}
/// Benchmarks parsing of the large GBL test image, reporting byte throughput.
fn parse(c: &mut Criterion) {
    let raw = test_data!(bytes "large/large.gbl");
    let bench = Benchmark::new("parse large.gbl", move |b: &mut Bencher| {
        b.iter(|| Gbl::parse(raw).unwrap())
    })
    .throughput(Throughput::Bytes(raw.len() as u64))
    .sample_size(20);
    c.bench("parse large.gbl", bench);
}
/// Benchmarks building a GBL container from a raw application image.
fn from_bin(c: &mut Criterion) {
    let image_bytes = test_data!(bytes "large/large.bin");
    c.bench_function("from_app_image large.bin", move |b| {
        b.iter(|| {
            let image = AppImage::parse(image_bytes).unwrap();
            Gbl::from_app_image(image)
        })
    });
}
/// Benchmarks serializing a parsed GBL image back into a byte buffer.
fn write(c: &mut Criterion) {
    let raw = test_data!(bytes "large/large.gbl");
    let bench = Benchmark::new("write large.gbl", move |b| {
        let gbl = Gbl::parse(raw).unwrap();
        b.iter(|| {
            // This also measures the vec allocation, but that shouldn't be a
            // problem.
            let mut out = vec![0; 1024 * 1024 * 10];
            gbl.write(&mut out[..]).unwrap();
            out
        })
    })
    .throughput(Throughput::Bytes(raw.len() as u64))
    .sample_size(20);
    c.bench("write large.gbl", bench);
}
/// Benchmarks ECDSA signing and AES encryption of the large GBL image.
fn sign_encrypt(c: &mut Criterion) {
    let data = test_data!(bytes "large/large.gbl");
    let key = P256KeyPair::from_pem(test_data!(str "signing-key")).unwrap();
    let gbl = Gbl::parse(data).unwrap();
    // Strip any existing signature/encryption so both benches start from a
    // plain image.
    let gbl = gbl.into_not_signed().unwrap().into_not_encrypted().unwrap();
    // Owned copy for the signing bench; `gbl` itself is moved into the
    // encryption bench below.
    let gbl2 = gbl.clone().into_owned();
    c.bench(
        "sign large.gbl",
        Benchmark::new("sign large.gbl", move |b| {
            // Clone per iteration: `sign` consumes the image.
            b.iter(|| gbl2.clone().sign(&key).unwrap())
        })
        .throughput(Throughput::Bytes(data.len() as u64))
        .sample_size(20),
    );
    c.bench(
        "encrypt large.gbl",
        Benchmark::new("encrypt large.gbl", move |b| {
            b.iter(|| {
                // Clone per iteration: `encrypt` consumes the image.
                gbl.clone().encrypt(AesKey::from_raw([
                    0x5b, 0x69, 0x41, 0x78, 0xba, 0xa2, 0xc3, 0x6c, 0x63, 0x20, 0x65, 0xd5, 0xbe,
                    0xec, 0xaa, 0x3f,
                ]))
            })
        })
        .throughput(Throughput::Bytes(data.len() as u64))
        .sample_size(40),
    );
}
// Register the benchmark functions and generate the harness entry point.
criterion_group!(benches, parse, from_bin, write, sign_encrypt);
criterion_main!(benches);
|
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
extern crate failure;
extern crate fidl;
extern crate fidl_fuchsia_auth;
extern crate fuchsia_app as component;
extern crate fuchsia_async as async;
extern crate fuchsia_syslog as syslog;
extern crate fuchsia_zircon as zx;
extern crate futures;
#[macro_use]
extern crate log;
mod factory;
mod manager;
use component::server::ServicesServer;
use factory::TokenManagerFactory;
use failure::{Error, ResultExt};
use fidl::endpoints2::ServiceMarker;
use fidl_fuchsia_auth::TokenManagerFactoryMarker;
/// Entry point: sets up syslog, then serves the `TokenManagerFactory` FIDL
/// service on a single-threaded executor until the server future completes.
fn main() -> Result<(), Error> {
    syslog::init_with_tags(&["auth"]).expect("Can't init logger");
    info!("Starting token manager");

    let mut executor = async::Executor::new().context("Error creating executor")?;

    // Each incoming channel is handed to a freshly spawned factory task.
    let server = ServicesServer::new().add_service((
        TokenManagerFactoryMarker::NAME,
        |chan| TokenManagerFactory::spawn(chan),
    ));
    let fut = server
        .start()
        .context("Error starting Auth TokenManager server")?;

    executor
        .run_singlethreaded(fut)
        .context("Failed to execute Auth TokenManager future")?;

    info!("Stopping token manager");
    Ok(())
}
|
//! Types
//!
//! See: [6.7 Types](http://erlang.org/doc/apps/erts/absform.html#id88630)
use ast;
use ast::common;
use ast::literal;
/// Unary operator node specialized to `Type` operands.
pub type UnaryOp = common::UnaryOp<Type>;
/// Binary operator node specialized to `Type` operands.
pub type BinaryOp = common::BinaryOp<Type>;
/// One node of an Erlang type expression, per the abstract format
/// (see the module docs). Each variant boxes its payload to keep the
/// enum itself small.
#[derive(Debug, Clone)]
pub enum Type {
    Atom(Box<literal::Atom>),
    Integer(Box<literal::Integer>),
    Var(Box<common::Var>),
    Annotated(Box<Annotated>),
    UnaryOp(Box<UnaryOp>),
    BinaryOp(Box<BinaryOp>),
    BitString(Box<BitString>),
    Nil(Box<common::Nil>),
    AnyFun(Box<AnyFun>),
    Function(Box<Fun>),
    Range(Box<Range>),
    Map(Box<Map>),
    BuiltIn(Box<BuiltInType>),
    Record(Box<Record>),
    Remote(Box<RemoteType>),
    AnyTuple(Box<AnyTuple>),
    Tuple(Box<Tuple>),
    Union(Box<Union>),
    User(Box<UserType>),
}
// Generate `From<T>` conversions into the corresponding boxed `Type`
// variants (`impl_from!` is defined elsewhere in this crate).
impl_from!(Type::Atom(literal::Atom));
impl_from!(Type::Integer(literal::Integer));
impl_from!(Type::Var(common::Var));
impl_from!(Type::Annotated(Annotated));
impl_from!(Type::UnaryOp(UnaryOp));
impl_from!(Type::BinaryOp(BinaryOp));
impl_from!(Type::BitString(BitString));
impl_from!(Type::Nil(common::Nil));
impl_from!(Type::AnyFun(AnyFun));
impl_from!(Type::Function(Fun));
impl_from!(Type::Range(Range));
impl_from!(Type::Map(Map));
impl_from!(Type::BuiltIn(BuiltInType));
impl_from!(Type::Record(Record));
impl_from!(Type::Remote(RemoteType));
impl_from!(Type::AnyTuple(AnyTuple));
impl_from!(Type::Tuple(Tuple));
impl_from!(Type::Union(Union));
impl_from!(Type::User(UserType));
impl ast::Node for Type {
    /// Returns the source line of the wrapped node by delegating to it.
    fn line(&self) -> ast::LineNum {
        match *self {
            Type::Integer(ref x) => x.line(),
            Type::Atom(ref x) => x.line(),
            Type::Var(ref x) => x.line(),
            Type::Annotated(ref x) => x.line(),
            Type::UnaryOp(ref x) => x.line(),
            Type::BinaryOp(ref x) => x.line(),
            Type::BitString(ref x) => x.line(),
            Type::Nil(ref x) => x.line(),
            Type::AnyFun(ref x) => x.line(),
            Type::Function(ref x) => x.line(),
            Type::Range(ref x) => x.line(),
            Type::Map(ref x) => x.line(),
            Type::BuiltIn(ref x) => x.line(),
            Type::Record(ref x) => x.line(),
            Type::Remote(ref x) => x.line(),
            Type::AnyTuple(ref x) => x.line(),
            Type::Tuple(ref x) => x.line(),
            Type::Union(ref x) => x.line(),
            Type::User(ref x) => x.line(),
        }
    }
}
impl Type {
    /// Returns the built-in `any` type located at the given line.
    pub fn any(line: ast::LineNum) -> Self {
        let any = BuiltInType::new(line, "any".to_string(), Vec::new());
        Type::BuiltIn(Box::new(any))
    }
}
/// A user-defined type reference: a name applied to argument types.
#[derive(Debug, Clone)]
pub struct UserType {
    pub line: ast::LineNum,
    pub name: String,
    pub args: Vec<Type>,
}
impl_node!(UserType);
impl UserType {
    /// Creates a user type with the given name and arguments.
    pub fn new(line: ast::LineNum, name: String, args: Vec<Type>) -> Self {
        Self { line, name, args }
    }
}
/// A union of several member types.
#[derive(Debug, Clone)]
pub struct Union {
    pub line: ast::LineNum,
    pub types: Vec<Type>,
}
impl_node!(Union);
impl Union {
    /// Creates a union over the given member types.
    pub fn new(line: ast::LineNum, types: Vec<Type>) -> Self {
        Self { line, types }
    }
}
/// A tuple type with unspecified elements.
#[derive(Debug, Clone)]
pub struct AnyTuple {
    pub line: ast::LineNum,
}
impl_node!(AnyTuple);
impl AnyTuple {
    /// Creates an unconstrained tuple type at the given line.
    pub fn new(line: ast::LineNum) -> Self {
        Self { line }
    }
}
/// A tuple type with explicit element types.
#[derive(Debug, Clone)]
pub struct Tuple {
    pub line: ast::LineNum,
    pub elements: Vec<Type>,
}
impl_node!(Tuple);
impl Tuple {
    /// Creates a tuple type from its element types.
    pub fn new(line: ast::LineNum, elements: Vec<Type>) -> Self {
        Self { line, elements }
    }
}
/// A type defined in another module: `module:function(args)`.
#[derive(Debug, Clone)]
pub struct RemoteType {
    pub line: ast::LineNum,
    pub module: String,
    pub function: String,
    pub args: Vec<Type>,
}
impl_node!(RemoteType);
impl RemoteType {
    /// Creates a remote type reference.
    pub fn new(line: ast::LineNum, module: String, function: String, args: Vec<Type>) -> Self {
        Self {
            line,
            module,
            function,
            args,
        }
    }
}
/// A record type: a named record with typed fields.
#[derive(Debug, Clone)]
pub struct Record {
    pub line: ast::LineNum,
    pub name: String,
    pub fields: Vec<RecordField>,
}
impl_node!(Record);
impl Record {
    /// Creates a record type from its name and field list.
    pub fn new(line: ast::LineNum, name: String, fields: Vec<RecordField>) -> Self {
        Self { line, name, fields }
    }
}
/// One named, typed field of a record type.
#[derive(Debug, Clone)]
pub struct RecordField {
    pub line: ast::LineNum,
    pub name: String,
    pub ty: Type,
}
impl_node!(RecordField);
impl RecordField {
    /// Creates a record field from its name and type.
    pub fn new(line: ast::LineNum, name: String, ty: Type) -> Self {
        Self { line, name, ty }
    }
}
/// A built-in (predefined) type, identified by name plus argument types.
#[derive(Debug, Clone)]
pub struct BuiltInType {
    pub line: ast::LineNum,
    pub name: String,
    pub args: Vec<Type>,
}
impl_node!(BuiltInType);
impl BuiltInType {
    /// Creates a built-in type from its name and arguments.
    pub fn new(line: ast::LineNum, name: String, args: Vec<Type>) -> Self {
        Self { line, name, args }
    }
}
/// A map type made up of key/value pair specifications.
#[derive(Debug, Clone)]
pub struct Map {
    pub line: ast::LineNum,
    pub pairs: Vec<MapPair>,
}
impl_node!(Map);
impl Map {
    /// Creates a map type from its pair specifications.
    pub fn new(line: ast::LineNum, pairs: Vec<MapPair>) -> Self {
        Self { line, pairs }
    }
}
/// One key/value pair of a map type; `is_assoc` distinguishes the two
/// association kinds carried by the abstract format.
#[derive(Debug, Clone)]
pub struct MapPair {
    pub line: ast::LineNum,
    pub is_assoc: bool,
    pub key: Type,
    pub value: Type,
}
impl_node!(MapPair);
impl MapPair {
    /// Creates a map pair from its association kind, key and value types.
    pub fn new(line: ast::LineNum, is_assoc: bool, key: Type, value: Type) -> Self {
        Self {
            line,
            is_assoc,
            key,
            value,
        }
    }
}
/// A type annotated with a variable name: `Name :: Type`.
#[derive(Debug, Clone)]
pub struct Annotated {
    pub line: ast::LineNum,
    pub name: common::Var,
    pub ty: Type,
}
impl_node!(Annotated);
impl Annotated {
    /// Creates an annotated type from its variable and underlying type.
    pub fn new(line: ast::LineNum, name: common::Var, ty: Type) -> Self {
        Self { line, name, ty }
    }
}
/// A bit-string type described by a whole-byte count and trailing bit count.
#[derive(Debug, Clone)]
pub struct BitString {
    pub line: ast::LineNum,
    pub bytes: u64,
    pub tail_bits: u64,
}
impl_node!(BitString);
impl BitString {
    /// Creates a bit-string type from its byte and tail-bit sizes.
    pub fn new(line: ast::LineNum, bytes: u64, tail_bits: u64) -> Self {
        Self {
            line,
            bytes,
            tail_bits,
        }
    }
}
/// A function type with unspecified arguments and an optional return type.
#[derive(Debug, Clone)]
pub struct AnyFun {
    pub line: ast::LineNum,
    pub return_type: Option<Type>,
}
impl_node!(AnyFun);
impl AnyFun {
    /// Creates a function type with no return-type constraint.
    pub fn new(line: ast::LineNum) -> Self {
        Self {
            line,
            return_type: None,
        }
    }
    /// Sets the return type (builder style); consumes and returns `self`.
    pub fn return_type(self, return_type: Type) -> Self {
        Self {
            return_type: Some(return_type),
            ..self
        }
    }
}
/// A function type with explicit argument types, a return type, and an
/// optional list of constraints.
#[derive(Debug, Clone)]
pub struct Fun {
    pub line: ast::LineNum,
    pub args: Vec<Type>,
    pub return_type: Type,
    pub constraints: Vec<Constraint>,
}
impl_node!(Fun);
impl Fun {
    /// Creates a function type with an empty constraint list.
    pub fn new(line: ast::LineNum, args: Vec<Type>, return_type: Type) -> Self {
        Self {
            line,
            args,
            return_type,
            constraints: Vec::new(),
        }
    }
    /// Replaces the constraint list (builder style); consumes and returns `self`.
    pub fn constraints(self, constraints: Vec<Constraint>) -> Self {
        Self {
            constraints,
            ..self
        }
    }
}
/// A constraint binding a variable to a subtype in a function spec.
#[derive(Debug, Clone)]
pub struct Constraint {
    pub line: ast::LineNum,
    pub var: common::Var,
    pub subtype: Type,
}
impl_node!(Constraint);
impl Constraint {
    /// Creates a constraint from its variable and subtype.
    pub fn new(line: ast::LineNum, var: common::Var, subtype: Type) -> Self {
        Self { line, var, subtype }
    }
}
/// An integer range type bounded by a low and a high type.
#[derive(Debug, Clone)]
pub struct Range {
    pub line: ast::LineNum,
    pub low: Type,
    pub high: Type,
}
impl_node!(Range);
impl Range {
    /// Creates a range type from its bounds.
    pub fn new(line: ast::LineNum, low: Type, high: Type) -> Self {
        Self { line, low, high }
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Resource envelope: required `location`, optional id/name/type/tags.
/// `type_` is (de)serialized as the JSON key `type`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    pub location: String,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Same shape as `Resource` but with every field optional (PATCH payloads).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PatchedResource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// A web service: the resource envelope flattened together with required
/// properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebService {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: WebServiceProperties,
}
/// PATCH form of `WebService`: flattened patched envelope, optional
/// properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PatchedWebService {
    #[serde(flatten)]
    pub patched_resource: PatchedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<WebServiceProperties>,
}
/// Properties of a web service. Only `package_type` is required; all other
/// fields are optional and omitted from JSON when `None`. Camel-case JSON
/// keys are mapped via `serde(rename)`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebServiceProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "createdOn", default, skip_serializing_if = "Option::is_none")]
    pub created_on: Option<String>,
    #[serde(rename = "modifiedOn", default, skip_serializing_if = "Option::is_none")]
    pub modified_on: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<web_service_properties::ProvisioningState>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub keys: Option<WebServiceKeys>,
    #[serde(rename = "readOnly", default, skip_serializing_if = "Option::is_none")]
    pub read_only: Option<bool>,
    #[serde(rename = "swaggerLocation", default, skip_serializing_if = "Option::is_none")]
    pub swagger_location: Option<String>,
    #[serde(rename = "exposeSampleData", default, skip_serializing_if = "Option::is_none")]
    pub expose_sample_data: Option<bool>,
    #[serde(rename = "realtimeConfiguration", default, skip_serializing_if = "Option::is_none")]
    pub realtime_configuration: Option<RealtimeConfiguration>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub diagnostics: Option<DiagnosticsConfiguration>,
    #[serde(rename = "storageAccount", default, skip_serializing_if = "Option::is_none")]
    pub storage_account: Option<StorageAccount>,
    #[serde(rename = "machineLearningWorkspace", default, skip_serializing_if = "Option::is_none")]
    pub machine_learning_workspace: Option<MachineLearningWorkspace>,
    #[serde(rename = "commitmentPlan", default, skip_serializing_if = "Option::is_none")]
    pub commitment_plan: Option<CommitmentPlan>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub input: Option<ServiceInputOutputSpecification>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub output: Option<ServiceInputOutputSpecification>,
    #[serde(rename = "exampleRequest", default, skip_serializing_if = "Option::is_none")]
    pub example_request: Option<ExampleRequest>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub assets: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
    #[serde(rename = "packageType")]
    pub package_type: web_service_properties::PackageType,
    #[serde(rename = "payloadsInBlobStorage", default, skip_serializing_if = "Option::is_none")]
    pub payloads_in_blob_storage: Option<bool>,
    #[serde(rename = "payloadsLocation", default, skip_serializing_if = "Option::is_none")]
    pub payloads_location: Option<BlobLocation>,
}
/// Enumerations scoped to `WebServiceProperties` fields.
pub mod web_service_properties {
    use super::*;
    /// Provisioning state of a web service.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Unknown,
        Provisioning,
        Succeeded,
        Failed,
    }
    /// Package type of a web service; only `Graph` is defined.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum PackageType {
        Graph,
    }
}
/// Web service properties extended with an optional graph package.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebServicePropertiesForGraph {
    #[serde(flatten)]
    pub web_service_properties: WebServiceProperties,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub package: Option<GraphPackage>,
}
/// Primary/secondary access keys of a web service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebServiceKeys {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub primary: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub secondary: Option<String>,
}
/// One page of web services plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PaginatedWebServicesList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<WebService>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Realtime endpoint configuration (max concurrent calls).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RealtimeConfiguration {
    #[serde(rename = "maxConcurrentCalls", default, skip_serializing_if = "Option::is_none")]
    pub max_concurrent_calls: Option<i64>,
}
/// Diagnostics settings: required level, optional expiry timestamp string.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DiagnosticsConfiguration {
    pub level: diagnostics_configuration::Level,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiry: Option<String>,
}
/// Enumerations scoped to `DiagnosticsConfiguration`.
pub mod diagnostics_configuration {
    use super::*;
    /// Diagnostics verbosity level.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Level {
        None,
        Error,
        All,
    }
}
/// Storage account reference: optional name and key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StorageAccount {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key: Option<String>,
}
/// Machine-learning workspace reference by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MachineLearningWorkspace {
    pub id: String,
}
/// Commitment plan reference by resource id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CommitmentPlan {
    pub id: String,
}
/// Input/output schema of a service: required `type` and `properties`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceInputOutputSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "type")]
    pub type_: String,
    pub properties: serde_json::Value,
}
/// Schema of a table: required `type`, optional format/properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TableSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "type")]
    pub type_: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub format: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// Schema of one table column: type, optional format/enum/null-ordering
/// hints (`x-ms-*` extension keys).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ColumnSpecification {
    #[serde(rename = "type")]
    pub type_: column_specification::Type,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub format: Option<column_specification::Format>,
    #[serde(rename = "enum", default, skip_serializing_if = "Vec::is_empty")]
    pub enum_: Vec<serde_json::Value>,
    #[serde(rename = "x-ms-isnullable", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_isnullable: Option<bool>,
    #[serde(rename = "x-ms-isordered", default, skip_serializing_if = "Option::is_none")]
    pub x_ms_isordered: Option<bool>,
}
/// Enumerations scoped to `ColumnSpecification`.
pub mod column_specification {
    use super::*;
    /// Base JSON type of a column.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Boolean,
        Integer,
        Number,
        String,
    }
    /// Refined value format of a column.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Format {
        Byte,
        Char,
        Complex64,
        Complex128,
        #[serde(rename = "Date-time")]
        DateTime,
        #[serde(rename = "Date-timeOffset")]
        DateTimeOffset,
        Double,
        Duration,
        Float,
        Int8,
        Int16,
        Int32,
        Int64,
        Uint8,
        Uint16,
        Uint32,
        Uint64,
    }
}
/// Sample request payload: optional inputs and global parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ExampleRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub inputs: Option<serde_json::Value>,
    #[serde(rename = "globalParameters", default, skip_serializing_if = "Option::is_none")]
    pub global_parameters: Option<serde_json::Value>,
}
/// An asset (module or resource) with a required name, type and blob
/// location, plus optional ports/metadata/parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AssetItem {
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "type")]
    pub type_: asset_item::Type,
    #[serde(rename = "locationInfo")]
    pub location_info: BlobLocation,
    #[serde(rename = "inputPorts", default, skip_serializing_if = "Option::is_none")]
    pub input_ports: Option<serde_json::Value>,
    #[serde(rename = "outputPorts", default, skip_serializing_if = "Option::is_none")]
    pub output_ports: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ModuleAssetParameter>,
}
/// Enumerations scoped to `AssetItem`.
pub mod asset_item {
    use super::*;
    /// Kind of asset.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Module,
        Resource,
    }
}
/// Blob storage location: required URI, optional credentials.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlobLocation {
    pub uri: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub credentials: Option<String>,
}
/// Parameter of a module asset.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ModuleAssetParameter {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "parameterType", default, skip_serializing_if = "Option::is_none")]
    pub parameter_type: Option<String>,
    #[serde(rename = "modeValuesInfo", default, skip_serializing_if = "Option::is_none")]
    pub mode_values_info: Option<serde_json::Value>,
}
/// Nested parameter definitions for a parameter mode.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ModeValueInfo {
    #[serde(rename = "interfaceString", default, skip_serializing_if = "Option::is_none")]
    pub interface_string: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub parameters: Vec<ModuleAssetParameter>,
}
/// Input port of a graph node; only the `Dataset` type is defined.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct InputPort {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<input_port::Type>,
}
/// Enumerations scoped to `InputPort`.
pub mod input_port {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Dataset,
    }
}
/// Output port of a graph node; only the `Dataset` type is defined.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OutputPort {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<output_port::Type>,
}
/// Enumerations scoped to `OutputPort`.
pub mod output_port {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Dataset,
    }
}
/// A graph package: nodes, edges, and graph-level parameters.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphPackage {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub nodes: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub edges: Vec<GraphEdge>,
    #[serde(rename = "graphParameters", default, skip_serializing_if = "Option::is_none")]
    pub graph_parameters: Option<serde_json::Value>,
}
/// One node of a graph, referencing an asset, input or output id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphNode {
    #[serde(rename = "assetId", default, skip_serializing_if = "Option::is_none")]
    pub asset_id: Option<String>,
    #[serde(rename = "inputId", default, skip_serializing_if = "Option::is_none")]
    pub input_id: Option<String>,
    #[serde(rename = "outputId", default, skip_serializing_if = "Option::is_none")]
    pub output_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
}
/// One edge of a graph, connecting a source node/port to a target node/port.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphEdge {
    #[serde(rename = "sourceNodeId", default, skip_serializing_if = "Option::is_none")]
    pub source_node_id: Option<String>,
    #[serde(rename = "sourcePortId", default, skip_serializing_if = "Option::is_none")]
    pub source_port_id: Option<String>,
    #[serde(rename = "targetNodeId", default, skip_serializing_if = "Option::is_none")]
    pub target_node_id: Option<String>,
    #[serde(rename = "targetPortId", default, skip_serializing_if = "Option::is_none")]
    pub target_port_id: Option<String>,
}
/// A graph-level parameter with a required type and node links.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphParameter {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "type")]
    pub type_: graph_parameter::Type,
    pub links: Vec<GraphParameterLink>,
}
/// Enumerations scoped to `GraphParameter`.
pub mod graph_parameter {
    use super::*;
    /// Value kind of a graph parameter.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        String,
        Int,
        Float,
        Enumerated,
        Script,
        Mode,
        Credential,
        Boolean,
        Double,
        ColumnPicker,
        ParameterRange,
        DataGatewayName,
    }
}
/// Binds a graph parameter to one node's parameter key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct GraphParameterLink {
    #[serde(rename = "nodeId")]
    pub node_id: String,
    #[serde(rename = "parameterKey")]
    pub parameter_key: String,
}
/// A web-service parameter value, optionally with a certificate thumbprint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WebServiceParameter {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub value: Option<serde_json::Value>,
    #[serde(rename = "certificateThumbprint", default, skip_serializing_if = "Option::is_none")]
    pub certificate_thumbprint: Option<String>,
}
/// Status of a long-running (asynchronous) operation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AsyncOperationStatus {
    /// Operation identifier.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    /// Operation name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Current provisioning state (serialized as `provisioningState`).
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<async_operation_status::ProvisioningState>,
    /// Start time of the operation (serialized as `startTime`).
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    /// End time of the operation (serialized as `endTime`).
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    /// Completion percentage (serialized as `percentComplete`).
    #[serde(rename = "percentComplete", default, skip_serializing_if = "Option::is_none")]
    pub percent_complete: Option<f64>,
    /// Error details, present when the operation failed (serialized as `errorInfo`).
    #[serde(rename = "errorInfo", default, skip_serializing_if = "Option::is_none")]
    pub error_info: Option<AsyncOperationErrorInfo>,
}
/// Types belonging to [`AsyncOperationStatus`].
pub mod async_operation_status {
    use super::*;
    /// Provisioning state of an asynchronous operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ProvisioningState {
        Unknown,
        Provisioning,
        Succeeded,
        Failed,
    }
}
/// Error information for a failed asynchronous operation; `details` nests
/// further errors of the same shape.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AsyncOperationErrorInfo {
    /// Machine-readable error code.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    /// Target of the error.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    /// Human-readable error message.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    /// Nested error details (recursive).
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<AsyncOperationErrorInfo>,
}
/// List of available REST operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationEntityListResult {
    /// The operations in this page of results.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<OperationEntity>,
}
/// A single REST operation and its display metadata.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationEntity {
    /// Fully-qualified operation name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// Display metadata for the operation.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<OperationDisplayInfo>,
}
/// Human-readable description of an operation for display purposes.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationDisplayInfo {
    /// Description of the operation.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Operation verb, e.g. read/write (exact values not visible here).
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    /// Resource provider name.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    /// Resource type the operation applies to.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
}
|
use crate::domain::role::{Role, RoleId};
use crate::domain::user::{
Email, Fullname, Identity, Password, PasswordHasher, Person, Provider, User, UserId, Username,
};
use crate::mocks::FakePasswordHasher;
/// Fixture: an unvalidated local user with id `#user1`.
pub fn user1() -> User {
    let hasher = FakePasswordHasher::new();
    let password = Password::new(&hasher.hash("P@asswd!").unwrap()).unwrap();
    let identity = Identity::new(
        Provider::Local,
        Username::new("user-one").unwrap(),
        Email::new("user@one.com").unwrap(),
        Some(password),
    )
    .unwrap();
    User::new(UserId::new("#user1").unwrap(), identity, user_role()).unwrap()
}
/// Fixture: a second unvalidated local user with id `#user2`.
pub fn user2() -> User {
    let hasher = FakePasswordHasher::new();
    let password = Password::new(&hasher.hash("P@asswd!").unwrap()).unwrap();
    let identity = Identity::new(
        Provider::Local,
        Username::new("user-two").unwrap(),
        Email::new("user@two.com").unwrap(),
        Some(password),
    )
    .unwrap();
    User::new(UserId::new("#user2").unwrap(), identity, user_role()).unwrap()
}
/// Fixture: `user1` after completing validation.
pub fn validated_user1() -> User {
    let mut user = user1();
    let code = user.validation().cloned().unwrap();
    user.validate(&code).unwrap();
    user
}
/// Fixture: `user2` after completing validation.
pub fn validated_user2() -> User {
    let mut user = user2();
    let code = user.validation().cloned().unwrap();
    user.validate(&code).unwrap();
    user
}
/// Fixture: an administrator account with id `#admin1`.
pub fn admin1() -> User {
    let hasher = FakePasswordHasher::new();
    let password = Password::new(&hasher.hash("P@asswd!").unwrap()).unwrap();
    let identity = Identity::new(
        Provider::Local,
        Username::new("admin-1").unwrap(),
        Email::new("admin.1@system.com").unwrap(),
        Some(password),
    )
    .unwrap();
    User::new(UserId::new("#admin1").unwrap(), identity, admin_role()).unwrap()
}
/// Fixture: the plain `user` role.
pub fn user_role() -> Role {
    let id = RoleId::new("user").unwrap();
    Role::new(id, "User").unwrap()
}
/// Fixture: the `admin` role.
pub fn admin_role() -> Role {
    let id = RoleId::new("admin").unwrap();
    Role::new(id, "Administrator").unwrap()
}
/// Fixture: a person named "User One".
pub fn person1() -> Person {
    let name = Fullname::new("User", "One").unwrap();
    Person::new(name).unwrap()
}
|
use glium::glutin::event::{Event, StartCause, WindowEvent};
use glium::glutin::event_loop::{ControlFlow, EventLoop};
use glium::glutin::window::WindowBuilder;
use glium::glutin::ContextBuilder;
use lazy_static::lazy_static;
use send_wrapper::SendWrapper;
use std::cell::RefCell;
use std::collections::VecDeque;
use std::time::{Duration, Instant};
use crate::game::Game;
use crate::render;
lazy_static! {
    // The winit event loop, stored in an Option so `show_gui` can `take()`
    // ownership out of the static (EventLoop::run consumes it). SendWrapper
    // panics on cross-thread access, which is the intended guard here since
    // these types are not Send.
    static ref EVENT_LOOP: SendWrapper<RefCell<Option<EventLoop<()>>>> =
        SendWrapper::new(RefCell::new(Some(EventLoop::new())));
    // Global glium display, lazily built from EVENT_LOOP on first access.
    // NOTE: first deref of DISPLAY must happen before EVENT_LOOP is taken,
    // otherwise the `unwrap()` below panics.
    pub static ref DISPLAY: SendWrapper<glium::Display> = SendWrapper::new({
        let wb = WindowBuilder::new().with_title(crate::TITLE.to_owned());
        let cb = ContextBuilder::new().with_vsync(true);
        glium::Display::new(wb, cb, EVENT_LOOP.borrow().as_ref().unwrap())
            .expect("Failed to initialize display")
    });
}
/// Open the main window and run the GUI event loop at a fixed ~60 FPS cadence.
/// Never returns: `EventLoop::run` diverges, and the game state is saved when
/// the loop is destroyed.
pub fn show_gui() -> ! {
    let display = &**DISPLAY;
    // Initialize runtime data.
    let mut game = Game::load_from_file();
    let mut events_buffer = VecDeque::new();
    // Main loop.
    let mut last_frame_time = Instant::now();
    let mut next_frame_time = Instant::now();
    // Take the event loop out of the static; it can only be consumed once.
    let ev_loop = EVENT_LOOP.borrow_mut().take().unwrap();
    ev_loop.run(move |event, _ev_loop, control_flow| {
        // Handle events.
        let mut now = Instant::now();
        let mut do_frame = false;
        match event.to_static() {
            Some(Event::NewEvents(cause)) => match cause {
                StartCause::ResumeTimeReached {
                    start: _,
                    requested_resume,
                } => {
                    // Use the scheduled wake-up time rather than wall-clock
                    // "now" so the frame cadence stays stable.
                    now = requested_resume;
                    do_frame = true;
                }
                StartCause::Init => {
                    next_frame_time = now;
                    do_frame = true;
                }
                _ => (),
            },
            // The program is about to exit.
            Some(Event::LoopDestroyed) =>
                game.save_to_file()
            ,
            // Queue the event to be handled next time we render
            // everything.
            Some(ev) => events_buffer.push_back(ev),
            // Ignore this event.
            None => (),
        }
        if do_frame && next_frame_time <= now {
            let frame_duration = Duration::from_secs_f64(1.0 / 60.0);
            next_frame_time = now + frame_duration;
            if next_frame_time < Instant::now() {
                // Skip a frame (or several).
                next_frame_time = Instant::now() + frame_duration;
            }
            *control_flow = ControlFlow::WaitUntil(next_frame_time);
            // Actual elapsed time since the previous frame; falls back to the
            // nominal duration if the clock went backwards.
            let frame_duration = now
                .checked_duration_since(last_frame_time)
                .unwrap_or(frame_duration);
            // TODO: give `frame_duration` to egui if egui wants it
            last_frame_time = now;
            for ev in events_buffer.drain(..) {
                // Handle events.
                match ev {
                    Event::WindowEvent { event, .. } => match event {
                        // Handle window close event.
                        WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,
                        // Let the game handle any other event.
                        ev => game.handle_event(ev),
                    },
                    _ => (),
                }
            }
            game.do_frame(frame_duration);
            // Draw everything.
            let mut target = display.draw();
            render::draw_grid(&mut target, &game.grid, &mut game.camera);
            target.finish().expect("Failed to swap buffers");
        }
    })
}
|
use anyhow::{format_err, Error};
use derive_more::{Deref, From, Into};
use serde::{Deserialize, Serialize};
use stack_string::StackString;
use std::{
convert::{Into, TryFrom, TryInto},
fmt::Debug,
str::FromStr,
sync::Arc,
};
use url::Url;
use uuid::Uuid;
use gdrive_lib::date_time_wrapper::DateTimeWrapper;
use crate::{
file_info_gcs::FileInfoGcs, file_info_gdrive::FileInfoGDrive, file_info_local::FileInfoLocal,
file_info_s3::FileInfoS3, file_info_ssh::FileInfoSSH, file_service::FileService, map_parse,
models::FileInfoCache, path_buf_wrapper::PathBufWrapper, pgpool::PgPool,
url_wrapper::UrlWrapper,
};
/// Minimal stat subset tracked per file: modification time and size.
/// Both are stored as `u32` (see the cache conversions below).
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)]
pub struct FileStat {
    /// Modification time (seconds since the epoch, truncated to u32).
    pub st_mtime: u32,
    /// File size in bytes (truncated to u32).
    pub st_size: u32,
}
/// A hex-encoded MD5 digest (exactly 32 hex characters).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Into, From, Deref)]
pub struct Md5Sum(StackString);
impl FromStr for Md5Sum {
    type Err = Error;
    /// Parse a 32-character hex string into an `Md5Sum`.
    ///
    /// # Errors
    /// Returns an error if the input is not exactly 32 ASCII hex digits.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Previously only the length was checked; also require hex digits so
        // obviously corrupt values are rejected instead of silently stored.
        if s.len() == 32 && s.bytes().all(|b| b.is_ascii_hexdigit()) {
            Ok(Self(s.into()))
        } else {
            Err(format_err!("Invalid md5sum {s}"))
        }
    }
}
/// A hex-encoded SHA-1 digest (exactly 40 hex characters).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Into)]
pub struct Sha1Sum(StackString);
impl FromStr for Sha1Sum {
    type Err = Error;
    /// Parse a 40-character hex string into a `Sha1Sum`.
    ///
    /// # Errors
    /// Returns an error if the input is not exactly 40 ASCII hex digits.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Mirror `Md5Sum`: validate content, not just length.
        if s.len() == 40 && s.bytes().all(|b| b.is_ascii_hexdigit()) {
            Ok(Self(s.into()))
        } else {
            Err(format_err!("Invalid sha1sum {s}"))
        }
    }
}
/// Opaque identifier of a storage service instance (newtype over StackString).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Into, From, Deref)]
pub struct ServiceId(StackString);
impl From<String> for ServiceId {
    fn from(s: String) -> Self {
        Self(s.into())
    }
}
impl From<&str> for ServiceId {
    fn from(s: &str) -> Self {
        Self(s.into())
    }
}
impl From<ServiceSession> for ServiceId {
    // A session name can be used directly as a service id.
    fn from(s: ServiceSession) -> Self {
        Self(s.0)
    }
}
/// Name of a sync session; must be non-empty (enforced by `FromStr`).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default, Into, Deref)]
pub struct ServiceSession(StackString);
impl FromStr for ServiceSession {
    type Err = Error;
    /// Parse a session name, rejecting the empty string.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.is_empty() {
            Err(format_err!("Session name must not be empty"))
        } else {
            Ok(Self(s.into()))
        }
    }
}
/// All metadata recorded for a single file across the supported backends
/// (local, S3, GCS, GDrive, SSH).
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct FileInfoInner {
    /// Base file name.
    pub filename: StackString,
    /// Full path of the file.
    pub filepath: PathBufWrapper,
    /// URL form of the location (scheme selects the backend).
    pub urlname: UrlWrapper,
    /// MD5 checksum, when known.
    pub md5sum: Option<Md5Sum>,
    /// SHA-1 checksum, when known.
    pub sha1sum: Option<Sha1Sum>,
    /// Size/mtime stat info.
    pub filestat: FileStat,
    /// Identifier of the owning service instance.
    pub serviceid: ServiceId,
    /// Which backend service this file belongs to.
    pub servicetype: FileService,
    /// Sync session the file was recorded under.
    pub servicesession: ServiceSession,
}
impl Default for FileInfoInner {
    // Manual impl because `urlname: "."` must be parsed; the parse of "." is
    // expected to always succeed, hence the unwrap.
    fn default() -> Self {
        Self {
            filename: StackString::default(),
            filepath: ".".into(),
            urlname: ".".parse().unwrap(),
            md5sum: None,
            sha1sum: None,
            filestat: FileStat::default(),
            serviceid: ServiceId::default(),
            servicetype: FileService::default(),
            servicesession: ServiceSession::default(),
        }
    }
}
/// Shared, immutable handle to a file's metadata.
///
/// Wraps [`FileInfoInner`] in an `Arc`, so clones are O(1) refcount bumps;
/// the derived `Deref` exposes the inner fields directly.
#[derive(Clone, Debug, PartialEq, Eq, Default, Deref)]
pub struct FileInfo(Arc<FileInfoInner>);
/// Which field of a `FileInfo` to use as a lookup/index key.
pub enum FileInfoKeyType {
    FileName,
    FilePath,
    UrlName,
    Md5Sum,
    Sha1Sum,
    ServiceId,
}
/// Common interface of all backend-specific file-info types, allowing them
/// to be reduced to the generic [`FileInfo`] and queried for checksums/stat.
pub trait FileInfoTrait: Send + Sync + Debug {
    /// Borrow the generic file info.
    fn get_finfo(&self) -> &FileInfo;
    /// Consume self, yielding the generic file info.
    fn into_finfo(self) -> FileInfo;
    /// MD5 checksum, if available.
    fn get_md5(&self) -> Option<Md5Sum>;
    /// SHA-1 checksum, if available.
    fn get_sha1(&self) -> Option<Sha1Sum>;
    /// Size/mtime stat info.
    fn get_stat(&self) -> FileStat;
}
impl FileInfo {
    /// Construct a `FileInfo` from all of its fields.
    #[must_use]
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        filename: StackString,
        filepath: PathBufWrapper,
        urlname: UrlWrapper,
        md5sum: Option<Md5Sum>,
        sha1sum: Option<Sha1Sum>,
        filestat: FileStat,
        serviceid: ServiceId,
        servicetype: FileService,
        servicesession: ServiceSession,
    ) -> Self {
        let inner = FileInfoInner {
            filename,
            filepath,
            urlname,
            md5sum,
            sha1sum,
            filestat,
            serviceid,
            servicetype,
            servicesession,
        };
        Self(Arc::new(inner))
    }
    /// Wrap an already-built inner value.
    #[must_use]
    pub fn from_inner(inner: FileInfoInner) -> Self {
        Self(Arc::new(inner))
    }
    /// Borrow the inner metadata struct.
    #[must_use]
    pub fn inner(&self) -> &FileInfoInner {
        &self.0
    }
    /// # Errors
    /// Return error if bad scheme
    pub fn from_url(url: &Url) -> Result<Self, Error> {
        // Dispatch on the URL scheme to the matching backend implementation.
        match url.scheme() {
            "file" => FileInfoLocal::from_url(url).map(FileInfoTrait::into_finfo),
            "s3" => FileInfoS3::from_url(url).map(FileInfoTrait::into_finfo),
            "gs" => FileInfoGcs::from_url(url).map(FileInfoTrait::into_finfo),
            "gdrive" => FileInfoGDrive::from_url(url).map(FileInfoTrait::into_finfo),
            "ssh" => FileInfoSSH::from_url(url).map(FileInfoTrait::into_finfo),
            _ => Err(format_err!("Bad scheme")),
        }
    }
}
// The generic `FileInfo` is trivially its own generic representation.
impl FileInfoTrait for FileInfo {
    fn get_finfo(&self) -> &Self {
        self
    }
    fn into_finfo(self) -> Self {
        self
    }
    fn get_md5(&self) -> Option<Md5Sum> {
        self.md5sum.clone()
    }
    fn get_sha1(&self) -> Option<Sha1Sum> {
        self.sha1sum.clone()
    }
    fn get_stat(&self) -> FileStat {
        // FileStat is Copy.
        self.filestat
    }
}
impl TryFrom<&FileInfoCache> for FileInfo {
    type Error = Error;
    /// Rebuild a `FileInfo` from a database cache row; fails if any stored
    /// string field (url, checksums, service type/session) does not parse.
    fn try_from(item: &FileInfoCache) -> Result<Self, Self::Error> {
        let inner = FileInfoInner {
            filename: item.filename.clone(),
            filepath: item.filepath.as_str().into(),
            urlname: item.urlname.parse()?,
            md5sum: map_parse(&item.md5sum)?,
            sha1sum: map_parse(&item.sha1sum)?,
            filestat: FileStat {
                // NOTE(review): i32 -> u32 `as` casts wrap negative values;
                // presumably the DB only stores non-negative stats — confirm.
                st_mtime: item.filestat_st_mtime as u32,
                st_size: item.filestat_st_size as u32,
            },
            serviceid: item.serviceid.as_str().into(),
            servicetype: item.servicetype.parse()?,
            servicesession: item.servicesession.parse()?,
        };
        Ok(Self(Arc::new(inner)))
    }
}
impl TryFrom<FileInfoCache> for FileInfo {
type Error = Error;
fn try_from(item: FileInfoCache) -> Result<Self, Self::Error> {
let inner = FileInfoInner {
filename: item.filename,
filepath: item.filepath.as_str().into(),
urlname: item.urlname.parse()?,
md5sum: map_parse(&item.md5sum)?,
sha1sum: map_parse(&item.sha1sum)?,
filestat: FileStat {
st_mtime: item.filestat_st_mtime as u32,
st_size: item.filestat_st_size as u32,
},
serviceid: item.serviceid.as_str().into(),
servicetype: item.servicetype.parse()?,
servicesession: item.servicesession.parse()?,
};
Ok(Self(Arc::new(inner)))
}
}
impl FileInfo {
    /// Look up a file by URL in the database cache, returning `None` when no
    /// row exists.
    ///
    /// # Errors
    /// Return error if db query fails
    pub async fn from_database(pool: &PgPool, url: &Url) -> Result<Option<Self>, Error> {
        FileInfoCache::get_by_urlname(url, pool)
            .await?
            .map(TryInto::try_into)
            .transpose()
    }
}
impl From<&FileInfo> for FileInfoCache {
    /// Build a fresh database row (new UUID, timestamps set to now) from a
    /// `FileInfo`.
    fn from(item: &FileInfo) -> Self {
        Self {
            id: Uuid::new_v4(),
            filename: item.filename.clone(),
            filepath: item.filepath.to_string_lossy().as_ref().into(),
            urlname: item.urlname.as_str().into(),
            md5sum: item.md5sum.as_ref().map(|m| m.0.clone()),
            sha1sum: item.sha1sum.as_ref().map(|s| s.0.clone()),
            // NOTE(review): u32 -> i32 `as` casts wrap values above i32::MAX
            // (e.g. mtimes after 2038, files > 2 GiB) — confirm acceptable.
            filestat_st_mtime: item.filestat.st_mtime as i32,
            filestat_st_size: item.filestat.st_size as i32,
            serviceid: item.serviceid.0.clone(),
            servicetype: item.servicetype.to_str().into(),
            servicesession: item.servicesession.0.clone(),
            created_at: DateTimeWrapper::now(),
            deleted_at: None,
            modified_at: DateTimeWrapper::now(),
        }
    }
}
impl From<FileInfo> for FileInfoCache {
    /// Consuming conversion; borrows the value and reuses the `&FileInfo` impl.
    fn from(item: FileInfo) -> Self {
        (&item).into()
    }
}
/// Insert a `FileInfo` into the database cache and return the stored row.
///
/// # Errors
/// Return error if db query fails
pub async fn cache_file_info(pool: &PgPool, finfo: FileInfo) -> Result<FileInfoCache, Error> {
    let finfo_cache: FileInfoCache = finfo.into();
    finfo_cache.insert(pool).await?;
    // Read the row back to confirm the insert took effect.
    let cache = finfo_cache
        .get_cache(pool)
        .await?
        .ok_or_else(|| format_err!("Insert failed"))?;
    Ok(cache)
}
#[cfg(test)]
mod tests {
    use stack_string::StackString;
    use crate::file_info::{map_parse, ServiceSession};
    // map_parse should parse Some(string) into Some(parsed value).
    #[test]
    fn test_map_parse() {
        let test_sessionstr: Option<StackString> = Some("test_sessionname".into());
        let test_sessionname: Option<ServiceSession> = map_parse(&test_sessionstr).unwrap();
        assert_eq!(
            test_sessionname,
            Some(ServiceSession("test_sessionname".into()))
        );
    }
}
|
use crate::block_assembler::{
BlockAssembler, BlockTemplateCacheKey, CandidateUncles, TemplateCache,
};
use crate::component::commit_txs_scanner::CommitTxsScanner;
use crate::component::entry::TxEntry;
use crate::config::BlockAssemblerConfig;
use crate::error::BlockAssemblerError;
use crate::pool::TxPool;
use ckb_dao::DaoCalculator;
use ckb_jsonrpc_types::BlockTemplate;
use ckb_logger::info;
use ckb_snapshot::Snapshot;
use ckb_store::ChainStore;
use ckb_types::{
core::{
cell::{resolve_transaction, OverlayCellProvider, TransactionsProvider},
Cycle, EpochExt, ScriptHashType, TransactionView, UncleBlockView, Version,
},
packed::{CellbaseWitness, ProposalShortId, Script},
prelude::*,
};
use failure::Error as FailureError;
use faketime::unix_time_as_millis;
use futures::future::Future;
use lru_cache::LruCache;
use std::collections::HashSet;
use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
use std::sync::Arc;
use std::{cmp, iter};
use tokio::prelude::{Async, Poll};
use tokio::sync::lock::Lock;
/// `(bytes_limit, proposals_limit, version)` tuple shared by the template processes.
type Args = (u64, u64, Version);
/// Future that tries to satisfy a block-template request from the shared
/// template cache before any expensive assembly work is done.
pub struct BlockTemplateCacheProcess {
    /// Shared cache keyed by (tip hash, bytes limit, proposals limit, version).
    pub template_caches: Lock<LruCache<BlockTemplateCacheKey, TemplateCache>>,
    /// Timestamp of the last tx-pool update, used to detect staleness.
    pub last_txs_updated_at: Arc<AtomicU64>,
    /// Timestamp of the last uncle-pool update, used to detect staleness.
    pub last_uncles_updated_at: Arc<AtomicU64>,
    /// Chain snapshot the template is built against.
    pub snapshot: Arc<Snapshot>,
    /// (bytes_limit, proposals_limit, version) of the request.
    pub args: Args,
}
impl BlockTemplateCacheProcess {
pub fn new(
template_caches: Lock<LruCache<BlockTemplateCacheKey, TemplateCache>>,
last_txs_updated_at: Arc<AtomicU64>,
last_uncles_updated_at: Arc<AtomicU64>,
snapshot: Arc<Snapshot>,
args: Args,
) -> Self {
BlockTemplateCacheProcess {
template_caches,
last_txs_updated_at,
last_uncles_updated_at,
snapshot,
args,
}
}
}
impl Future for BlockTemplateCacheProcess {
    type Item = BlockTemplate;
    // Err(()) is not a failure: it signals "no usable cache entry" so the
    // caller falls through to building a template from scratch.
    type Error = ();
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.template_caches.poll_lock() {
            Async::Ready(guard) => {
                let (bytes_limit, proposals_limit, version) = self.args;
                let tip_header = self.snapshot.tip_header();
                let tip_hash = tip_header.hash();
                // Block timestamps must strictly increase past the tip.
                let current_time = cmp::max(unix_time_as_millis(), tip_header.timestamp() + 1);
                let last_uncles_updated_at = self.last_uncles_updated_at.load(Ordering::SeqCst);
                let last_txs_updated_at = self.last_txs_updated_at.load(Ordering::SeqCst);
                if let Some(template_cache) =
                    guard.get(&(tip_hash, bytes_limit, proposals_limit, version))
                {
                    // check template cache outdate time
                    if !template_cache.is_outdate(current_time) {
                        let mut template = template_cache.template.clone();
                        template.current_time = current_time.into();
                        return Ok(Async::Ready(template));
                    }
                    // Outdated but nothing changed in tx/uncle pools: still reusable.
                    if !template_cache.is_modified(last_uncles_updated_at, last_txs_updated_at) {
                        let mut template = template_cache.template.clone();
                        template.current_time = current_time.into();
                        return Ok(Async::Ready(template));
                    }
                }
                Err(())
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
/// Future that builds the cellbase (coinbase) transaction for the candidate block.
pub struct BuildCellbaseProcess {
    /// Chain snapshot providing the tip header.
    pub snapshot: Arc<Snapshot>,
    /// Assembler config supplying the miner lock script and message.
    pub config: Arc<BlockAssemblerConfig>,
}
impl BuildCellbaseProcess {
pub fn new(snapshot: Arc<Snapshot>, config: Arc<BlockAssemblerConfig>) -> Self {
BuildCellbaseProcess { snapshot, config }
}
}
impl Future for BuildCellbaseProcess {
    type Item = TransactionView;
    type Error = FailureError;
    /// Synchronously builds the cellbase; resolves immediately (no lock to wait on).
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        let tip_header = self.snapshot.tip_header();
        let hash_type: ScriptHashType = self.config.hash_type.clone().into();
        // Lock script that will own the block reward, taken from config.
        let cellbase_lock = Script::new_builder()
            .args(self.config.args.as_bytes().pack())
            .code_hash(self.config.code_hash.pack())
            .hash_type(hash_type.into())
            .build();
        // Witness carries the lock plus an arbitrary configured message.
        let cellbase_witness = CellbaseWitness::new_builder()
            .lock(cellbase_lock)
            .message(self.config.message.as_bytes().pack())
            .build();
        let cellbase =
            BlockAssembler::build_cellbase(&self.snapshot, tip_header, cellbase_witness)?;
        Ok(Async::Ready(cellbase))
    }
}
/// Future that selects candidate uncles for the next block once the
/// candidate-uncle set lock is acquired.
pub struct PrepareUnclesProcess {
    /// Chain snapshot providing consensus and tip data.
    pub snapshot: Arc<Snapshot>,
    /// Timestamp of the last uncle-pool update, forwarded to the caller.
    pub last_uncles_updated_at: Arc<AtomicU64>,
    /// Shared set of candidate uncles.
    pub candidate_uncles: Lock<CandidateUncles>,
}
impl Future for PrepareUnclesProcess {
    type Item = (Vec<UncleBlockView>, EpochExt, u64);
    type Error = FailureError;
    /// Polls the candidate-uncle lock; once acquired, computes the epoch of
    /// the candidate block and selects the uncles valid for it.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.candidate_uncles.poll_lock() {
            Async::Ready(mut guard) => {
                let consensus = self.snapshot.consensus();
                let tip_header = self.snapshot.tip_header();
                let last_epoch = self
                    .snapshot
                    .get_current_epoch_ext()
                    .expect("current epoch ext");
                // If the tip crosses an epoch boundary, the candidate block
                // belongs to the next epoch; otherwise stay in the current one.
                let next_epoch_ext =
                    self.snapshot
                        .next_epoch_ext(consensus, &last_epoch, tip_header);
                let current_epoch = next_epoch_ext.unwrap_or(last_epoch);
                let candidate_number = tip_header.number() + 1;
                let uncles = BlockAssembler::prepare_uncles(
                    &self.snapshot,
                    candidate_number,
                    // Fixed: this argument was mojibake `¤t_epoch` — an
                    // HTML-entity-mangled `&current_epoch` — which did not compile.
                    &current_epoch,
                    &mut guard,
                );
                let last_uncles_updated_at = self.last_uncles_updated_at.load(Ordering::SeqCst);
                Ok(Async::Ready((
                    uncles,
                    current_epoch,
                    last_uncles_updated_at,
                )))
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
/// Future that, once the tx-pool lock is acquired, picks the proposals and
/// committed transactions to include in the block template.
pub struct PackageTxsProcess {
    /// Shared transaction pool.
    pub tx_pool: Lock<TxPool>,
    /// Maximum serialized block size.
    pub bytes_limit: u64,
    /// Maximum number of proposals to include.
    pub proposals_limit: u64,
    /// Maximum total cycles for committed transactions.
    pub max_block_cycles: Cycle,
    /// Timestamp of the last tx-pool update, forwarded to the caller.
    pub last_txs_updated_at: Arc<AtomicU64>,
    /// Cellbase transaction (its size counts against bytes_limit).
    pub cellbase: TransactionView,
    /// Uncles already chosen for the template.
    pub uncles: Vec<UncleBlockView>,
}
impl Future for PackageTxsProcess {
    type Item = (HashSet<ProposalShortId>, Vec<TxEntry>, u64);
    type Error = FailureError;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.tx_pool.poll_lock() {
            Async::Ready(guard) => {
                // Proposals already carried by the chosen uncles must not be
                // duplicated in the template.
                let uncle_proposals = self
                    .uncles
                    .iter()
                    .flat_map(|u| u.data().proposals().into_iter())
                    .collect();
                let proposals =
                    guard.get_proposals(self.proposals_limit as usize, &uncle_proposals);
                // Size budget left for committed txs after cellbase, uncles
                // and proposal ids are accounted for.
                let txs_size_limit = BlockAssembler::calculate_txs_size_limit(
                    self.bytes_limit,
                    self.cellbase.data(),
                    &self.uncles,
                    &proposals,
                )?;
                let (entries, size, cycles) = CommitTxsScanner::new(guard.proposed())
                    .txs_to_commit(
                        txs_size_limit,
                        self.max_block_cycles,
                        guard.config.min_fee_rate,
                    );
                if !entries.is_empty() {
                    info!(
                        "[get_block_template] candidate txs count: {}, size: {}/{}, cycles:{}/{}",
                        entries.len(),
                        size,
                        txs_size_limit,
                        cycles,
                        self.max_block_cycles
                    );
                }
                let last_txs_updated_at = self.last_txs_updated_at.load(Ordering::SeqCst);
                Ok(Async::Ready((proposals, entries, last_txs_updated_at)))
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
/// Future that assembles the final `BlockTemplate` from the pieces produced
/// by the preceding processes (cellbase, uncles, packaged txs).
pub struct BlockTemplateBuilder {
    /// Chain snapshot the template is built against.
    pub snapshot: Arc<Snapshot>,
    /// Transactions selected for commitment.
    pub entries: Vec<TxEntry>,
    /// Proposal ids selected for the template.
    pub proposals: HashSet<ProposalShortId>,
    /// Cellbase transaction.
    pub cellbase: TransactionView,
    /// Monotonic counter used to stamp each issued template.
    pub work_id: Arc<AtomicUsize>,
    /// Epoch the candidate block belongs to.
    pub current_epoch: EpochExt,
    /// Uncles included in the template.
    pub uncles: Vec<UncleBlockView>,
    /// (bytes_limit, proposals_limit, version) of the request.
    pub args: Args,
    /// Uncle-pool timestamp captured when uncles were prepared.
    pub uncles_updated_at: u64,
    /// Tx-pool timestamp captured when txs were packaged.
    pub txs_updated_at: u64,
}
impl Future for BlockTemplateBuilder {
    // Resolves to the template plus the pool timestamps it was built from,
    // which the caller uses to populate the template cache.
    type Item = (BlockTemplate, u64, u64);
    type Error = FailureError;
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        let consensus = self.snapshot.consensus();
        let tip_header = self.snapshot.tip_header();
        let tip_hash = tip_header.hash();
        // Cellbase first, then the committed entries, in order.
        let mut txs =
            iter::once(&self.cellbase).chain(self.entries.iter().map(|entry| &entry.transaction));
        let mut seen_inputs = HashSet::new();
        // Resolve inputs against both the in-template transactions and the snapshot.
        let transactions_provider = TransactionsProvider::new(txs.clone());
        let overlay_cell_provider =
            OverlayCellProvider::new(&transactions_provider, self.snapshot.as_ref());
        let rtxs = txs
            .try_fold(vec![], |mut rtxs, tx| {
                resolve_transaction(
                    tx.clone(),
                    &mut seen_inputs,
                    &overlay_cell_provider,
                    self.snapshot.as_ref(),
                )
                .map(|rtx| {
                    rtxs.push(rtx);
                    rtxs
                })
            })
            .map_err(|_| BlockAssemblerError::InvalidInput)?;
        // Generate DAO fields here
        let dao =
            DaoCalculator::new(consensus, self.snapshot.as_ref()).dao_field(&rtxs, tip_header)?;
        let candidate_number = tip_header.number() + 1;
        let (bytes_limit, _, version) = self.args;
        let cycles_limit = consensus.max_block_cycles();
        let uncles_count_limit = consensus.max_uncles_num() as u32;
        // Should recalculate current time after create cellbase (create cellbase may spend a lot of time)
        let current_time = cmp::max(unix_time_as_millis(), tip_header.timestamp() + 1);
        Ok(Async::Ready((
            BlockTemplate {
                version: version.into(),
                compact_target: self.current_epoch.compact_target().into(),
                current_time: current_time.into(),
                number: candidate_number.into(),
                epoch: self
                    .current_epoch
                    .number_with_fraction(candidate_number)
                    .into(),
                parent_hash: tip_hash.unpack(),
                cycles_limit: cycles_limit.into(),
                bytes_limit: bytes_limit.into(),
                uncles_count_limit: u64::from(uncles_count_limit).into(),
                uncles: self
                    .uncles
                    .iter()
                    .map(BlockAssembler::transform_uncle)
                    .collect(),
                transactions: self
                    .entries
                    .iter()
                    .map(|entry| BlockAssembler::transform_tx(entry, false, None))
                    .collect(),
                proposals: self.proposals.iter().cloned().map(Into::into).collect(),
                cellbase: BlockAssembler::transform_cellbase(&self.cellbase, None),
                work_id: (self.work_id.fetch_add(1, Ordering::SeqCst) as u64).into(),
                dao: dao.into(),
            },
            self.uncles_updated_at,
            self.txs_updated_at,
        )))
    }
}
/// Future that writes a freshly built template into the shared cache.
/// `key` and `template` are Options so they can be moved out on completion.
pub struct UpdateBlockTemplateCache {
    template_caches: Lock<LruCache<BlockTemplateCacheKey, TemplateCache>>,
    key: Option<BlockTemplateCacheKey>,
    uncles_updated_at: u64,
    txs_updated_at: u64,
    template: Option<BlockTemplate>,
}
impl UpdateBlockTemplateCache {
pub fn new(
template_caches: Lock<LruCache<BlockTemplateCacheKey, TemplateCache>>,
key: BlockTemplateCacheKey,
uncles_updated_at: u64,
txs_updated_at: u64,
template: BlockTemplate,
) -> Self {
UpdateBlockTemplateCache {
template_caches,
key: Some(key),
uncles_updated_at,
txs_updated_at,
template: Some(template),
}
}
}
impl Future for UpdateBlockTemplateCache {
    type Item = ();
    type Error = ();
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.template_caches.poll_lock() {
            Async::Ready(mut guard) => {
                // key/template are consumed here, making a second successful
                // poll a programming error by contract.
                let key = self.key.take().expect("cannot poll twice");
                let template = self.template.take().expect("cannot poll twice");
                guard.insert(
                    key,
                    TemplateCache {
                        time: template.current_time.into(),
                        uncles_updated_at: self.uncles_updated_at,
                        txs_updated_at: self.txs_updated_at,
                        template,
                    },
                );
                Ok(Async::Ready(()))
            }
            Async::NotReady => Ok(Async::NotReady),
        }
    }
}
|
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use failure::Error;
use identity::{Identity, Signature, Secret, Address};
use packet::{self, RoutingDirection, RoutingKey};
use snow::{self, params::NoiseParams, Builder};
use snow::resolvers::{FallbackResolver, CryptoResolver};
use std::io::Write;
use std::io::Read;
/// Errors raised while encrypting, decrypting or validating packets.
#[derive(Debug, Fail)]
enum NoiseError {
    #[fail(display = "packet too small, expected {}, got {}", need, got)]
    TooSmall { need: usize, got: usize },
    #[fail(display = "packet not padded correctly to 256 bytes: {}", len)]
    PktMisaligned { len: usize },
    #[fail(display = "decrypted payload size header is bigger than payload")]
    DecryptedInvalidPayloadLen,
    #[fail(display = "refusing to send unencrypted payload")]
    PayloadUnencrypted,
    #[fail(
        display = "trying to decrypt packet for route {:#x} but the encryption state is for route {:#x}",
        dest,
        this
    )]
    WrongRoute { dest: RoutingKey, this: RoutingKey },
    #[fail(
        display = "packet arrived with the same routing direction we're sending with {:?}",
        dir,
    )]
    WrongDirection { dir: RoutingDirection },
    #[fail(display = "invalid cookie. probably a replay")]
    InvalidCookie,
}
/// An established, encrypted session after the handshake completed.
pub struct Transport {
    // Nonce counter for outgoing messages; incremented per send.
    counter: u64,
    noise: snow::Session,
    // Route this session is bound to; packets for other routes are rejected.
    route: RoutingKey,
    // Direction we send in; incoming packets must have the opposite direction.
    direction: RoutingDirection,
}
/// Initiator-side handshake state, waiting for the responder's reply.
pub struct HandshakeRequester {
    noise: snow::Session,
    // Cookie echoed back by the responder; mismatch indicates a replay.
    timestamp: u64,
    // Route assigned by the responder, learned in recv_response.
    route: Option<RoutingKey>,
}
/// Responder-side handshake state, ready to send the response packet.
pub struct HandshakeResponder {
    noise: snow::Session,
    timestamp: u64,
}
/// What kind of message `send` is producing; selects payload layout and the
/// byte overhead reserved for the noise framing.
enum SendMode<'a> {
    Transport {
        counter: u64,
        payload: &'a [u8],
    },
    InsecureHandshake{
        identity: Identity,
        timestamp: u64,
    },
    Handshake{
        identity: Identity,
        timestamp: u64,
    },
}
/// Encrypt one message through the noise session and frame it as an
/// `EncryptedPacket`, padding the plaintext so the ciphertext is a multiple
/// of 256 bytes (the per-mode `overhead` accounts for noise framing and, for
/// handshakes, the signature appended later by the caller).
fn send(
    noise: &mut snow::Session,
    route: RoutingKey,
    direction: RoutingDirection,
    payload: SendMode,
) -> Result<packet::EncryptedPacket, Error> {
    // Transport messages use an explicit nonce; handshakes use nonce 0.
    let counter = if let &SendMode::Transport { counter, .. } = &payload {
        Some(counter)
    } else {
        None
    };
    let mut inbuf = Vec::new();
    let overhead = match payload {
        SendMode::Transport { payload, .. } => {
            assert!(payload.len() + 100 < u16::max_value() as usize);
            // Length-prefix the payload so padding can be stripped on receive.
            inbuf.write_u16::<BigEndian>(payload.len() as u16)?;
            inbuf.extend_from_slice(payload);
            16
        }
        SendMode::InsecureHandshake{
            identity,
            timestamp,
        } => {
            assert_eq!(identity.as_bytes().len(), 32);
            inbuf.write_all(&identity.as_bytes())?;
            inbuf.write_u64::<BigEndian>(timestamp)?;
            32 // ephermal
            + 64 // signature
        }
        SendMode::Handshake{
            identity,
            timestamp,
        } => {
            assert_eq!(identity.as_bytes().len(), 32);
            inbuf.write_all(&identity.as_bytes())?;
            inbuf.write_u64::<BigEndian>(timestamp)?;
            16 // tag
            + 32 // ephermal
            + 64 // signature
        }
    };
    // Pad plaintext so ciphertext + overhead lands on a 256-byte boundary.
    let padding = 256 - ((inbuf.len() + overhead) % 256);
    inbuf.extend_from_slice(vec![0u8; padding].as_ref());
    let mut buf = vec![0; inbuf.len() + overhead];
    let (len, counter) = if let Some(counter) = counter {
        (noise.write_message_with_nonce(counter, &inbuf, &mut buf)?, counter + 1)
    } else {
        (noise.write_message(&inbuf, &mut buf)?, 0)
    };
    buf.truncate(len);
    // Debug-build guard against accidentally emitting plaintext.
    if cfg!(test) && !noise.was_write_payload_encrypted() {
        return Err(NoiseError::PayloadUnencrypted.into());
    }
    let pkt = packet::EncryptedPacket {
        version: 0x08,
        route,
        direction,
        counter,
        payload: buf,
    };
    Ok(pkt)
}
impl Transport {
    /// Encrypt and frame a payload for this session's route and direction.
    pub fn send(&mut self, payload: &[u8]) -> Result<packet::EncryptedPacket, Error> {
        self.counter += 1;
        let pkt = send(
            &mut self.noise,
            self.route,
            self.direction.clone(),
            SendMode::Transport {
                counter: self.counter,
                payload,
            },
        )?;
        assert_eq!(pkt.payload.len() % 256, 0);
        Ok(pkt)
    }
    /// Decrypt a received packet, validating route and direction, and strip
    /// the length prefix and padding from the plaintext.
    pub fn recv(&mut self, pkt: packet::EncryptedPacket) -> Result<Vec<u8>, Error> {
        if pkt.route != self.route {
            return Err(NoiseError::WrongRoute {
                dest: pkt.route,
                this: self.route,
            }.into());
        }
        // A packet in our own send direction means it looped back or the
        // peer misbehaved.
        if pkt.direction == self.direction {
            return Err(NoiseError::WrongDirection { dir: pkt.direction }.into());
        }
        let mut outbuf = vec![0; pkt.payload.len()];
        // Wire counters start at 1 (send pre-increments); the nonce is counter - 1.
        let len = self
            .noise
            .read_message_with_nonce(pkt.counter - 1, &pkt.payload, &mut outbuf)?;
        outbuf.truncate(len);
        if len < 2 {
            return Err(NoiseError::TooSmall { need: 2, got: len }.into());
        }
        // First two bytes are the real payload length; the rest is padding.
        let len = (&outbuf[..]).read_u16::<BigEndian>()? as usize;
        let mut payload = outbuf.split_off(2);
        if len > payload.len() {
            return Err(NoiseError::DecryptedInvalidPayloadLen.into());
        }
        payload.truncate(len);
        Ok(payload)
    }
    /// True if this side initiated the handshake.
    pub fn is_initiator(&self) -> bool {
        self.direction == RoutingDirection::Initiator2Responder
    }
    /// Route this session is bound to.
    pub fn route(&self) -> RoutingKey {
        self.route
    }
}
impl HandshakeResponder {
    /// Complete the handshake from the responder side: send the response
    /// packet (echoing the requester's timestamp cookie), sign the handshake
    /// hash, and transition into an established `Transport`.
    pub fn send_response(
        mut self,
        route: RoutingKey,
        secret: &Secret,
    ) -> Result<(Transport, packet::EncryptedPacket), Error> {
        let mut pkt = send(
            &mut self.noise,
            route,
            RoutingDirection::Responder2Initiator,
            SendMode::Handshake{
                timestamp: self.timestamp,
                identity: secret.identity(),
            },
        )?;
        // Append the 64-byte signature over the handshake hash; `send` already
        // reserved room for it in its overhead, so alignment is preserved.
        let signature = secret.sign(b"carrier handshake hash 1", self.noise.get_handshake_hash()?);
        pkt.payload.extend_from_slice(&signature.as_bytes());
        // (a duplicated copy of this assertion was removed)
        assert_eq!(pkt.payload.len() % 256, 0);
        assert_ne!(route, 0);
        Ok((
            Transport {
                counter: 0,
                noise: self.noise.into_stateless_transport_mode()?,
                route,
                direction: RoutingDirection::Responder2Initiator,
            },
            pkt,
        ))
    }
}
/// Decrypt and validate a handshake packet. Layout: noise message followed by
/// a trailing 64-byte signature over the handshake hash; the decrypted
/// plaintext is 32 bytes identity, 8 bytes timestamp, then a (cert-count,
/// len-prefixed certs) chain. Returns the verified identity and timestamp.
fn recv_handshake(noise: &mut snow::Session, pkt: packet::EncryptedPacket)
    -> Result<(Identity, u64), Error>
{
    // All packets are padded to 256-byte multiples by the sender.
    if pkt.payload.len() % 256 != 0 {
        return Err(NoiseError::PktMisaligned { len: pkt.payload.len() }.into());
    }
    // Signature sits outside the noise message, at the end of the payload.
    let mut signature = [0; 64];
    signature.copy_from_slice(&pkt.payload[pkt.payload.len() - 64..pkt.payload.len()]);
    let signature = Signature::from_array(signature);
    let mut outbuf = vec![0; pkt.payload.len()];
    let len = noise.read_message(&pkt.payload[..pkt.payload.len()- 64], &mut outbuf)?;
    if len < 120 {
        return Err(NoiseError::TooSmall { need: 120, got: len }.into());
    }
    let identity = Identity::from_bytes(&outbuf[0..32])?;
    let timestamp = (&outbuf[32..40]).read_u64::<BigEndian>()?;
    // Parse the (currently unused) certificate chain that follows.
    let mut reader = &outbuf[40..];
    let mut chain = Vec::new();
    let numcerts = reader.read_u16::<BigEndian>()?;
    for _ in 0..numcerts {
        let len = reader.read_u16::<BigEndian>()?;
        let mut crt = vec![0;len as usize];
        reader.read_exact(&mut crt)?;
        chain.push(crt);
    }
    // Verify the sender's signature over the noise handshake hash.
    identity.verify(
        b"carrier handshake hash 1",
        noise.get_handshake_hash()?,
        &signature,
    )?;
    Ok((identity, timestamp))
}
impl HandshakeRequester {
    /// Process the responder's handshake packet: verify its identity and that
    /// it echoed our timestamp cookie (replay protection), and record the
    /// route it assigned.
    pub fn recv_response(&mut self, pkt: packet::EncryptedPacket) -> Result<Identity,Error> {
        let route = pkt.route;
        let (identity, timestamp) = recv_handshake(&mut self.noise, pkt)?;
        // A mismatched cookie means this response was not for our request.
        if timestamp != self.timestamp {
            return Err(NoiseError::InvalidCookie.into());
        }
        self.route = Some(route);
        Ok(identity)
    }
    /// Finish the handshake, producing an established `Transport`.
    ///
    /// # Panics
    /// Panics if called before a successful `recv_response`.
    pub fn into_transport(self) -> Result<Transport, Error> {
        Ok(Transport {
            counter: 0,
            noise: self.noise.into_stateless_transport_mode()?,
            route: self
                .route
                .expect("into_transport can only be called after recv_response"),
            direction: RoutingDirection::Initiator2Responder,
        })
    }
}
/// Start a handshake as the initiator. With a known remote static key this
/// uses the authenticated `NK` noise pattern; without one it falls back to
/// the unauthenticated `NN` pattern ("insecure handshake"). Returns the
/// in-progress handshake state and the first packet to send (route 0).
pub fn initiate(
    remote_static: Option<&Address>,
    secret: &Secret,
    timestamp: u64,
) -> Result<(HandshakeRequester, packet::EncryptedPacket), Error> {
    let mut noise = if let Some(remote_static) = remote_static {
        let params: NoiseParams = "Noise_NK_25519_ChaChaPoly_SHA256".parse().unwrap();
        new_noise_builder(params)
            .remote_public_key(remote_static.as_bytes())
            .prologue("carrier has arrived".as_bytes())
            .build_initiator()
            .expect("building noise session")
    } else {
        let params: NoiseParams = "Noise_NN_25519_ChaChaPoly_SHA256".parse().unwrap();
        new_noise_builder(params)
            .prologue("carrier has arrived".as_bytes())
            .build_initiator()
            .expect("building noise session")
    };
    let identity = secret.identity();
    let mut pkt = send(
        &mut noise,
        0,
        RoutingDirection::Initiator2Responder,
        if remote_static.is_some() {
            SendMode::Handshake{
                identity,
                timestamp,
            }
        } else {
            SendMode::InsecureHandshake{
                identity,
                timestamp,
            }
        }
    )?;
    // Append our signature over the handshake hash; `send` reserved the room.
    let signature = secret.sign(b"carrier handshake hash 1", noise.get_handshake_hash()?);
    pkt.payload.extend_from_slice(&signature.as_bytes());
    assert_eq!(pkt.payload.len() % 256, 0);
    let s = HandshakeRequester {
        timestamp,
        // fixed clippy::redundant_field_names (`noise: noise`)
        noise,
        route: None,
    };
    Ok((s, pkt))
}
/// Accept an incoming handshake as the responder. With a local static secret
/// this expects the authenticated `NK` pattern; without one, the
/// unauthenticated `NN` pattern. Returns the responder state plus the
/// verified peer identity and its timestamp cookie.
pub fn respond(
    xsecret: Option<&Secret>,
    pkt: packet::EncryptedPacket,
) -> Result<(HandshakeResponder, Identity, u64), Error> {
    let mut noise = if let Some(xsecret) = xsecret{
        let params: NoiseParams = "Noise_NK_25519_ChaChaPoly_SHA256".parse().unwrap();
        new_noise_builder(params)
            .local_private_key(xsecret.as_bytes())
            .prologue("carrier has arrived".as_bytes())
            .build_responder()
            .expect("building noise session")
    } else {
        let params: NoiseParams = "Noise_NN_25519_ChaChaPoly_SHA256".parse().unwrap();
        new_noise_builder(params)
            .prologue("carrier has arrived".as_bytes())
            .build_responder()
            .expect("building noise session")
    };
    let (identity, timestamp) = recv_handshake(&mut noise, pkt)?;
    Ok((
        HandshakeResponder {
            noise,
            timestamp,
        },
        identity,
        timestamp,
    ))
}
/// Crypto resolver that only supplies a random number generator; every
/// other primitive resolves to `None` so the fallback chain (see
/// `new_noise_builder`) can pick it up elsewhere.
#[derive(Default)]
struct RandResolver {
}
use rand::{RngCore};
use rand::rngs::{OsRng};
/// Wrapper around the operating-system RNG so it can be handed to snow
/// as a `snow::types::Random` implementation.
struct RandomOs {
    rng : OsRng
}
impl Default for RandomOs {
    /// Open a handle to the OS entropy source.
    ///
    /// Failing to obtain system entropy makes any cryptography unsound,
    /// so this is treated as an unrecoverable invariant violation; the
    /// `expect` message states why the panic is acceptable here.
    fn default() -> RandomOs {
        RandomOs {rng: OsRng::new().expect("cannot open the OS random number generator")}
    }
}
impl snow::types::Random for RandomOs {
    /// Fill `out` with bytes from the operating-system RNG.
    fn fill_bytes(&mut self, out: &mut [u8]) {
        self.rng.fill_bytes(out);
    }
}
impl CryptoResolver for RandResolver {
    /// Supply the OS-backed RNG — the only primitive this resolver provides.
    /// Spelled with `dyn` for consistency with the other methods below
    /// (the bare-trait-object syntax is deprecated).
    fn resolve_rng(&self) -> Option<Box<dyn snow::types::Random>> {
        Some(Box::new(RandomOs::default()))
    }
    /// Diffie-Hellman is delegated to the other resolver in the fallback chain.
    fn resolve_dh (&self, _ : &snow::params::DHChoice)
        -> Option<Box<(dyn snow::types::Dh + 'static)>>
    {
        None
    }
    /// Hashing is delegated to the other resolver in the fallback chain.
    fn resolve_hash(&self, _ : &snow::params::HashChoice)
        -> Option<Box<(dyn snow::types::Hash + 'static)>>
    {
        None
    }
    /// Ciphers are delegated to the other resolver in the fallback chain.
    fn resolve_cipher(&self, _:&snow::params::CipherChoice)
        -> Option<Box<(dyn snow::types::Cipher + 'static)>>
    {
        None
    }
}
/// Construct a snow `Builder` wired up with our resolver stack:
/// HaclStar primitives are preferred, and the OS-RNG-only `RandResolver`
/// fills in anything HaclStar does not provide.
fn new_noise_builder<'builder>(params: NoiseParams) -> Builder<'builder> {
    let preferred = Box::new(snow::resolvers::HaclStarResolver::default());
    let fallback = Box::new(RandResolver::default());
    let resolver = FallbackResolver::new(preferred, fallback);
    Builder::with_resolver(params, Box::new(resolver))
}
/*
#[test]
fn handshake() {
use identity;
// <- s
// ...
let (secret, public) = identity::generate_x25519();
let cert = b"letmein";
let (i, pkt1) = HandshakeBuilder::new().initiate(&public, cert).unwrap();
assert_eq!(pkt1.counter, 0);
assert_eq!(pkt1.receiver, 0);
// -> e, es, u, u[h]
let (r, pkt1) = HandshakeBuilder::new().respond(&secret.0, pkt1).unwrap();
assert_eq!(pkt1, cert);
// <- e, ee, u, u[h]
let cert = b"come on in!";
let (mut r, pkt2_enc) = r.send_response(cert).unwrap();
assert_eq!(pkt2_enc.receiver, i.this_channel);
let (mut i, pkt2_pln) = i.recv_response(pkt2_enc).unwrap();
assert_eq!(pkt2_pln, cert);
assert_ne!(i.peer_channel, 0);
assert_ne!(r.peer_channel, 0);
assert_eq!(i.this_channel, r.peer_channel);
assert_eq!(r.this_channel, i.peer_channel);
let t1 = b"hello";
let p1 = r.send(t1).unwrap();
let t2 = b"send me some packets mate";
let p2 = r.send(t2).unwrap();
assert_eq!(p2.counter, 2);
let p2 = i.recv(p2).unwrap();
assert_eq!(p2, t2);
assert_eq!(p1.counter, 1);
let p1 = i.recv(p1).unwrap();
assert_eq!(p1, t1);
}
#[test]
fn dead_chan() {
use identity;
let (secret, public) = identity::generate_x25519();
let (i, pkt1) = HandshakeBuilder::new().initiate(&public, &[]).unwrap();
let (r, _) = HandshakeBuilder::new().respond(&secret.0, pkt1).unwrap();
let (mut r, pkt2_enc) = r.send_response(&[]).unwrap();
let (mut i, _pkt2_pln) = i.recv_response(pkt2_enc).unwrap();
assert_ne!(i.peer_channel, 0);
assert_ne!(r.peer_channel, 0);
assert_eq!(i.this_channel, r.peer_channel);
assert_eq!(r.this_channel, i.peer_channel);
let t1 = b"hello";
let p1 = r.send(t1).unwrap();
assert_eq!(p1.counter, 1);
let p1 = i.recv(p1).unwrap();
assert_eq!(p1, t1);
let t2 = b"send me some packets mate";
let p2 = r.send(t2).unwrap();
assert!(HandshakeBuilder::new().respond(&secret.0, p2).is_err());
}
#[test]
fn large_pkt() {
use identity;
let (secret, public) = identity::generate_x25519();
let (i, pkt1) = HandshakeBuilder::new().initiate(&public, &[]).unwrap();
let (r, _) = HandshakeBuilder::new().respond(&secret.0, pkt1).unwrap();
let (mut r, pkt2_enc) = r.send_response(&[]).unwrap();
let (mut i, _pkt2_pln) = i.recv_response(pkt2_enc).unwrap();
assert_ne!(i.peer_channel, 0);
assert_ne!(r.peer_channel, 0);
assert_eq!(i.this_channel, r.peer_channel);
assert_eq!(r.this_channel, i.peer_channel);
let t1 = vec![0x23; 60000];
let p1 = r.send(&t1).unwrap();
let t2 = vec![0x28; 60000];
let p2 = r.send(&t2).unwrap();
assert_eq!(p2.counter, 2);
let p2 = i.recv(p2).unwrap();
assert_eq!(p2, t2);
assert_eq!(p1.counter, 1);
let p1 = i.recv(p1).unwrap();
assert_eq!(p1, t1);
}
*/
|
// SPDX-License-Identifier: Apache-2.0
use super::Vm;
use crate::backend::{Command, Thread};
use sallyport::syscall::enarx::MemInfo;
use sallyport::syscall::{SYS_ENARX_BALLOON_MEMORY, SYS_ENARX_MEM_INFO};
use sallyport::KVM_SYSCALL_TRIGGER_PORT;
use super::personality::Personality;
use anyhow::{anyhow, Result};
use kvm_ioctls::{VcpuExit, VcpuFd};
use primordial::{Address, Register};
use sallyport::{Block, Reply};
use std::sync::{Arc, RwLock};
/// A single guest vCPU paired with a handle to the shared VM ("keep") state.
pub struct Cpu<P: Personality> {
    // kvm vCPU file descriptor this thread drives.
    fd: VcpuFd,
    // Shared, lock-protected VM state (memory regions, sallyport blocks, ...).
    keep: Arc<RwLock<Vm<P>>>,
}
impl<P: Personality> Cpu<P> {
    /// Wrap a kvm vCPU file descriptor together with the VM it belongs to.
    ///
    /// NOTE(review): currently infallible; the `Result` return is presumably
    /// kept for signature uniformity with other backends — confirm.
    pub fn new(fd: VcpuFd, keep: Arc<RwLock<Vm<P>>>) -> Result<Self> {
        Ok(Self { fd, keep })
    }
}
impl<P: Personality> Thread for Cpu<P> {
    /// Run the vCPU until it exits, then translate the exit into a `Command`.
    ///
    /// The guest requests host services by writing a 16-bit sallyport block
    /// number to `KVM_SYSCALL_TRIGGER_PORT`; any other exit reason is fatal.
    fn enter(&mut self) -> Result<Command> {
        match self.fd.run()? {
            VcpuExit::IoOut(port, data) => match port {
                KVM_SYSCALL_TRIGGER_PORT => {
                    let mut keep = self.keep.write().unwrap();
                    // The `out` instruction carries exactly two bytes: the
                    // little-endian index of the sallyport block to service.
                    debug_assert_eq!(data.len(), 2);
                    let block_nr = data[0] as usize + ((data[1] as usize) << 8);
                    // SAFETY: assumes `syscall_blocks.start`/`count` describe a
                    // valid, live array of `Block`s established during VM setup
                    // — TODO confirm against the Vm construction code.
                    let sallyport: &mut Block = unsafe {
                        std::slice::from_raw_parts_mut(
                            keep.syscall_blocks.start.as_mut_ptr(),
                            keep.syscall_blocks.count.get(),
                        )
                        .get_mut(block_nr)
                        .unwrap()
                    };
                    let syscall_nr: i64 = unsafe { sallyport.msg.req.num.into() };
                    match syscall_nr {
                        // Ordinary syscall numbers are handed back to the caller
                        // to be proxied on the guest's behalf.
                        0..=512 => Ok(Command::SysCall(sallyport)),
                        SYS_ENARX_BALLOON_MEMORY => {
                            // Grow guest memory by `pages` pages and report the
                            // new region's address back through the block.
                            let pages = unsafe { sallyport.msg.req.arg[0].into() };
                            let result = keep.add_memory(pages).map(|addr| {
                                let ok_result: [Register<usize>; 2] = [addr.into(), 0.into()];
                                ok_result
                            })?;
                            sallyport.msg.rep = Reply::from(Ok(result));
                            Ok(Command::Continue)
                        }
                        SYS_ENARX_MEM_INFO => {
                            // Report the memory-slot capacity and the virtual
                            // start address of the first region to the guest.
                            let mem_slots = keep.kvm.get_nr_memslots();
                            let virt_start = Address::from(
                                keep.regions.first().unwrap().as_virt().start.as_ptr(),
                            );
                            let mem_info: MemInfo = MemInfo {
                                virt_start,
                                mem_slots,
                            };
                            // Serialize the reply payload into the block itself.
                            let c = sallyport.cursor();
                            c.write(&mem_info)
                                .map_err(|_| anyhow!("Failed to allocate MemInfo in Block"))?;
                            let ok_result: [Register<usize>; 2] = [0.into(), 0.into()];
                            sallyport.msg.rep = Reply::from(Ok(ok_result));
                            Ok(Command::Continue)
                        }
                        x => Err(anyhow!("syscall {} not implemented", x)),
                    }
                }
                _ => Err(anyhow!("data from unexpected port: {}", port)),
            },
            // Any other exit is unexpected; include register state in debug builds.
            exit_reason => {
                if cfg!(debug_assertions) {
                    Err(anyhow!(
                        "{:?} {:#x?} {:#x?}",
                        exit_reason,
                        self.fd.get_regs(),
                        self.fd.get_sregs()
                    ))
                } else {
                    Err(anyhow!("{:?}", exit_reason))
                }
            }
        }
    }
}
|
mod peers;
mod addrs;
mod connect;
mod disconnect;
use clap::{ App, AppSettings, SubCommand, ArgMatches };
use context::Context;
/// Build the clap definition for the `swarm` subcommand and its children.
///
/// `ArgRequiredElseHelp` makes clap print help (instead of doing nothing)
/// when `swarm` is invoked without one of the child subcommands.
pub fn subcommand() -> App<'static, 'static> {
    let swarm = SubCommand::with_name("swarm")
        .about("\
            Manipulate the network swarm.\n\
            \n\
            The swarm is the component that opens, listens for, and \
            maintains connections to other ipfs peers in the internet.\
            ")
        .setting(AppSettings::ArgRequiredElseHelp);
    swarm
        .subcommand(peers::subcommand())
        .subcommand(addrs::subcommand())
        .subcommand(connect::subcommand())
        .subcommand(disconnect::subcommand())
}
/// Dispatch the parsed `swarm` invocation to the matching child handler.
///
/// `ArgRequiredElseHelp` on the parent command guarantees clap hands us one
/// of the known subcommands with its matches, so any other shape is a bug.
pub fn run(context: &mut Context, matches: &ArgMatches) {
    match matches.subcommand() {
        ("addrs", Some(sub)) => addrs::run(context, sub),
        ("connect", Some(sub)) => connect::run(context, sub),
        ("disconnect", Some(sub)) => disconnect::run(context, sub),
        ("peers", Some(sub)) => peers::run(context, sub),
        _ => unreachable!(),
    }
}
|
// This is the main file of RPFM. Here is the main loop that builds the UI and controls
// his events.
// Disable warnings about unknown lints, so we don't have the linter warnings when compiling.
#![allow(unknown_lints)]
// Disable these two clippy linters. They throw a lot of false positives, and it's a pain in the ass
// to separate their warnings from the rest. Also, disable "match_bool" because the methods it suggest
// are harder to read than a match. And "redundant_closure", because the suggerences it gives doesn't work.
#![allow(doc_markdown,useless_format,match_bool,redundant_closure)]
// This disables the terminal window, so it doesn't show up when executing RPFM in Windows.
#![windows_subsystem = "windows"]
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
#[macro_use]
extern crate failure;
extern crate gtk;
extern crate gdk;
extern crate glib;
extern crate gio;
extern crate pango;
extern crate sourceview;
extern crate num;
extern crate url;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::cell::RefCell;
use std::rc::Rc;
use std::fs::{
DirBuilder, copy, remove_file, remove_dir_all
};
use std::env::args;
use failure::Error;
use url::Url;
use gio::prelude::*;
use gio::{
SimpleAction, Menu, MenuExt, MenuModel
};
use gdk::Atom;
use gtk::prelude::*;
use gtk::{
Builder, ApplicationWindow, Grid, TreePath, Clipboard, LinkButton, StyleContext,
TreeView, TreeSelection, TreeStore, ScrolledWindow, Application, CellRendererMode,
CellRendererText, TreeViewColumn, Popover, Button, ResponseType, Label,
ShortcutsWindow, ToVariant, Statusbar, FileChooserNative, FileChooserAction
};
use common::*;
use packfile::packfile::PackFile;
use packfile::packfile::PackedFile;
use packedfile::*;
use packedfile::db::schemas::*;
use packedfile::db::schemas_importer::*;
use settings::*;
use ui::*;
use ui::packedfile_db::*;
use ui::packedfile_loc::*;
use ui::packedfile_text::*;
use ui::packedfile_image::*;
use ui::packedfile_rigidmodel::*;
use ui::settings::*;
use ui::updater::*;
/// This macro is used to clone the variables into the closures without the compiler complaining.
/// This should be BEFORE the `mod xxx` stuff, so submodules can use it too.
macro_rules! clone {
    // Internal helper: pass closure parameters through unchanged,
    // including the `_` placeholder (which is not an `ident`).
    (@param _) => ( _ );
    (@param $x:ident) => ( $x );
    // `clone!(a, b => move || body)`: shadow each listed variable with a
    // clone, then build a zero-argument `move` closure capturing the clones.
    ($($n:ident),+ => move || $body:expr) => (
        {
            $( let $n = $n.clone(); )+
            move || $body
        }
    );
    // Same, but for `move` closures that take parameters.
    ($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
        {
            $( let $n = $n.clone(); )+
            move |$(clone!(@param $p),)+| $body
        }
    );
}
mod common;
mod ui;
mod packfile;
mod packedfile;
mod settings;
mod updater;
/// This constant gets RPFM's version from the `Cargo.toml` file, so we don't have to change it
/// in two different places in every update.
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// This constant is used to enable or disable the generation of a new Schema file at compile time.
/// If you don't want to explicitly create a new Schema for a game, leave this disabled.
const GENERATE_NEW_SCHEMA: bool = false;
/// This enum represents the current "Operational Mode" for RPFM. The allowed modes are:
/// - `Normal`: Use the default behavior for everything. This is the Default mode.
/// - `MyMod`: Use the `MyMod` specific behavior. This mode is used when you have a "MyMod" selected.
///   This mode holds a tuple `(game_folder_name, mod_name)`:
///   - `game_folder_name` is the folder name for that game in "MyMod"s folder (like `warhammer_2` or `rome_2`).
///   - `mod_name` is the name of the PackFile with `.pack` at the end.
#[derive(Clone)]
enum Mode {
    MyMod{ game_folder_name: String, mod_name: String },
    Normal,
}
/// This struct contains almost the entirety of the UI stuff, so we don't end up in chaos when
/// going inside/outside closures. The exception for this struct is stuff generated after RPFM is
/// started, like the TreeView for DB PackedFiles or the DB Decoder View.
#[derive(Clone)]
pub struct AppUI {
    // Clipboard.
    pub clipboard: Clipboard,
    // Main window.
    pub window: ApplicationWindow,
    // MenuBar at the top of the Window.
    pub menu_bar: Menu,
    // Section of the "MyMod" menu.
    pub my_mod_list: Menu,
    // Shortcut window.
    pub shortcuts_window: ShortcutsWindow,
    // This is the box where all the PackedFile Views are created.
    pub packed_file_data_display: Grid,
    // Status bar at the bottom of the program. Used to show informative messages.
    pub status_bar: Statusbar,
    // TreeView used to see the PackedFiles, plus its TreeStore and TreeSelection.
    pub folder_tree_view: TreeView,
    pub folder_tree_store: TreeStore,
    pub folder_tree_selection: TreeSelection,
    // Column and cells for the `TreeView`.
    pub folder_tree_view_cell: CellRendererText,
    pub folder_tree_view_column: TreeViewColumn,
    // Context Menu Popover for `folder_tree_view`. It's built from a Model, stored here too.
    pub folder_tree_view_context_menu: Popover,
    pub folder_tree_view_context_menu_model: MenuModel,
    // Actions of RPFM's MenuBar.
    pub menu_bar_new_packfile: SimpleAction,
    pub menu_bar_open_packfile: SimpleAction,
    pub menu_bar_save_packfile: SimpleAction,
    pub menu_bar_save_packfile_as: SimpleAction,
    pub menu_bar_preferences: SimpleAction,
    pub menu_bar_quit: SimpleAction,
    pub menu_bar_generate_dependency_pack_wh2: SimpleAction,
    pub menu_bar_patch_siege_ai_wh2: SimpleAction,
    pub menu_bar_create_map_prefab_wh2: SimpleAction,
    pub menu_bar_generate_dependency_pack_wh: SimpleAction,
    pub menu_bar_patch_siege_ai_wh: SimpleAction,
    pub menu_bar_create_map_prefab_wh: SimpleAction,
    pub menu_bar_generate_dependency_pack_att: SimpleAction,
    pub menu_bar_check_updates: SimpleAction,
    pub menu_bar_check_schema_updates: SimpleAction,
    pub menu_bar_open_patreon: SimpleAction,
    pub menu_bar_about: SimpleAction,
    pub menu_bar_change_packfile_type: SimpleAction,
    pub menu_bar_my_mod_new: SimpleAction,
    pub menu_bar_my_mod_delete: SimpleAction,
    pub menu_bar_my_mod_install: SimpleAction,
    pub menu_bar_my_mod_uninstall: SimpleAction,
    pub menu_bar_change_game_selected: SimpleAction,
    // Actions of the Context Menu for `folder_tree_view`.
    pub folder_tree_view_add_file: SimpleAction,
    pub folder_tree_view_add_folder: SimpleAction,
    pub folder_tree_view_add_from_packfile: SimpleAction,
    pub folder_tree_view_rename_packedfile: SimpleAction,
    pub folder_tree_view_delete_packedfile: SimpleAction,
    pub folder_tree_view_extract_packedfile: SimpleAction,
    pub folder_tree_view_create_loc: SimpleAction,
    pub folder_tree_view_create_db: SimpleAction,
    pub folder_tree_view_create_text: SimpleAction,
    pub folder_tree_view_mass_import_tsv_files: SimpleAction,
    // Model for the Context Menu of the DB Decoder (only the model; the menu is created and destroyed with the decoder).
    pub db_decoder_context_menu_model: MenuModel,
}
/// One Function to rule them all, One Function to find them,
/// One Function to bring them all and in the darkness bind them.
fn build_ui(application: &Application) {
// We get all the Arguments provided when starting RPFM. Why? If we are opening a PackFile by
// double-clicking on it (for example, with file asociation in windows) our current dir is the
// one where the PackFile is, not where the `rpfm-code.exe` is. So RPFM gets confused and it
// doesn't find his settings, his schemas,... To fix this, we need to get the folder where the
// executable is and use it as a base for all the path stuff. Note that this should only work on
// release, as the way it works it's used by cargo to run the debug builds.
let arguments = args().collect::<Vec<String>>();
// In debug mode, we just take the current path (so we don't break debug builds). In Release mode,
// we take the `.exe` path. We use unwrap here because in case of fail, we want to crash RPFM.
let rpfm_path: PathBuf = if cfg!(debug_assertions) {
std::env::current_dir().unwrap()
} else {
let mut path = std::env::current_exe().unwrap();
path.pop();
path
};
// We create the `Clipboard`.
let clipboard_atom = Atom::intern("CLIPBOARD");
let clipboard = Clipboard::get(&clipboard_atom);
// We import the Glade design and get all the UI objects into variables.
let help_window = include_str!("gtk/help.ui");
let menus = include_str!("gtk/menus.ui");
let builder = Builder::new_from_string(help_window);
// We add all the UI onjects to the same builder. You know, one to rule them all.
builder.add_from_string(menus).unwrap();
// Create the main window.
let main_window = MainWindow::create_main_window(application, &rpfm_path);
// The Context Menu Popover for `folder_tree_view` it's a little tricky to get. We need to
// get the stuff it's based on and then create it and put it into the AppUI.
let folder_tree_view_context_menu_model = builder.get_object("context_menu_packfile").unwrap();
let folder_tree_view_context_menu = Popover::new_from_model(Some(&main_window.folder_tree_view), &folder_tree_view_context_menu_model);
// First, create the AppUI to hold all the UI stuff. All the stuff here it's from the executable
// so we can unwrap it without any problems.
let app_ui = AppUI {
// Clipboard.
clipboard,
// Main window.
window: main_window.window,
// MenuBar at the top of the Window.
menu_bar: builder.get_object("menubar").unwrap(),
// Section of the "MyMod" menu.
my_mod_list: builder.get_object("my-mod-list").unwrap(),
// Shortcut window.
shortcuts_window: builder.get_object("shortcuts-main-window").unwrap(),
// This is the box where all the PackedFile Views are created.
packed_file_data_display: main_window.packed_file_data_display,
// Status bar at the bottom of the program. To show informative messages.
status_bar: main_window.status_bar,
// TreeView used to see the PackedFiles, and his TreeStore and TreeSelection.
folder_tree_view: main_window.folder_tree_view,
folder_tree_store: main_window.folder_tree_store,
folder_tree_selection: main_window.folder_tree_selection,
// Column and cells for the `TreeView`.
folder_tree_view_cell: main_window.folder_tree_view_cell,
folder_tree_view_column: main_window.folder_tree_view_column,
// Context Menu Popover for `folder_tree_view`. It's build from a Model, stored here too.
folder_tree_view_context_menu,
folder_tree_view_context_menu_model,
// Actions of RPFM's MenuBar.
menu_bar_new_packfile: SimpleAction::new("new-packfile", None),
menu_bar_open_packfile: SimpleAction::new("open-packfile", None),
menu_bar_save_packfile: SimpleAction::new("save-packfile", None),
menu_bar_save_packfile_as: SimpleAction::new("save-packfile-as", None),
menu_bar_preferences: SimpleAction::new("preferences", None),
menu_bar_quit: SimpleAction::new("quit", None),
menu_bar_generate_dependency_pack_wh2: SimpleAction::new("generate-dependency-pack-wh2", None),
menu_bar_patch_siege_ai_wh2: SimpleAction::new("patch-siege-ai-wh2", None),
menu_bar_create_map_prefab_wh2: SimpleAction::new("create-map-prefab-wh2", None),
menu_bar_generate_dependency_pack_wh: SimpleAction::new("generate-dependency-pack-wh", None),
menu_bar_patch_siege_ai_wh: SimpleAction::new("patch-siege-ai-wh", None),
menu_bar_create_map_prefab_wh: SimpleAction::new("create-map-prefab-wh", None),
menu_bar_generate_dependency_pack_att: SimpleAction::new("generate-dependency-pack-att", None),
menu_bar_check_updates: SimpleAction::new("check-updates", None),
menu_bar_check_schema_updates: SimpleAction::new("check-schema-updates", None),
menu_bar_open_patreon: SimpleAction::new("open-patreon", None),
menu_bar_about: SimpleAction::new("about", None),
menu_bar_change_packfile_type: SimpleAction::new_stateful("change-packfile-type", glib::VariantTy::new("s").ok(), &"mod".to_variant()),
menu_bar_my_mod_new: SimpleAction::new("my-mod-new", None),
menu_bar_my_mod_delete: SimpleAction::new("my-mod-delete", None),
menu_bar_my_mod_install: SimpleAction::new("my-mod-install", None),
menu_bar_my_mod_uninstall: SimpleAction::new("my-mod-uninstall", None),
menu_bar_change_game_selected: SimpleAction::new_stateful("change-game-selected", glib::VariantTy::new("s").ok(), &"warhammer_2".to_variant()),
// Actions of the Context Menu for `folder_tree_view`.
folder_tree_view_add_file: SimpleAction::new("add-file", None),
folder_tree_view_add_folder: SimpleAction::new("add-folder", None),
folder_tree_view_add_from_packfile: SimpleAction::new("add-from-packfile", None),
folder_tree_view_rename_packedfile: SimpleAction::new("rename-packedfile", None),
folder_tree_view_delete_packedfile: SimpleAction::new("delete-packedfile", None),
folder_tree_view_extract_packedfile: SimpleAction::new("extract-packedfile", None),
folder_tree_view_create_loc: SimpleAction::new("create-loc", None),
folder_tree_view_create_db: SimpleAction::new("create-db", None),
folder_tree_view_create_text: SimpleAction::new("create-text", None),
folder_tree_view_mass_import_tsv_files: SimpleAction::new("mass-import-tsv", None),
// Model for the Context Menu of the DB Decoder (only the model, the menu is created and destroyed with the decoder).
db_decoder_context_menu_model: builder.get_object("context_menu_db_decoder").unwrap(),
};
// Set the main menu bar for the app. This one can appear in all the windows and needs to be
// enabled or disabled per window.
application.set_menubar(&app_ui.menu_bar);
// Config stuff for `app_ui.shortcuts_window`.
app_ui.shortcuts_window.set_title("Shortcuts");
app_ui.shortcuts_window.set_size_request(600, 400);
app_ui.window.set_help_overlay(Some(&app_ui.shortcuts_window));
// Config stuff for MenuBar Actions.
application.add_action(&app_ui.menu_bar_new_packfile);
application.add_action(&app_ui.menu_bar_open_packfile);
application.add_action(&app_ui.menu_bar_save_packfile);
application.add_action(&app_ui.menu_bar_save_packfile_as);
application.add_action(&app_ui.menu_bar_preferences);
application.add_action(&app_ui.menu_bar_quit);
application.add_action(&app_ui.menu_bar_generate_dependency_pack_wh2);
application.add_action(&app_ui.menu_bar_patch_siege_ai_wh2);
application.add_action(&app_ui.menu_bar_create_map_prefab_wh2);
application.add_action(&app_ui.menu_bar_generate_dependency_pack_wh);
application.add_action(&app_ui.menu_bar_patch_siege_ai_wh);
application.add_action(&app_ui.menu_bar_create_map_prefab_wh);
application.add_action(&app_ui.menu_bar_generate_dependency_pack_att);
application.add_action(&app_ui.menu_bar_open_patreon);
application.add_action(&app_ui.menu_bar_about);
application.add_action(&app_ui.menu_bar_check_updates);
application.add_action(&app_ui.menu_bar_check_schema_updates);
application.add_action(&app_ui.menu_bar_change_packfile_type);
application.add_action(&app_ui.menu_bar_my_mod_new);
application.add_action(&app_ui.menu_bar_my_mod_delete);
application.add_action(&app_ui.menu_bar_my_mod_install);
application.add_action(&app_ui.menu_bar_my_mod_uninstall);
application.add_action(&app_ui.menu_bar_change_game_selected);
// Config stuff for ´folder_tree_view´ specific Actions.
application.add_action(&app_ui.folder_tree_view_add_file);
application.add_action(&app_ui.folder_tree_view_add_folder);
application.add_action(&app_ui.folder_tree_view_add_from_packfile);
application.add_action(&app_ui.folder_tree_view_rename_packedfile);
application.add_action(&app_ui.folder_tree_view_delete_packedfile);
application.add_action(&app_ui.folder_tree_view_extract_packedfile);
application.add_action(&app_ui.folder_tree_view_create_loc);
application.add_action(&app_ui.folder_tree_view_create_db);
application.add_action(&app_ui.folder_tree_view_create_text);
application.add_action(&app_ui.folder_tree_view_mass_import_tsv_files);
// Some Accels need to be specified here. Don't know why, but otherwise they do not work.
application.set_accels_for_action("app.add-file", &["<Primary>a"]);
application.set_accels_for_action("app.add-folder", &["<Primary>d"]);
application.set_accels_for_action("app.add-from-packfile", &["<Primary>w"]);
application.set_accels_for_action("app.rename-packedfile", &["<Primary>r"]);
application.set_accels_for_action("app.delete-packedfile", &["<Primary>Delete"]);
application.set_accels_for_action("app.extract-packedfile", &["<Primary>e"]);
application.set_accels_for_action("win.show-help-overlay", &["<Primary><Shift>h"]);
// We enable D&D PackFiles to `app_ui.folder_tree_view` to open them.
let targets = vec![gtk::TargetEntry::new("text/uri-list", gtk::TargetFlags::OTHER_APP, 0)];
app_ui.folder_tree_view.drag_dest_set(gtk::DestDefaults::ALL, &targets, gdk::DragAction::COPY);
// Then we display the "Tips" text.
display_help_tips(&app_ui.packed_file_data_display);
// This is to get the new schemas. It's controlled by a global const.
if GENERATE_NEW_SCHEMA {
// These are the paths needed for the new schemas. First one should be `assembly_kit/raw_data/db`.
// The second one should contain all the tables of the game, extracted directly from `data.pack`.
let assembly_kit_schemas_path: PathBuf = PathBuf::from("/home/frodo45127/schema_stuff/db_schemas/");
let testing_tables_path: PathBuf = PathBuf::from("/home/frodo45127/schema_stuff/db_tables/");
match import_schema(&assembly_kit_schemas_path, &testing_tables_path, &rpfm_path) {
Ok(_) => show_dialog(&app_ui.window, true, "Schema successfully created."),
Err(error) => return show_dialog(&app_ui.window, false, format!("Error while creating a new DB Schema file:\n{}", error.cause())),
}
}
// This variable is used to "Lock" the "Decode on select" feature of `app_ui.folder_tree_view`.
// We need it to lock this feature when we open a secondary PackFile and want to import some
// PackedFiles to our opened PackFile.
let is_folder_tree_view_locked = Rc::new(RefCell::new(false));
// This variable is used to "Lock" the "Delete PackedFile" action. We need this because this is
// the only action that can change the index of a PackedFile while it's open, causing it to try
// to save itself in the position of another PackedFile. This can trigger data corruption or an
// "index out of bounds" CTD in runtime, so we need this variable to check if we can delete a
// PackedFile before even trying it.
let is_packedfile_opened = Rc::new(RefCell::new(false));
// Here we define the `Accept` response for GTK, as it seems Restson causes it to fail to compile
// if we get them to i32 directly in the `if` statement.
// NOTE: For some bizarre reason, GTKFileChoosers return `Ok`, while native ones return `Accept`.
let gtk_response_accept: i32 = ResponseType::Accept.into();
// We need two PackFiles:
// - `pack_file_decoded`: This one will hold our opened PackFile.
// - `pack_file_decoded_extra`: This one will hold the PackFile opened for `app_ui.add_from_packfile`.
let pack_file_decoded = Rc::new(RefCell::new(PackFile::new()));
let pack_file_decoded_extra = Rc::new(RefCell::new(PackFile::new()));
// We load the list of Supported Games here.
// TODO: Move this to a const when const fn reach stable in Rust.
let supported_games = Rc::new(RefCell::new(GameInfo::new()));
// We load the settings here, and in case they doesn't exist, we create them.
let settings = Rc::new(RefCell::new(Settings::load(&rpfm_path, &supported_games.borrow()).unwrap_or_else(|_|Settings::new(&supported_games.borrow()))));
// Load the GTK Settings, like the Theme and Font used.
load_gtk_settings(&app_ui.window, &settings.borrow());
// We prepare the schema object to hold an Schema, leaving it as `None` by default.
let schema: Rc<RefCell<Option<Schema>>> = Rc::new(RefCell::new(None));
// This specifies the "Operational Mode" RPFM should use. By default it's Normal.
let mode = Rc::new(RefCell::new(Mode::Normal));
// And we prepare the stuff for the default game (paths, and those things).
let game_selected = Rc::new(RefCell::new(GameSelected::new(&settings.borrow(), &rpfm_path, &supported_games.borrow())));
// Set the default game as selected game.
app_ui.menu_bar_change_game_selected.change_state(&(&settings.borrow().default_game).to_variant());
// Try to open the dependency PackFile of our `game_selected`.
let dependency_database = match packfile::open_packfile(game_selected.borrow().game_dependency_packfile_path.to_path_buf()) {
Ok(pack_file) => Rc::new(RefCell::new(Some(pack_file.data.packed_files))),
Err(_) => Rc::new(RefCell::new(None)),
};
// Prepare the "MyMod" menu. This... atrocity needs to be in the following places for MyMod to open PackFiles:
// - At the start of the program (here).
// - At the end of MyMod creation.
// - At the end of MyMod deletion.
// - At the end of settings update.
build_my_mod_menu(
application,
&app_ui,
&settings.borrow(),
&mode,
&schema,
&game_selected,
&supported_games,
&dependency_database,
&pack_file_decoded,
&pack_file_decoded_extra,
&rpfm_path
);
// Check for updates at the start if we have this option enabled. Currently this hangs the UI,
// so do it before showing the UI.
if settings.borrow().check_updates_on_start {
check_updates(VERSION, None, Some(&app_ui.status_bar));
}
// Same with schema updates.
if settings.borrow().check_schema_updates_on_start {
check_schema_updates(VERSION, &rpfm_path, &supported_games.borrow(), &game_selected, &schema, None, Some(&app_ui.status_bar));
}
// Concatenate and push again the last two messages of the Statusbar, to be able to show both message at the same time.
// FIXME: This is a dirty trick, so it should be fixed in the future.
concatenate_check_update_messages(&app_ui.status_bar);
// We bring up the main window.
app_ui.window.show_all();
// End of the "Getting Ready" part.
// From here, it's all event handling.
// First, we catch the close window event, and close the program when we do it.
app_ui.window.connect_delete_event(clone!(
application,
pack_file_decoded,
app_ui => move |_,_| {
// If the current PackFile has been changed in any way, we pop up the "Are you sure?" message.
if are_you_sure(&app_ui.window, pack_file_decoded.borrow().extra_data.is_modified, false) {
// If we got confirmation...
application.quit()
}
Inhibit(true)
}
));
// Set the current "Operational Mode" to `Normal`.
set_my_mod_mode(&app_ui, &mode, None);
// Disable the "PackFile Management" actions by default.
enable_packfile_actions(&app_ui, &game_selected, false);
// Disable all the Contextual Menu actions by default.
app_ui.folder_tree_view_add_file.set_enabled(false);
app_ui.folder_tree_view_add_folder.set_enabled(false);
app_ui.folder_tree_view_add_from_packfile.set_enabled(false);
app_ui.folder_tree_view_rename_packedfile.set_enabled(false);
app_ui.folder_tree_view_delete_packedfile.set_enabled(false);
app_ui.folder_tree_view_extract_packedfile.set_enabled(false);
app_ui.folder_tree_view_create_loc.set_enabled(false);
app_ui.folder_tree_view_create_db.set_enabled(false);
app_ui.folder_tree_view_create_text.set_enabled(false);
app_ui.folder_tree_view_mass_import_tsv_files.set_enabled(false);
// If there is a "MyMod" path set in the settings...
if let Some(ref path) = settings.borrow().paths.my_mods_base_path {
// And it's a valid directory, enable the "New MyMod" button.
if path.is_dir() { app_ui.menu_bar_my_mod_new.set_enabled(true); }
// Otherwise, disable it.
else { app_ui.menu_bar_my_mod_new.set_enabled(false); }
}
// Otherwise, disable it.
else { app_ui.menu_bar_my_mod_new.set_enabled(false); }
/*
--------------------------------------------------------
Superior Menu: "File"
--------------------------------------------------------
*/
// When we hit the "New PackFile" button or use its shortcut.
// Resets the UI to a brand-new, unsaved PackFile for the currently selected game.
app_ui.menu_bar_new_packfile.connect_activate(clone!(
    app_ui,
    schema,
    game_selected,
    supported_games,
    rpfm_path,
    mode,
    pack_file_decoded_extra,
    pack_file_decoded => move |_,_| {

        // If the current PackFile has been changed in any way, we pop up the "Are you sure?" message.
        if are_you_sure(&app_ui.window, pack_file_decoded.borrow().extra_data.is_modified, false) {

            // If there is no secondary PackFile opened using the "Data View" at the right side...
            if pack_file_decoded_extra.borrow().extra_data.file_name.is_empty() {

                // We need to destroy any children that the packed_file_data_display we use may have, cleaning it.
                let children_to_utterly_destroy = app_ui.packed_file_data_display.get_children();
                if !children_to_utterly_destroy.is_empty() {
                    for i in &children_to_utterly_destroy {
                        i.destroy();
                    }
                }

                // Show the "Tips" in the now-empty display area.
                display_help_tips(&app_ui.packed_file_data_display);
            }

            // Get the ID for the new PackFile: the `id` of the game whose folder_name matches the current GameSelected.
            let pack_file_id = supported_games.borrow().iter().filter(|x| x.folder_name == game_selected.borrow().game).map(|x| x.id.to_owned()).collect::<String>();

            // Create the new PackFile with a placeholder name.
            *pack_file_decoded.borrow_mut() = packfile::new_packfile("unknown.pack".to_string(), &pack_file_id);

            // Clear the `TreeView` before updating it (fixes CTD with borrowed PackFile).
            app_ui.folder_tree_store.clear();

            // Build the `TreeView` from scratch for the new PackFile.
            update_treeview(
                &app_ui.folder_tree_store,
                &*pack_file_decoded.borrow(),
                &app_ui.folder_tree_selection,
                TreeViewOperation::Build,
                &TreePathType::None,
            );

            // Set the new mod as "Not modified".
            set_modified(false, &app_ui.window, &mut *pack_file_decoded.borrow_mut());

            // Enable the actions available for the PackFile from the `MenuBar`.
            enable_packfile_actions(&app_ui, &game_selected, true);

            // Set the current "Operational Mode" to Normal, as this is a "New" mod.
            set_my_mod_mode(&app_ui, &mode, None);

            // Try to load the Schema for this PackFile's game. A failure just leaves it as `None`.
            *schema.borrow_mut() = Schema::load(&rpfm_path, &supported_games.borrow().iter().filter(|x| x.folder_name == *game_selected.borrow().game).map(|x| x.schema.to_owned()).collect::<String>()).ok();
        }
    }
));
// When we hit the "Open PackFile" button.
// Shows a native file chooser and, on accept, opens the selected PackFile.
app_ui.menu_bar_open_packfile.connect_activate(clone!(
    app_ui,
    game_selected,
    rpfm_path,
    schema,
    settings,
    mode,
    supported_games,
    dependency_database,
    pack_file_decoded_extra,
    pack_file_decoded => move |_,_| {

        // If the current PackFile has been changed in any way, we pop up the "Are you sure?" message.
        if are_you_sure(&app_ui.window, pack_file_decoded.borrow().extra_data.is_modified, false) {

            // If we got confirmation, build the native "Open" file chooser.
            let file_chooser_open_packfile = FileChooserNative::new(
                "Open PackFile...",
                &app_ui.window,
                FileChooserAction::Open,
                "Accept",
                "Cancel"
            );

            // We only want to open PackFiles, so only show them.
            file_chooser_filter_packfile(&file_chooser_open_packfile, "*.pack");

            // In case we have a default path for the game selected, we use it as base path for opening files.
            if let Some(ref path) = game_selected.borrow().game_data_path {

                // We check that it actually exists before setting it.
                if path.is_dir() {
                    file_chooser_open_packfile.set_current_folder(&path);
                }
            }

            // If we hit "Accept"...
            if file_chooser_open_packfile.run() == gtk_response_accept {

                // Open the PackFile (or die trying it!). Any error is reported in a dialog.
                if let Err(error) = open_packfile(
                    file_chooser_open_packfile.get_filename().unwrap(),
                    &rpfm_path,
                    &app_ui,
                    &settings.borrow(),
                    &mode,
                    &schema,
                    &supported_games.borrow(),
                    &game_selected,
                    &dependency_database,
                    &(false, None),
                    &pack_file_decoded,
                    &pack_file_decoded_extra
                ) { show_dialog(&app_ui.window, false, error.cause()) };
            }
        }
    }
));
// When we hit the "Save PackFile" button.
// Saves in place when the PackFile already exists on disk; otherwise delegates to "Save as".
app_ui.menu_bar_save_packfile.connect_activate(clone!(
    pack_file_decoded,
    settings,
    app_ui => move |_,_| {

        // If our PackFile is editable (type and settings allow writing)...
        if pack_file_decoded.borrow().is_editable(&settings.borrow()) {

            // If our PackFile already exists in the filesystem, we save it to that file directly.
            if pack_file_decoded.borrow().extra_data.file_path.is_file() {

                // We try to save the PackFile at the provided path, reporting success or failure in a dialog.
                let success = match packfile::save_packfile(&mut *pack_file_decoded.borrow_mut(), None) {
                    Ok(_) => {
                        show_dialog(&app_ui.window, true, "PackFile succesfully saved.");
                        true
                    },
                    Err(error) => {
                        show_dialog(&app_ui.window, false, error.cause());
                        false
                    }
                };

                // If we succeed, set the mod as "Not modified".
                if success {
                    set_modified(false, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
                }
            }

            // If our PackFile doesn't exist in the filesystem (it's new, or the base PackFile has been deleted),
            // we trigger the "Save as" dialog.
            else { app_ui.menu_bar_save_packfile_as.activate(None); }
        }

        // Otherwise, return a Message specifying the error.
        else { show_dialog(&app_ui.window, false, "This type of PackFile is supported in Read-Only mode.\n\nThis can happen due to:\n - The PackFile's type is 'Boot', 'Release' or 'Patch' and you have 'Allow edition of CA PackFiles' disabled in the settings.\n - The PackFile's type is 'Other'.\n\n If you really want to save it, go to 'PackFile/Change PackFile Type' and change his type to 'Mod' or 'Movie'."); }
    }
));
// When we hit the "Save PackFile as" button.
// Shows a native "Save" chooser, forces a ".pack" extension, saves, and updates the TreeView.
app_ui.menu_bar_save_packfile_as.connect_activate(clone!(
    pack_file_decoded,
    game_selected,
    settings,
    app_ui,
    mode => move |_,_| {

        // If our PackFile is editable (type and settings allow writing)...
        if pack_file_decoded.borrow().is_editable(&settings.borrow()) {

            // Create the FileChooserNative.
            let file_chooser_save_packfile = FileChooserNative::new(
                "Save PackFile as...",
                &app_ui.window,
                FileChooserAction::Save,
                "Save",
                "Cancel"
            );

            // We want to ask before overwriting files. Just in case. Otherwise, there can be an accident.
            file_chooser_save_packfile.set_do_overwrite_confirmation(true);

            // We are only interested in seeing ".pack" files.
            file_chooser_filter_packfile(&file_chooser_save_packfile, "*.pack");

            // We put the current name of the file as "Suggested" name.
            file_chooser_save_packfile.set_current_name(&pack_file_decoded.borrow().extra_data.file_name);

            // If we are saving an existing PackFile with another name, we start in its current path.
            if pack_file_decoded.borrow().extra_data.file_path.is_file() {
                file_chooser_save_packfile.set_filename(&pack_file_decoded.borrow().extra_data.file_path);
            }

            // In case we have a default path for the game selected and that path is valid, we use it as base path for saving our PackFile.
            else if let Some(ref path) = game_selected.borrow().game_data_path {

                // We check it actually exists before setting it.
                if path.is_dir() {
                    file_chooser_save_packfile.set_current_folder(path);
                }
            }

            // If we hit "Accept" (and "Accept" again if we are overwriting a PackFile)...
            if file_chooser_save_packfile.run() == gtk_response_accept {

                // Get the new PackFile's path.
                let mut file_path = file_chooser_save_packfile.get_filename().unwrap();

                // If the chosen name doesn't already have the ".pack" extension, set it.
                // NOTE: the previous check used `Path::ends_with(".pack")`, which compares whole
                // path *components* (not string suffixes), so it was true for every normal file
                // name and the intent of "only add it when missing" never actually worked.
                // Checking the extension directly does what the comment always claimed.
                if file_path.extension().and_then(|ext| ext.to_str()) != Some("pack") {
                    file_path.set_extension("pack");
                }

                // We try to save the PackFile at the provided path...
                let success = match packfile::save_packfile(&mut *pack_file_decoded.borrow_mut(), Some(file_path.to_path_buf())) {
                    Ok(_) => {
                        show_dialog(&app_ui.window, true, "PackFile succesfully saved.");
                        true
                    },
                    Err(error) => {
                        show_dialog(&app_ui.window, false, error.cause());
                        false
                    }
                };

                // If we succeed...
                if success {

                    // Set the mod as "Not modified".
                    set_modified(false, &app_ui.window, &mut *pack_file_decoded.borrow_mut());

                    // Select the first `TreeIter`, so the rename works.
                    app_ui.folder_tree_selection.select_iter(&app_ui.folder_tree_store.get_iter_first().unwrap());

                    // Update the TreeView to reflect the possible PackFile name change.
                    update_treeview(
                        &app_ui.folder_tree_store,
                        &*pack_file_decoded.borrow(),
                        &app_ui.folder_tree_selection,
                        TreeViewOperation::Rename(file_path.file_name().unwrap().to_string_lossy().as_ref().to_owned()),
                        &TreePathType::None,
                    );

                    // Set the current "Operational Mode" to Normal, just in case "MyMod" is the current one.
                    set_my_mod_mode(&app_ui, &mode, None);
                }
            }
        }

        // Otherwise, return a Message specifying the error.
        else { show_dialog(&app_ui.window, false, "This type of PackFile is supported in Read-Only mode.\n\nThis can happen due to:\n - The PackFile's type is 'Boot', 'Release' or 'Patch' and you have 'Allow edition of CA PackFiles' disabled in the settings.\n - The PackFile's type is 'Other'.\n\n If you really want to save it, go to 'PackFile/Change PackFile Type' and change his type to 'Mod' or 'Movie'."); }
    }
));
// When changing the type of the opened PackFile.
// Maps the menu's string state to the numeric type stored in the PackFile header,
// updates the action state, and marks the PackFile as modified — but only when the
// type actually changes, so re-selecting the current type is a no-op.
app_ui.menu_bar_change_packfile_type.connect_activate(clone!(
    app_ui,
    pack_file_decoded => move |menu_bar_change_packfile_type, selected_type| {
        if let Some(state) = selected_type.clone() {

            // Extract the requested type from the action's Variant. If the Variant
            // doesn't hold a String we simply ignore the activation instead of
            // panicking like the previous `unwrap()` did.
            let new_state: Option<String> = state.get();
            if let Some(new_state) = new_state {

                // Single source of truth for the string-state -> (header value, canonical
                // state name) mapping. Any unknown string falls back to "other" (9999),
                // preserving the original catch-all behaviour.
                let (new_type, state_name) = match &*new_state {
                    "boot" => (0, "boot"),
                    "release" => (1, "release"),
                    "patch" => (2, "patch"),
                    "mod" => (3, "mod"),
                    "movie" => (4, "movie"),
                    _ => (9999, "other"),
                };

                // Only touch the PackFile when the type really changes, so we don't
                // flag it as modified for selecting the already-active type.
                if pack_file_decoded.borrow().header.pack_file_type != new_type {
                    pack_file_decoded.borrow_mut().header.pack_file_type = new_type;
                    menu_bar_change_packfile_type.change_state(&state_name.to_variant());
                    set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
                }
            }
        }
    }
));
// When we hit the "Preferences" button.
// Opens the Settings window and wires its Accept/Cancel/close handlers. The action is
// disabled while the window is open so only one Settings window can exist at a time.
app_ui.menu_bar_preferences.connect_activate(clone!(
    app_ui,
    game_selected,
    supported_games,
    pack_file_decoded,
    settings,
    rpfm_path,
    mode,
    application,
    dependency_database,
    pack_file_decoded_extra,
    schema => move |_,_| {

        // We disable the action, so we can't start 2 "Settings" windows at the same time.
        app_ui.menu_bar_preferences.set_enabled(false);

        // We create the "Settings Window" and load our current settings to it.
        let settings_stuff = Rc::new(RefCell::new(SettingsWindow::create_settings_window(&application, &app_ui.window, &rpfm_path, &supported_games.borrow())));
        settings_stuff.borrow().load_to_settings_window(&*settings.borrow());

        // When we press the "Accept" button: persist the new settings and react to what changed.
        settings_stuff.borrow().settings_accept.connect_button_release_event(clone!(
            pack_file_decoded,
            app_ui,
            settings_stuff,
            settings,
            game_selected,
            supported_games,
            rpfm_path,
            schema,
            mode,
            dependency_database,
            pack_file_decoded_extra,
            application => move |_,_| {

                // Save a copy of our old `Settings` to use in the change-detection checks below.
                let old_settings = settings.borrow().clone();

                // Save the current `Settings` from the "Settings Window" as our new `Settings`.
                *settings.borrow_mut() = settings_stuff.borrow().save_from_settings_window(&supported_games.borrow());

                // Save our new `Settings` to a settings file, and report in case of error.
                if let Err(error) = settings.borrow().save(&rpfm_path) {
                    show_dialog(&app_ui.window, false, error.cause());
                }

                // Destroy the "Settings Window".
                settings_stuff.borrow().settings_window.destroy();

                // Restore the action, so we can open another "Settings Window" again.
                app_ui.menu_bar_preferences.set_enabled(true);

                // If we changed the "MyMod's Folder" path...
                if settings.borrow().paths.my_mods_base_path != old_settings.paths.my_mods_base_path {

                    // And we have currently opened a "MyMod"...
                    if let Mode::MyMod{..} = *mode.borrow() {

                        // We disable the "MyMod" mode, but leave the PackFile open, so the user doesn't lose any unsaved change.
                        set_my_mod_mode(&app_ui, &mode, None);

                        // Then recreate the "MyMod" submenu.
                        build_my_mod_menu(
                            &application,
                            &app_ui,
                            &settings.borrow(),
                            &mode,
                            &schema,
                            &game_selected,
                            &supported_games,
                            &dependency_database,
                            &pack_file_decoded,
                            &pack_file_decoded_extra,
                            &rpfm_path
                        );
                    }
                }

                // If there is a "MyMod" path set in the settings...
                if let Some(ref path) = settings.borrow().paths.my_mods_base_path {

                    // And it's a valid directory, enable the "New MyMod" button.
                    if path.is_dir() { app_ui.menu_bar_my_mod_new.set_enabled(true); }

                    // Otherwise, disable it.
                    else { app_ui.menu_bar_my_mod_new.set_enabled(false); }
                }

                // Otherwise, disable it.
                else { app_ui.menu_bar_my_mod_new.set_enabled(false); }

                // If we have changed the path of any of the games, and that game is the current `GameSelected`,
                // update the current `GameSelected`.
                let new_game_paths = settings.borrow().paths.game_paths.clone();
                let game_paths = new_game_paths.iter().zip(old_settings.paths.game_paths.iter());
                let changed_paths_games = game_paths.filter(|x| x.0.path != x.1.path).map(|x| x.0.game.to_owned()).collect::<Vec<String>>();

                // If our current `GameSelected` is in the `changed_paths_games` list...
                if changed_paths_games.contains(&game_selected.borrow().game) {

                    // Re-select the same game, so `GameSelected` updates its paths.
                    let new_game_selected = game_selected.borrow().game.to_owned();
                    app_ui.menu_bar_change_game_selected.activate(Some(&new_game_selected.to_variant()));
                }
                Inhibit(false)
            }
        ));

        // When we press the "Cancel" button, we close the window and discard all changes.
        settings_stuff.borrow().settings_cancel.connect_button_release_event(clone!(
            settings_stuff,
            settings,
            rpfm_path,
            supported_games,
            app_ui => move |_,_| {

                // Destroy the "Settings Window".
                settings_stuff.borrow().settings_window.destroy();

                // Restore the action, so we can open another "Settings Window" again.
                app_ui.menu_bar_preferences.set_enabled(true);

                // Reload the old `Settings` from the "Settings File" so, if we have changed anything, it's undone.
                *settings.borrow_mut() = Settings::load(&rpfm_path, &supported_games.borrow()).unwrap_or_else(|_|Settings::new(&supported_games.borrow()));

                // Reload the GTK-Related settings.
                load_gtk_settings(&app_ui.window, &settings.borrow());
                Inhibit(false)
            }
        ));

        // We catch the destroy event to restore the "Preferences" button.
        // This behaves exactly like "Cancel": settings are reloaded from disk.
        settings_stuff.borrow().settings_window.connect_delete_event(clone!(
            settings,
            rpfm_path,
            supported_games,
            app_ui => move |settings_window, _| {

                // Destroy the "Settings Window".
                settings_window.destroy();

                // Restore the action, so we can open another "Settings Window" again.
                app_ui.menu_bar_preferences.set_enabled(true);

                // Reload the old `Settings` from the "Settings File" so, if we have changed anything, it's undone.
                *settings.borrow_mut() = Settings::load(&rpfm_path, &supported_games.borrow()).unwrap_or_else(|_|Settings::new(&supported_games.borrow()));

                // Reload the GTK-Related settings.
                load_gtk_settings(&app_ui.window, &settings.borrow());
                Inhibit(false)
            }
        ));
    }
));
// When we hit the "Quit" button.
// If the open PackFile has unsaved changes, ask the user first; only quit once they agree.
app_ui.menu_bar_quit.connect_activate(clone!(
    pack_file_decoded,
    application,
    app_ui => move |_,_| {

        // Read the modified flag first so the borrow is released before the dialog runs.
        let is_modified = pack_file_decoded.borrow().extra_data.is_modified;
        if are_you_sure(&app_ui.window, is_modified, false) {
            application.quit();
        }
    }
));
/*
--------------------------------------------------------
Superior Menu: "My Mod"
--------------------------------------------------------
*/
// When we hit the "New mod" button.
// Opens the "New MyMod" window; on Accept it creates the PackFile, its on-disk folders,
// saves it, and switches the app into "MyMod" mode.
app_ui.menu_bar_my_mod_new.connect_activate(clone!(
    app_ui,
    settings,
    application,
    schema,
    game_selected,
    supported_games,
    rpfm_path,
    mode,
    dependency_database,
    pack_file_decoded_extra,
    pack_file_decoded => move |_,_| {

        // We disable the action, so we can't start 2 "New MyMod" windows at the same time.
        app_ui.menu_bar_my_mod_new.set_enabled(false);

        // Create the "New MyMod" window and put all its stuff into a variable.
        let new_mod_stuff = Rc::new(RefCell::new(MyModNewWindow::create_my_mod_new_window(&application, &app_ui.window, &supported_games.borrow(), &game_selected.borrow(), &settings.borrow(), &rpfm_path)));

        // When we press the "Accept" button.
        new_mod_stuff.borrow().my_mod_new_accept.connect_button_release_event(clone!(
            new_mod_stuff,
            application,
            app_ui,
            settings,
            schema,
            mode,
            supported_games,
            rpfm_path,
            game_selected,
            dependency_database,
            pack_file_decoded_extra,
            pack_file_decoded => move |_,_| {

                // Get the mod name the user typed.
                let mod_name = new_mod_stuff.borrow().my_mod_new_name_entry.get_buffer().get_text();

                // Get the PackFile name (mod name + ".pack").
                let full_mod_name = format!("{}.pack", mod_name);

                // Change the `GameSelected` with the one we have chosen for the new "MyMod".
                let new_mod_game = &*new_mod_stuff.borrow().my_mod_new_game_list_combo.get_active_id().unwrap().to_owned();
                app_ui.menu_bar_change_game_selected.activate(Some(&new_mod_game.to_variant()));

                // Get the ID for the new PackFile (from the now-current GameSelected).
                let pack_file_id = supported_games.borrow().iter().filter(|x| x.folder_name == game_selected.borrow().game).map(|x| x.id.to_owned()).collect::<String>();

                // Create the new PackFile.
                *pack_file_decoded.borrow_mut() = packfile::new_packfile(full_mod_name.to_owned(), &pack_file_id);

                // Clear the `TreeView` before updating it (fixes CTD with borrowed PackFile).
                app_ui.folder_tree_store.clear();

                // Build the `TreeView`.
                update_treeview(
                    &app_ui.folder_tree_store,
                    &*pack_file_decoded.borrow(),
                    &app_ui.folder_tree_selection,
                    TreeViewOperation::Build,
                    &TreePathType::None,
                );

                // Set the new mod as "Not modified".
                set_modified(false, &app_ui.window, &mut *pack_file_decoded.borrow_mut());

                // Enable the actions available for the PackFile from the `MenuBar`.
                enable_packfile_actions(&app_ui, &game_selected, true);

                // Get its new path from the base "MyMod" path + `new_mod_game`.
                let mut my_mod_path = settings.borrow().paths.my_mods_base_path.clone().unwrap();
                my_mod_path.push(&new_mod_game);

                // Just in case the folder doesn't exist, we try to create it. It's safe to ignore this result.
                match DirBuilder::new().create(&my_mod_path){
                    Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
                };

                // We need to create another folder inside the game's folder with the name of the new "MyMod", to store extracted files.
                let mut my_mod_private_folder = my_mod_path.to_path_buf();
                my_mod_private_folder.push(mod_name.to_owned());
                match DirBuilder::new().create(&my_mod_private_folder) {
                    Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
                };

                // Add the PackFile name to the full path.
                my_mod_path.push(full_mod_name.to_owned());

                // Then we try to save the new "MyMod"s PackFile, and show a message in case of error.
                if let Err(error) = packfile::save_packfile(&mut pack_file_decoded.borrow_mut(), Some(my_mod_path.to_owned())) {
                    show_dialog(&app_ui.window, false, error.cause());
                }

                // If the new "MyMod" has been saved successfully...
                else {

                    // Set the current "Operational Mode" to `MyMod`.
                    set_my_mod_mode(&app_ui, &mode, Some(my_mod_path));

                    // Recreate the "MyMod" menu so the new mod shows up.
                    build_my_mod_menu(
                        &application,
                        &app_ui,
                        &settings.borrow(),
                        &mode,
                        &schema,
                        &game_selected,
                        &supported_games,
                        &dependency_database,
                        &pack_file_decoded,
                        &pack_file_decoded_extra,
                        &rpfm_path
                    );

                    // Destroy the "New MyMod" window,
                    new_mod_stuff.borrow().my_mod_new_window.destroy();

                    // Restore the action, so we can open another "New MyMod" window again.
                    app_ui.menu_bar_my_mod_new.set_enabled(true);
                }
                Inhibit(false)
            }
        ));

        // When we press the "Cancel" button, we close the window and re-enable the "New mod" action.
        new_mod_stuff.borrow().my_mod_new_cancel.connect_button_release_event(clone!(
            new_mod_stuff,
            app_ui => move |_,_| {

                // Destroy the "New MyMod" window,
                new_mod_stuff.borrow().my_mod_new_window.destroy();

                // Restore the action, so we can open another "New MyMod" window again.
                app_ui.menu_bar_my_mod_new.set_enabled(true);
                Inhibit(false)
            }
        ));

        // We catch the destroy event to restore the "New mod" action.
        new_mod_stuff.borrow().my_mod_new_window.connect_delete_event(clone!(
            app_ui => move |my_mod_new_window, _| {

                // Destroy the "New MyMod" window,
                my_mod_new_window.destroy();

                // Restore the action, so we can open another "New MyMod" window again.
                app_ui.menu_bar_my_mod_new.set_enabled(true);
                Inhibit(false)
            }
        ));
    }
));
// When we hit the "Delete" button.
// Deletes the selected "MyMod"'s PackFile and assets folder from disk, then resets the UI.
app_ui.menu_bar_my_mod_delete.connect_activate(clone!(
    app_ui,
    application,
    settings,
    schema,
    game_selected,
    rpfm_path,
    mode,
    supported_games,
    dependency_database,
    pack_file_decoded_extra,
    pack_file_decoded => move |_,_| {

        // This will delete stuff from disk, so we pop up the "Are you sure?" message to avoid accidents.
        if are_you_sure(&app_ui.window, true, true) {

            // We want to keep our "MyMod" name for the success message, so we store it here.
            let old_mod_name: String;

            // If we have a "MyMod" selected, try to delete its files from disk.
            // Any early error shows a dialog and aborts via `return`.
            let mod_deleted = match *mode.borrow() {
                Mode::MyMod {ref game_folder_name, ref mod_name} => {

                    // We save the name of the PackFile for later use.
                    old_mod_name = mod_name.to_owned();

                    // And the "MyMod" path is configured...
                    if let Some(ref my_mods_base_path) = settings.borrow().paths.my_mods_base_path {

                        // We get its path: base path / game folder / mod name.
                        let mut my_mod_path = my_mods_base_path.to_path_buf();
                        my_mod_path.push(&game_folder_name);
                        my_mod_path.push(&mod_name);

                        // We check that path exists.
                        if !my_mod_path.is_file() {
                            return show_dialog(&app_ui.window, false, "PackFile doesn't exist.");
                        }

                        // And we delete that PackFile.
                        if let Err(error) = remove_file(&my_mod_path).map_err(Error::from) {
                            return show_dialog(&app_ui.window, false, error.cause());
                        }

                        // Now we get its asset folder (sibling folder named after the PackFile, without extension).
                        let mut my_mod_assets_path = my_mod_path.clone();
                        my_mod_assets_path.pop();
                        my_mod_assets_path.push(&my_mod_path.file_stem().unwrap().to_string_lossy().as_ref().to_owned());

                        // We check that path exists. This is optional, so it should allow the deletion
                        // process to continue with a warning.
                        if !my_mod_assets_path.is_dir() {
                            show_dialog(&app_ui.window, false, "Mod deleted, but his assets folder hasn't been found.");
                        }

                        // If the assets folder exists, we try to delete it.
                        else if let Err(error) = remove_dir_all(&my_mod_assets_path).map_err(Error::from) {
                            return show_dialog(&app_ui.window, false, error.cause());
                        }

                        // We return true, as we have deleted the files of the "MyMod".
                        true
                    }

                    // If the "MyMod" path is not configured, return an error.
                    else {
                        return show_dialog(&app_ui.window, false, "MyMod base path not configured.");
                    }
                }

                // If we don't have a "MyMod" selected, return an error.
                Mode::Normal => return show_dialog(&app_ui.window, false, "MyMod not selected."),
            };

            // If we deleted the "MyMod", we allow chaos to form below.
            if mod_deleted {

                // Set the current "Operational Mode" to `Normal`.
                set_my_mod_mode(&app_ui, &mode, None);

                // Replace the open PackFile with a dummy one, like during boot.
                *pack_file_decoded.borrow_mut() = PackFile::new();

                // Disable the actions available for the PackFile from the `MenuBar`.
                enable_packfile_actions(&app_ui, &game_selected, false);

                // Set the dummy mod as "Not modified".
                set_modified(false, &app_ui.window, &mut *pack_file_decoded.borrow_mut());

                // Clear the TreeView.
                app_ui.folder_tree_store.clear();

                // Rebuild the "MyMod" menu so the deleted mod disappears from it.
                build_my_mod_menu(
                    &application,
                    &app_ui,
                    &settings.borrow(),
                    &mode,
                    &schema,
                    &game_selected,
                    &supported_games,
                    &dependency_database,
                    &pack_file_decoded,
                    &pack_file_decoded_extra,
                    &rpfm_path
                );

                // Show the "MyMod" deleted Dialog.
                show_dialog(&app_ui.window, true, format!("MyMod \"{}\" deleted.", old_mod_name));
            }
        }
    }
));
// When we hit the "Install" button.
// Copies the selected "MyMod"'s PackFile into the game's /data folder.
app_ui.menu_bar_my_mod_install.connect_activate(clone!(
    app_ui,
    mode,
    game_selected,
    settings => move |_,_| {

        // Depending on our current "Mode", we choose what to do.
        match *mode.borrow() {

            // If we have a "MyMod" selected...
            Mode::MyMod {ref game_folder_name, ref mod_name} => {

                // And the "MyMod" path is configured...
                if let Some(ref my_mods_base_path) = settings.borrow().paths.my_mods_base_path {

                    // Get the `game_data_path` of the game.
                    let game_data_path = game_selected.borrow().game_data_path.clone();

                    // If we have a `game_data_path` for the current `GameSelected`...
                    if let Some(mut game_data_path) = game_data_path {

                        // We get the "MyMod"s PackFile path: base path / game folder / mod name.
                        let mut my_mod_path = my_mods_base_path.to_path_buf();
                        my_mod_path.push(&game_folder_name);
                        my_mod_path.push(&mod_name);

                        // We check that the "MyMod"s PackFile exists.
                        if !my_mod_path.is_file() {
                            return show_dialog(&app_ui.window, false, "PackFile doesn't exist.");
                        }

                        // We check that the destination path exists.
                        if !game_data_path.is_dir() {
                            return show_dialog(&app_ui.window, false, "Destination folder (..xxx/data) doesn't exist. You sure you configured the right folder for the game?");
                        }

                        // Get the destination path for the PackFile with the PackFile included.
                        game_data_path.push(&mod_name);

                        // And copy the PackFile to its destination. If the copy fails, return an error.
                        if let Err(error) = copy(my_mod_path, game_data_path).map_err(Error::from) {
                            return show_dialog(&app_ui.window, false, error.cause());
                        }
                    }

                    // If we don't have a `game_data_path` configured for the current `GameSelected`...
                    else {
                        return show_dialog(&app_ui.window, false, "Game folder path not configured.");
                    }
                }

                // If the "MyMod" path is not configured, return an error.
                else {
                    show_dialog(&app_ui.window, false, "MyMod base path not configured.");
                }
            }

            // If we have no MyMod selected, return an error.
            Mode::Normal => show_dialog(&app_ui.window, false, "MyMod not selected."),
        }
    }
));
// When we hit the "Uninstall" button.
// Removes the selected "MyMod"'s PackFile from the game's /data folder.
app_ui.menu_bar_my_mod_uninstall.connect_activate(clone!(
    app_ui,
    mode,
    game_selected => move |_,_| {

        // Depending on our current "Mode", we choose what to do.
        match *mode.borrow() {

            // If we have a "MyMod" selected...
            Mode::MyMod {ref mod_name,..} => {

                // Get the `game_data_path` of the game.
                let game_data_path = game_selected.borrow().game_data_path.clone();

                // If we have a `game_data_path` for the current `GameSelected`...
                if let Some(mut game_data_path) = game_data_path {

                    // Get the destination path for the PackFile with the PackFile included.
                    game_data_path.push(&mod_name);

                    // We check that the "MyMod" is actually installed in the provided path.
                    if !game_data_path.is_file() {
                        return show_dialog(&app_ui.window, false, "The currently selected \"MyMod\" is not installed.");
                    }

                    // If the "MyMod" is installed, we remove it. If there is a problem deleting it, return an error dialog.
                    else if let Err(error) = remove_file(game_data_path).map_err(Error::from) {
                        return show_dialog(&app_ui.window, false, error.cause());
                    }
                }

                // If we don't have a `game_data_path` configured for the current `GameSelected`...
                else {
                    show_dialog(&app_ui.window, false, "Game folder path not configured.");
                }
            }

            // If we have no MyMod selected, return an error.
            Mode::Normal => show_dialog(&app_ui.window, false, "MyMod not selected."),
        }
    }
));
/*
--------------------------------------------------------
Superior Menu: "Game Selected"
--------------------------------------------------------
*/
// When changing the selected game.
// Updates `GameSelected`, reloads its Schema and dependency database, and refreshes
// the PackFile-dependent actions when a PackFile is already open.
app_ui.menu_bar_change_game_selected.connect_activate(clone!(
    app_ui,
    rpfm_path,
    schema,
    mode,
    settings,
    supported_games,
    pack_file_decoded,
    dependency_database,
    game_selected => move |menu_bar_change_game_selected, selected| {

        // Get the new state of the action.
        if let Some(state) = selected.clone() {
            let new_state: String = state.get().unwrap();

            // Change the state of the action.
            menu_bar_change_game_selected.change_state(&new_state.to_variant());

            // Change the `GameSelected` object, passing it the configured path of the chosen game (if any).
            game_selected.borrow_mut().change_game_selected(&new_state, &settings.borrow().paths.game_paths.iter().filter(|x| x.game == new_state).map(|x| x.path.clone()).collect::<Option<PathBuf>>(), &supported_games.borrow());

            // Change the `Schema` for that game. A failed load leaves it as `None`.
            *schema.borrow_mut() = Schema::load(&rpfm_path, &supported_games.borrow().iter().filter(|x| x.folder_name == *game_selected.borrow().game).map(|x| x.schema.to_owned()).collect::<String>()).ok();

            // Change the `dependency_database` for that game. A failed open leaves it as `None`.
            *dependency_database.borrow_mut() = match packfile::open_packfile(game_selected.borrow().game_dependency_packfile_path.to_path_buf()) {
                Ok(data) => Some(data.data.packed_files),
                Err(_) => None,
            };

            // If we have a PackFile opened....
            if !pack_file_decoded.borrow().extra_data.file_name.is_empty() {

                // Re-enable the "PackFile Management" actions, so the "Special Stuff" menu gets updated properly.
                enable_packfile_actions(&app_ui, &game_selected, false);
                enable_packfile_actions(&app_ui, &game_selected, true);

                // Set the current "Operational Mode" to `Normal` (In case we were in `MyMod` mode).
                set_my_mod_mode(&app_ui, &mode, None);
            }
        }
    }
));
/*
--------------------------------------------------------
Superior Menu: "Special Stuff"
--------------------------------------------------------
*/
// When we hit the "Patch SiegeAI" button (Warhammer 2).
// These "Special Stuff" handlers are thin wrappers: each one forwards to the
// corresponding per-game helper with the shared UI state.
app_ui.menu_bar_patch_siege_ai_wh2.connect_activate(clone!(
    app_ui,
    pack_file_decoded => move |_,_| {
        patch_siege_ai(&app_ui, &pack_file_decoded);
    }
));

// When we hit the "Generate Dependency Pack" button (Warhammer 2).
app_ui.menu_bar_generate_dependency_pack_wh2.connect_activate(clone!(
    app_ui,
    rpfm_path,
    game_selected => move |_,_| {
        generate_dependency_pack(&app_ui, &rpfm_path, &game_selected);
    }
));

// When we hit the "Create Map Prefab" button (Warhammer 2).
app_ui.menu_bar_create_map_prefab_wh2.connect_activate(clone!(
    application,
    app_ui,
    pack_file_decoded,
    game_selected => move |_,_| {
        create_prefab(&application, &app_ui, &game_selected, &pack_file_decoded);
    }
));

// When we hit the "Patch SiegeAI" button (Warhammer).
app_ui.menu_bar_patch_siege_ai_wh.connect_activate(clone!(
    app_ui,
    pack_file_decoded => move |_,_| {
        patch_siege_ai(&app_ui, &pack_file_decoded);
    }
));

// When we hit the "Generate Dependency Pack" button (Warhammer).
app_ui.menu_bar_generate_dependency_pack_wh.connect_activate(clone!(
    game_selected,
    rpfm_path,
    app_ui => move |_,_| {
        generate_dependency_pack(&app_ui, &rpfm_path, &game_selected);
    }
));

// When we hit the "Create Map Prefab" button (Warhammer).
app_ui.menu_bar_create_map_prefab_wh.connect_activate(clone!(
    application,
    app_ui,
    pack_file_decoded,
    game_selected => move |_,_| {
        create_prefab(&application, &app_ui, &game_selected, &pack_file_decoded);
    }
));

// When we hit the "Generate Dependency Pack" button (Attila).
app_ui.menu_bar_generate_dependency_pack_att.connect_activate(clone!(
    app_ui,
    rpfm_path,
    game_selected => move |_,_| {
        generate_dependency_pack(&app_ui, &rpfm_path, &game_selected);
    }
));
/*
--------------------------------------------------------
Superior Menu: "About"
--------------------------------------------------------
*/
// When we hit the "Check Updates" button.
app_ui.menu_bar_check_updates.connect_activate(clone!(
    app_ui => move |_,_| {
        check_updates(VERSION, Some(&app_ui.window), None);
    }
));

// When we hit the "Check Schema Updates" button.
app_ui.menu_bar_check_schema_updates.connect_activate(clone!(
    supported_games,
    game_selected,
    rpfm_path,
    schema,
    app_ui => move |_,_| {
        check_schema_updates(VERSION, &rpfm_path, &supported_games.borrow(), &game_selected, &schema, Some(&app_ui.window), None);
    }
));

// When we hit the "Support me on Patreon" button.
app_ui.menu_bar_open_patreon.connect_activate(move |_,_| {

    // I doubt GTK allows to put a LinkButton in the Menubar so... time to be creative:
    // create a detached LinkButton and fire its "activate-link" signal, which opens
    // the URL in the default browser.
    let link_button = LinkButton::new("https://www.patreon.com/RPFM");
    link_button.emit("activate-link", &[]).unwrap();
});

// When we hit the "About" button.
app_ui.menu_bar_about.connect_activate(clone!(
    rpfm_path,
    app_ui => move |_,_| {
        show_about_window(VERSION, &rpfm_path, &app_ui.window);
    }
));
/*
--------------------------------------------------------
Contextual TreeView Popup
--------------------------------------------------------
*/
// When we right-click the TreeView, we calculate the position where the popup must aim, and show it.
//
// NOTE: REMEMBER, WE OPEN THE POPUP HERE, BUT WE NEED TO CLOSE IT WHEN WE HIT HIS BUTTONS.
app_ui.folder_tree_view.connect_button_release_event(clone!(
    app_ui => move |_,button| {

        // If we Right-Click (mouse button 3) and there is something selected...
        if button.get_button() == 3 && app_ui.folder_tree_selection.count_selected_rows() > 0 {

            // Get a Rectangle over the selected line, and popup the Contextual Menu aimed at it.
            let rect = get_rect_for_popover(&app_ui.folder_tree_view, Some(button.get_position()));
            app_ui.folder_tree_view_context_menu.set_pointing_to(&rect);
            app_ui.folder_tree_view_context_menu.popup();
        }

        // Let the event propagate so normal selection handling still works.
        Inhibit(false)
    }
));
// We check every action possible for the selected file when changing the cursor.
// Depending on whether the selection is a File, a Folder, the PackFile itself or nothing,
// we enable/disable each entry of the contextual menu so only valid operations are offered.
app_ui.folder_tree_view.connect_cursor_changed(clone!(
dependency_database,
pack_file_decoded,
schema,
app_ui => move |_| {
// Get the path of the selected thing.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
// Get the type of the selected thing.
let selection_type = get_type_of_selected_tree_path(&tree_path, &pack_file_decoded.borrow());
// Depending on the type of the selected thing, we enable or disable different actions.
// This match is exhaustive over TreePathType, so every selection state is covered.
match selection_type {
// If it's a file: only rename/delete/extract make sense (you can't add into a file).
TreePathType::File(_) => {
app_ui.folder_tree_view_add_file.set_enabled(false);
app_ui.folder_tree_view_add_folder.set_enabled(false);
app_ui.folder_tree_view_add_from_packfile.set_enabled(false);
app_ui.folder_tree_view_rename_packedfile.set_enabled(true);
app_ui.folder_tree_view_delete_packedfile.set_enabled(true);
app_ui.folder_tree_view_extract_packedfile.set_enabled(true);
app_ui.folder_tree_view_create_loc.set_enabled(false);
app_ui.folder_tree_view_create_db.set_enabled(false);
app_ui.folder_tree_view_create_text.set_enabled(false);
app_ui.folder_tree_view_mass_import_tsv_files.set_enabled(false);
},
// If it's a folder: everything is allowed.
TreePathType::Folder(_) => {
app_ui.folder_tree_view_add_file.set_enabled(true);
app_ui.folder_tree_view_add_folder.set_enabled(true);
app_ui.folder_tree_view_add_from_packfile.set_enabled(true);
app_ui.folder_tree_view_rename_packedfile.set_enabled(true);
app_ui.folder_tree_view_delete_packedfile.set_enabled(true);
app_ui.folder_tree_view_extract_packedfile.set_enabled(true);
app_ui.folder_tree_view_create_loc.set_enabled(true);
app_ui.folder_tree_view_create_db.set_enabled(true);
app_ui.folder_tree_view_create_text.set_enabled(true);
app_ui.folder_tree_view_mass_import_tsv_files.set_enabled(true);
},
// If it's the PackFile: like a folder, except it cannot be renamed from here.
TreePathType::PackFile => {
app_ui.folder_tree_view_add_file.set_enabled(true);
app_ui.folder_tree_view_add_folder.set_enabled(true);
app_ui.folder_tree_view_add_from_packfile.set_enabled(true);
app_ui.folder_tree_view_rename_packedfile.set_enabled(false);
app_ui.folder_tree_view_delete_packedfile.set_enabled(true);
app_ui.folder_tree_view_extract_packedfile.set_enabled(true);
app_ui.folder_tree_view_create_loc.set_enabled(true);
app_ui.folder_tree_view_create_db.set_enabled(true);
app_ui.folder_tree_view_create_text.set_enabled(true);
app_ui.folder_tree_view_mass_import_tsv_files.set_enabled(true);
},
// If there is nothing selected: disable every action.
TreePathType::None => {
app_ui.folder_tree_view_add_file.set_enabled(false);
app_ui.folder_tree_view_add_folder.set_enabled(false);
app_ui.folder_tree_view_add_from_packfile.set_enabled(false);
app_ui.folder_tree_view_rename_packedfile.set_enabled(false);
app_ui.folder_tree_view_delete_packedfile.set_enabled(false);
app_ui.folder_tree_view_extract_packedfile.set_enabled(false);
app_ui.folder_tree_view_create_loc.set_enabled(false);
app_ui.folder_tree_view_create_db.set_enabled(false);
app_ui.folder_tree_view_create_text.set_enabled(false);
app_ui.folder_tree_view_mass_import_tsv_files.set_enabled(false);
},
}
// If there is no dependency_database or schema for our GameSelected, ALWAYS disable creating new DB Tables.
// This overrides whatever the match above decided, since those features need a schema to work.
if dependency_database.borrow().is_none() || schema.borrow().is_none() {
app_ui.folder_tree_view_create_db.set_enabled(false);
app_ui.folder_tree_view_mass_import_tsv_files.set_enabled(false);
}
}
));
// When we hit the "Add file" button.
// Opens a native file chooser, then adds every selected file to the open PackFile. In "MyMod"
// mode, files picked from inside the MyMod assets folder keep their on-disk relative path in
// the TreeView; everything else is added under the current TreeView selection.
app_ui.folder_tree_view_add_file.connect_activate(clone!(
app_ui,
settings,
mode,
pack_file_decoded => move |_,_| {
// First, we hide the context menu.
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Create our `FileChooser` to select the files to add.
let file_chooser_add_file_to_packfile = FileChooserNative::new(
"Select File...",
&app_ui.window,
FileChooserAction::Open,
"Accept",
"Cancel"
);
// Allow to select multiple files at the same time.
file_chooser_add_file_to_packfile.set_select_multiple(true);
// Check the current "Operational Mode".
match *mode.borrow() {
// If we are in "MyMod" mode...
Mode::MyMod {ref game_folder_name, ref mod_name} => {
// In theory, if we reach this line this should always exist. In theory I should be rich.
if let Some(ref my_mods_base_path) = settings.borrow().paths.my_mods_base_path {
// We get the assets path for the selected "MyMod".
let mut my_mod_path = my_mods_base_path.to_path_buf();
my_mod_path.push(&game_folder_name);
my_mod_path.push(Path::new(&mod_name).file_stem().unwrap().to_string_lossy().as_ref().to_owned());
// We check that path exists, and create it if it doesn't.
if !my_mod_path.is_dir() {
match DirBuilder::new().create(&my_mod_path) {
Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
};
}
// Then we set that path as current path for the "Add PackedFile" file chooser.
file_chooser_add_file_to_packfile.set_current_folder(&my_mod_path);
// If we hit "Accept"...
// NOTE(review): `gtk_response_accept` is presumably the i32 value of ResponseType::Accept,
// defined elsewhere in this file — confirm before relying on it.
if file_chooser_add_file_to_packfile.run() == gtk_response_accept {
// Get the names of the files to add.
let paths = file_chooser_add_file_to_packfile.get_filenames();
// For each one of them...
for path in &paths {
// If we are inside the mod's folder, we need to "emulate" the path to the
// file in the TreeView, so we add the file with a custom tree_path.
if path.starts_with(&my_mod_path) {
// Turn both paths into `Vec<String>`, so we can compare them better.
let path_vec = path.iter().map(|t| t.to_str().unwrap().to_string()).collect::<Vec<String>>();
let my_mod_path_vec = my_mod_path.iter().map(|t| t.to_str().unwrap().to_string()).collect::<Vec<String>>();
// Get the index from where its future tree_path starts.
let index = my_mod_path_vec.len();
// Get its `TreeView` tree_path (the part of the path relative to the MyMod folder).
let tree_path = path_vec[index..].to_vec();
// Try to add it to the PackFile.
let success = match packfile::add_file_to_packfile(&mut *pack_file_decoded.borrow_mut(), path, tree_path.to_vec()) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we had success adding it...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Update the TreeView to show the newly added PackedFile.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Add(tree_path.to_vec()),
&TreePathType::None,
);
}
}
// If not, we get their tree_path like a normal file.
else {
// Get its `TreeView` path.
let tree_path = get_tree_path_from_pathbuf(path, &app_ui.folder_tree_selection, true);
// Try to add it to the PackFile.
let success = match packfile::add_file_to_packfile(&mut *pack_file_decoded.borrow_mut(), path, tree_path.to_vec()) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we had success adding it...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Update the TreeView to show the newly added PackedFile.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Add(tree_path.to_vec()),
&TreePathType::None,
);
}
}
}
}
}
// "MyMod" mode without a configured base path: bail out with an error dialog.
else {
return show_dialog(&app_ui.window, false, "MyMod base folder not configured.");
}
},
// If there is no "MyMod" selected, we just keep the normal behavior.
Mode::Normal => {
// If we hit the "Accept" button...
if file_chooser_add_file_to_packfile.run() == gtk_response_accept {
// Get all the files selected.
let paths = file_chooser_add_file_to_packfile.get_filenames();
// For each file to add...
for path in &paths {
// Get its `TreeView` path.
let tree_path = get_tree_path_from_pathbuf(path, &app_ui.folder_tree_selection, true);
// Try to add it to the PackFile.
let success = match packfile::add_file_to_packfile(&mut *pack_file_decoded.borrow_mut(), path, tree_path.to_vec()) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we had success adding it...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Update the TreeView to show the newly added PackedFile.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Add(tree_path.to_vec()),
&TreePathType::None,
);
}
}
}
},
}
}
}));
// When we hit the "Add folder" button.
// Like "Add file", but recursively adds every file found inside each selected folder.
// In "MyMod" mode, folders picked from inside the MyMod assets folder keep their on-disk
// relative path in the TreeView; everything else is added under the current selection.
app_ui.folder_tree_view_add_folder.connect_activate(clone!(
app_ui,
settings,
mode,
pack_file_decoded => move |_,_| {
// First, we hide the context menu.
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Create the `FileChooser`.
let file_chooser_add_folder_to_packfile = FileChooserNative::new(
"Select Folder...",
&app_ui.window,
FileChooserAction::SelectFolder,
"Accept",
"Cancel"
);
// Allow to select multiple folders at the same time.
file_chooser_add_folder_to_packfile.set_select_multiple(true);
// Check the current "Operational Mode".
match *mode.borrow() {
// If the current mode is "MyMod"...
Mode::MyMod {ref game_folder_name, ref mod_name} => {
// In theory, if we reach this line this should always exist. In theory I should be rich.
if let Some(ref my_mods_base_path) = settings.borrow().paths.my_mods_base_path {
// We get the assets path for the selected "MyMod".
let mut my_mod_path = my_mods_base_path.to_path_buf();
my_mod_path.push(&game_folder_name);
my_mod_path.push(Path::new(&mod_name).file_stem().unwrap().to_string_lossy().as_ref().to_owned());
// We check that path exists, and create it if it doesn't.
if !my_mod_path.is_dir() {
match DirBuilder::new().create(&my_mod_path) {
Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
};
}
// Then we set that path as current path for the "Add PackedFile" file chooser.
file_chooser_add_folder_to_packfile.set_current_folder(&my_mod_path);
// If we hit "Accept"...
if file_chooser_add_folder_to_packfile.run() == gtk_response_accept {
// Get the selected folders.
let folders = file_chooser_add_folder_to_packfile.get_filenames();
// For each folder...
for folder in &folders {
// If we are inside the mod's folder, we need to "emulate" the path to the
// file in the TreeView, so we add the file with a custom tree_path.
if folder.starts_with(&my_mod_path) {
// Turn both paths into `Vec<String>`, so we can compare them better.
let path_vec = folder.iter().map(|t| t.to_str().unwrap().to_string()).collect::<Vec<String>>();
let my_mod_path_vec = my_mod_path.iter().map(|t| t.to_str().unwrap().to_string()).collect::<Vec<String>>();
// Get the index from where its future tree_path starts.
let index = my_mod_path_vec.len();
// Get its `TreeView` tree_path.
let tree_path = path_vec[index..].to_vec();
// Get the "Prefix" of the folder (its parent directory on disk), used below
// to strip the absolute part from every file found inside it.
let mut big_parent_prefix = folder.clone();
big_parent_prefix.pop();
// Get all the files inside that folder recursively.
match get_files_from_subdir(folder) {
// If we succeed...
Ok(file_path_list) => {
// For each file...
for file_path in file_path_list {
// Remove its prefix, leaving only the path from the folder onwards.
match file_path.strip_prefix(&big_parent_prefix) {
// If there is no problem...
Ok(filtered_path) => {
// Then get their unique tree_path, combining our current tree_path
// with the filtered_path we got for them.
let mut filtered_path = filtered_path.iter().map(|t| t.to_str().unwrap().to_string()).collect::<Vec<String>>();
let mut tree_path = tree_path.clone();
tree_path.pop();
tree_path.append(&mut filtered_path);
// Try to add it to the PackFile.
let success = match packfile::add_file_to_packfile(&mut *pack_file_decoded.borrow_mut(), &file_path.to_path_buf(), tree_path.to_vec()) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we had success adding it...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Update the TreeView to show the newly added PackedFile.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Add(tree_path.to_vec()),
&TreePathType::None,
);
}
}
// If there is an error while removing the prefix...
Err(_) => show_dialog(&app_ui.window, false, format!("Error adding the following file to the PackFile:\n\n{:?}\n\nThe file's path doesn't start with {:?}", file_path, big_parent_prefix)),
}
}
}
// If there is an error while getting the files to add...
Err(_) => show_dialog(&app_ui.window, false, "Error while getting the files to add to the PackFile."),
}
}
// If not, we get their tree_path like a normal folder.
else {
// Get the "Prefix" of the folder.
let mut big_parent_prefix = folder.clone();
big_parent_prefix.pop();
// Get all the files inside that folder recursively.
match get_files_from_subdir(folder) {
// If we succeed...
Ok(file_path_list) => {
// For each file...
for file_path in file_path_list {
// Remove its prefix, leaving only the path from the folder onwards.
match file_path.strip_prefix(&big_parent_prefix) {
// If there is no problem...
Ok(filtered_path) => {
// Get its `tree_path`.
let tree_path = get_tree_path_from_pathbuf(&filtered_path.to_path_buf(), &app_ui.folder_tree_selection, false);
// Try to add it to the PackFile.
let success = match packfile::add_file_to_packfile(&mut *pack_file_decoded.borrow_mut(), &file_path.to_path_buf(), tree_path.to_vec()) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we had success adding it...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Update the TreeView to show the newly added PackedFile.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Add(tree_path.to_vec()),
&TreePathType::None,
);
}
}
// If there is an error while removing the prefix...
Err(_) => show_dialog(&app_ui.window, false, format!("Error adding the following file to the PackFile:\n\n{:?}\n\nThe file's path doesn't start with {:?}", file_path, big_parent_prefix)),
}
}
}
// If there is an error while getting the files to add...
Err(_) => show_dialog(&app_ui.window, false, "Error while getting the files to add to the PackFile."),
}
}
}
}
}
// "MyMod" mode without a configured base path: bail out with an error dialog.
else {
return show_dialog(&app_ui.window, false, "MyMod base folder not configured.");
}
}
// If there is no "MyMod" selected, we just keep the normal behavior.
Mode::Normal => {
// If we hit "Accept"...
if file_chooser_add_folder_to_packfile.run() == gtk_response_accept {
// Get the folders we want to add.
let folders = file_chooser_add_folder_to_packfile.get_filenames();
// For each folder...
for folder in &folders {
// Get the "Prefix" of the folder.
let mut big_parent_prefix = folder.clone();
big_parent_prefix.pop();
// Get all the files inside that folder recursively.
match get_files_from_subdir(folder) {
// If we succeed...
Ok(file_path_list) => {
// For each file...
for file_path in file_path_list {
// Remove its prefix, leaving only the path from the folder onwards.
match file_path.strip_prefix(&big_parent_prefix) {
// If there is no problem...
Ok(filtered_path) => {
// Get its `tree_path`.
let tree_path = get_tree_path_from_pathbuf(&filtered_path.to_path_buf(), &app_ui.folder_tree_selection, false);
// Try to add it to the PackFile.
let success = match packfile::add_file_to_packfile(&mut *pack_file_decoded.borrow_mut(), &file_path.to_path_buf(), tree_path.to_vec()) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we had success adding it...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Update the TreeView to show the newly added PackedFile.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Add(tree_path.to_vec()),
&TreePathType::None,
);
}
}
// If there is an error while removing the prefix...
Err(_) => show_dialog(&app_ui.window, false, format!("Error adding the following file to the PackFile:\n\n{:?}\n\nThe file's path doesn't start with {:?}", file_path, big_parent_prefix)),
}
}
}
// If there is an error while getting the files to add...
Err(_) => show_dialog(&app_ui.window, false, "Error while getting the files to add to the PackFile."),
}
}
}
}
}
}
}
));
// When we hit the "Add file/folder from PackFile" button.
// Opens a second PackFile in a side TreeView and lets the user copy PackedFiles from it into
// the currently open PackFile. While this mode is active the main TreeView is "locked"
// (is_folder_tree_view_locked) so clicking it doesn't try to decode PackedFiles.
//
// Fixes applied in review:
// - `&copy_button` had been mangled into the HTML entity `©_button` in two places
//   (add_class / attach), which is not valid Rust; restored the original `&copy_button`.
// - `buffer` was declared `mut` but is never rebound (only borrowed through its RefCell),
//   which triggers an `unused_mut` warning; the `mut` is removed.
app_ui.folder_tree_view_add_from_packfile.connect_activate(clone!(
app_ui,
pack_file_decoded,
pack_file_decoded_extra,
is_folder_tree_view_locked => move |_,_| {
// First, we hide the context menu.
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Then, we destroy any children that the packed_file_data_display we use may have, cleaning it.
let childrens_to_utterly_destroy = app_ui.packed_file_data_display.get_children();
if !childrens_to_utterly_destroy.is_empty() {
for i in &childrens_to_utterly_destroy {
i.destroy();
}
}
// Create the `FileChooser`.
let file_chooser_add_from_packfile = FileChooserNative::new(
"Select PackFile...",
&app_ui.window,
FileChooserAction::Open,
"Accept",
"Cancel"
);
// Set its filter to only admit ".pack" files.
file_chooser_filter_packfile(&file_chooser_add_from_packfile, "*.pack");
// If we hit "Accept"...
if file_chooser_add_from_packfile.run() == gtk_response_accept {
// Try to open the selected PackFile.
match packfile::open_packfile_with_bufreader(file_chooser_add_from_packfile.get_filename().unwrap()) {
// If the extra PackFile is valid...
Ok(result) => {
// Separate the result: the decoded PackFile and the reader used to lazily pull data from it.
let pack_file_opened = result.0;
let buffer = Rc::new(RefCell::new(result.1));
// We create the "Exit" and "Copy" buttons.
let exit_button = Button::new_with_label("Exit \"Add file/folder from PackFile\" mode");
let copy_button = Button::new_with_label("<=");
exit_button.set_vexpand(false);
copy_button.set_hexpand(false);
// Paint the fucking button pink, because people keeps complaining they don't see it.
StyleContext::add_class(&copy_button.get_style_context().unwrap(), "suggested-action");
// We attach them to the main grid.
app_ui.packed_file_data_display.attach(&exit_button, 0, 0, 2, 1);
app_ui.packed_file_data_display.attach(&copy_button, 0, 1, 1, 1);
// We create the new TreeView (in a ScrolledWindow) and its TreeStore.
let folder_tree_view_extra = TreeView::new();
let folder_tree_store_extra = TreeStore::new(&[String::static_type()]);
folder_tree_view_extra.set_model(Some(&folder_tree_store_extra));
// We create its column.
let column_extra = TreeViewColumn::new();
let cell_extra = CellRendererText::new();
column_extra.pack_start(&cell_extra, true);
column_extra.add_attribute(&cell_extra, "text", 0);
// Configuration for the `TreeView`.
folder_tree_view_extra.append_column(&column_extra);
folder_tree_view_extra.set_enable_tree_lines(true);
folder_tree_view_extra.set_enable_search(false);
folder_tree_view_extra.set_headers_visible(false);
// We create an `ScrolledWindow` for the `TreeView`.
let folder_tree_view_extra_scroll = ScrolledWindow::new(None, None);
folder_tree_view_extra_scroll.set_hexpand(true);
folder_tree_view_extra_scroll.set_vexpand(true);
folder_tree_view_extra_scroll.add(&folder_tree_view_extra);
app_ui.packed_file_data_display.attach(&folder_tree_view_extra_scroll, 1, 1, 1, 1);
// Show everything.
app_ui.packed_file_data_display.show_all();
// Block the main `TreeView` from decoding stuff.
*is_folder_tree_view_locked.borrow_mut() = true;
// Store the second PackFile's data.
*pack_file_decoded_extra.borrow_mut() = pack_file_opened;
// Build the second `TreeView`.
update_treeview(
&folder_tree_store_extra,
&*pack_file_decoded_extra.borrow(),
&folder_tree_view_extra.get_selection(),
TreeViewOperation::Build,
&TreePathType::None,
);
// We need to check here if the selected destination is not a file. Otherwise,
// we should disable the "Copy" button.
app_ui.folder_tree_selection.connect_changed(clone!(
copy_button,
pack_file_decoded => move |folder_tree_selection| {
// Get its path.
let tree_path = get_tree_path_from_selection(folder_tree_selection, true);
// Only in case it's not a file, we enable the "Copy" Button.
match get_type_of_selected_tree_path(&tree_path, &*pack_file_decoded.borrow()) {
TreePathType::File(_) => copy_button.set_sensitive(false),
_ => copy_button.set_sensitive(true),
}
}
));
// When we click in the "Copy" button (<=).
copy_button.connect_button_release_event(clone!(
app_ui,
buffer,
pack_file_decoded,
pack_file_decoded_extra,
folder_tree_view_extra => move |_,_| {
// Get its source & destination paths.
let tree_path_source = get_tree_path_from_selection(&folder_tree_view_extra.get_selection(), true);
let tree_path_destination = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
// Get the destination type.
let selection_type = get_type_of_selected_tree_path(&tree_path_destination, &pack_file_decoded.borrow());
// Try to add the PackedFile to the main PackFile.
let success = match packfile::add_packedfile_to_packfile(
&mut buffer.borrow_mut(),
&*pack_file_decoded_extra.borrow(),
&mut *pack_file_decoded.borrow_mut(),
&tree_path_source,
&tree_path_destination,
) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If it succeed...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Get the new "Prefix" for the PackedFiles: the source path minus the
// PackFile's own name (first element after reversing).
let mut source_prefix = tree_path_source;
source_prefix.reverse();
source_prefix.pop();
source_prefix.reverse();
// Get the new "Prefix" for the Destination PackedFiles, same way.
let mut destination_prefix = tree_path_destination;
destination_prefix.reverse();
destination_prefix.pop();
destination_prefix.reverse();
// Get all the PackedFiles to copy: everything under the source prefix.
let path_list: Vec<Vec<String>> = pack_file_decoded_extra.borrow()
.data.packed_files
.iter()
.filter(|x| x.path.starts_with(&source_prefix))
.map(|x| x.path.to_vec())
.collect();
// Update the TreeView to show the newly added PackedFiles.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::AddFromPackFile(source_prefix.to_vec(), destination_prefix.to_vec(), path_list),
&selection_type,
);
}
Inhibit(false)
}
));
// When we click in the "Exit "Add file/folder from PackFile" mode" button.
exit_button.connect_button_release_event(clone!(
app_ui,
pack_file_decoded_extra,
is_folder_tree_view_locked => move |_,_| {
// Remove the `pack_file_decoded_extra` from memory.
*pack_file_decoded_extra.borrow_mut() = PackFile::new();
// Unlock the `TreeView`.
*is_folder_tree_view_locked.borrow_mut() = false;
// We need to destroy any children that the packed_file_data_display we use may have, cleaning it.
let children_to_utterly_destroy = app_ui.packed_file_data_display.get_children();
if !children_to_utterly_destroy.is_empty() {
for i in &children_to_utterly_destroy {
i.destroy();
}
}
// Show the "Tips".
display_help_tips(&app_ui.packed_file_data_display);
Inhibit(false)
}
));
}
Err(error) => show_dialog(&app_ui.window, false, error.cause()),
}
}
}
}
));
// The "Rename" action requires multiple things to happend, so we group them together.
// The flow is: the menu action puts the selected cell in "Editable" mode; `connect_edited`
// commits the rename (or restores the old name on failure); `connect_editing_canceled`
// just restores "Activatable" mode. `old_snake` carries the pre-edit name between closures.
//
// Review fix: `get_selected_rows().0.len() >= 1` replaced with the idiomatic
// `!get_selected_rows().0.is_empty()` (clippy::len_zero) — same behavior.
{
let old_snake = Rc::new(RefCell::new(String::new()));
// When we hit the "Rename file/folder" button, we start editing the file we want to rename.
app_ui.folder_tree_view_rename_packedfile.connect_activate(clone!(
app_ui,
old_snake,
pack_file_decoded => move |_,_|{
// We hide the context menu.
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// If we have at least one file selected...
if !app_ui.folder_tree_selection.get_selected_rows().0.is_empty() {
// If the selected file/folder turns out to be the PackFile, stop right there, criminal scum.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
if let TreePathType::PackFile = get_type_of_selected_tree_path(&tree_path, &*pack_file_decoded.borrow()) {
return
}
// Set the cells to "Editable" mode, so we can edit them.
app_ui.folder_tree_view_cell.set_property_mode(CellRendererMode::Editable);
// Get the `TreePath` of what we want to rename.
let tree_path: TreePath = app_ui.folder_tree_selection.get_selected_rows().0[0].clone();
// Get the old name of the file/folder, for restoring purpouses.
let tree_iter = app_ui.folder_tree_store.get_iter(&tree_path).unwrap();
*old_snake.borrow_mut() = app_ui.folder_tree_store.get_value(&tree_iter, 0).get().unwrap();
// Start editing the name at the selected `TreePath`.
app_ui.folder_tree_view.set_cursor(&tree_path, Some(&app_ui.folder_tree_view_column), true);
}
}
}
));
// When the edition is finished...
app_ui.folder_tree_view_cell.connect_edited(clone!(
pack_file_decoded,
old_snake,
app_ui => move |cell,_, new_name| {
// Get the `tree_path` of the selected file/folder...
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
// Get its type.
let selection_type = get_type_of_selected_tree_path(&tree_path, &pack_file_decoded.borrow());
// And try to rename it.
let success = match packfile::rename_packed_file(&mut *pack_file_decoded.borrow_mut(), &tree_path, new_name) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we renamed the file/folder successfully...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Rename whatever is selected (and its childs, if it have any) from the `TreeView`.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Rename(new_name.to_owned()),
&selection_type,
);
}
// If we didn't rename the file, restore its old name.
else {
cell.set_property_text(Some(&old_snake.borrow()));
}
// Set the cells back to "Activatable" mode.
cell.set_property_mode(CellRendererMode::Activatable);
}
));
// When the edition is canceled...
app_ui.folder_tree_view_cell.connect_editing_canceled(move |cell| {
// Set the cells back to "Activatable" mode.
cell.set_property_mode(CellRendererMode::Activatable);
}
);
}
// When we hit the "Create Loc File" button.
app_ui.folder_tree_view_create_loc.connect_activate(clone!(
dependency_database,
pack_file_decoded,
application,
rpfm_path,
schema,
app_ui => move |_,_| {
// We hide the context menu and open the "Create Loc File" window.
// (The old comment about "delete it" was copy-pasted from the delete handler.)
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Build the "Create Loc File" window.
show_create_packed_file_window(&application, &app_ui, &rpfm_path, &pack_file_decoded, PackedFileType::Loc, &dependency_database, &schema);
}
}
));
// When we hit the "Create DB Table" button.
app_ui.folder_tree_view_create_db.connect_activate(clone!(
dependency_database,
pack_file_decoded,
application,
rpfm_path,
schema,
app_ui => move |_,_| {
// We hide the context menu and open the "Create DB Table" window.
// (The old comment about "delete it" was copy-pasted from the delete handler.)
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Build the "Create DB Table" window.
show_create_packed_file_window(&application, &app_ui, &rpfm_path, &pack_file_decoded, PackedFileType::DB, &dependency_database, &schema);
}
}
));
// When we hit the "Create Text File" button.
app_ui.folder_tree_view_create_text.connect_activate(clone!(
dependency_database,
pack_file_decoded,
application,
rpfm_path,
schema,
app_ui => move |_,_| {
// We hide the context menu and open the "Create Text File" window.
// (The old comment about "delete it" was copy-pasted from the delete handler.)
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Build the "Create Text File" window.
show_create_packed_file_window(&application, &app_ui, &rpfm_path, &pack_file_decoded, PackedFileType::Text, &dependency_database, &schema);
}
}
));
// When we hit the "Mass-Import TSV Files" button.
app_ui.folder_tree_view_mass_import_tsv_files.connect_activate(clone!(
pack_file_decoded,
application,
rpfm_path,
schema,
app_ui => move |_,_| {
// We hide the context menu and open the "Mass-Import TSV Files" window.
// (The old comment about "delete it" was copy-pasted from the delete handler.)
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Build the "Mass-Import TSV Files" window.
show_tsv_mass_import_window(&application, &app_ui, &rpfm_path, &pack_file_decoded, &schema);
}
}
));
// When we hit the "Delete file/folder" button.
app_ui.folder_tree_view_delete_packedfile.connect_activate(clone!(
app_ui,
is_packedfile_opened,
pack_file_decoded => move |_,_|{
// We hide the context menu, then we get the selected file/folder, delete it and update the
// TreeView. Pretty simple, actually.
app_ui.folder_tree_view_context_menu.popdown();
// If there is a PackedFile opened, we show a message with the explanation of why we can't
// delete the selected file/folder. Deleting something whose view is open on the right side
// would leave that view pointing at freed data, so the guard refuses instead.
if *is_packedfile_opened.borrow() {
show_dialog(&app_ui.window, false, "You can't delete a PackedFile/Folder while there is a PackedFile opened in the right side. Pls close it by clicking in a Folder/PackFile before trying to delete it again.")
}
// Otherwise, we continue the deletion process.
else {
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Get its `tree_path`.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
// Get its type (captured before deletion, since it drives the TreeView update below).
let selection_type = get_type_of_selected_tree_path(&tree_path, &pack_file_decoded.borrow());
// Try to delete whatever is selected.
let success = match packfile::delete_from_packfile(&mut *pack_file_decoded.borrow_mut(), &tree_path) {
Ok(_) => true,
Err(error) => {
show_dialog(&app_ui.window, false, error.cause());
false
}
};
// If we succeed...
if success {
// Set the mod as "Modified".
set_modified(true, &app_ui.window, &mut *pack_file_decoded.borrow_mut());
// Remove whatever is selected (and its childs, if it have any) from the `TreeView`.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Delete,
&selection_type,
);
}
}
}
}
));
// When we hit the "Extract file/folder" button.
app_ui.folder_tree_view_extract_packedfile.connect_activate(clone!(
app_ui,
settings,
mode,
pack_file_decoded => move |_,_|{
// First, we hide the context menu.
app_ui.folder_tree_view_context_menu.popdown();
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Get the selected path, both in complete and incomplete forms.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
let mut tree_path_incomplete = tree_path.to_vec();
tree_path_incomplete.reverse();
tree_path_incomplete.pop();
tree_path_incomplete.reverse();
// Get the type of the selection.
let selection_type = get_type_of_selected_tree_path(&tree_path, &*pack_file_decoded.borrow());
// Check the current "Operational Mode".
match *mode.borrow() {
// If we have a "MyMod" selected...
Mode::MyMod {ref game_folder_name, ref mod_name} => {
// In theory, if we reach this line this should always exist. In theory I should be rich.
if let Some(ref my_mods_base_path) = settings.borrow().paths.my_mods_base_path {
// We get the assets folder of our mod.
let mut my_mod_path = my_mods_base_path.to_path_buf();
my_mod_path.push(&game_folder_name);
my_mod_path.push(Path::new(&mod_name).file_stem().unwrap().to_string_lossy().as_ref().to_owned());
// We check that path exists, and create it if it doesn't.
if !my_mod_path.is_dir() {
match DirBuilder::new().create(&my_mod_path) {
Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
};
}
// Create the path for the final destination of the file.
let mut extraction_final_folder = my_mod_path.to_path_buf();
// If it's a file or a folder...
if selection_type == TreePathType::File((vec![String::new()], 1)) || selection_type == TreePathType::Folder(vec![String::new()]) {
// If it's a folder, remove the last directory, as that one will be created when extracting.
if selection_type == TreePathType::Folder(vec![String::new()]) { tree_path_incomplete.pop(); }
// For each folder in his path...
for (index, folder) in tree_path_incomplete.iter().enumerate() {
// Complete the extracted path.
extraction_final_folder.push(folder);
// The last thing in the path is the new file, so we don't have to
// create a folder for it.
if index < (tree_path_incomplete.len() - 1) {
match DirBuilder::new().create(&extraction_final_folder) {
Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
};
}
}
}
// And finally, we extract our file to the desired destiny.
match packfile::extract_from_packfile(
&*pack_file_decoded.borrow(),
&tree_path,
&extraction_final_folder
) {
Ok(result) => show_dialog(&app_ui.window, true, result),
Err(error) => show_dialog(&app_ui.window, false, error.cause())
}
}
// If there is no "MyMod" path configured, report it.
else {
return show_dialog(&app_ui.window, false, "MyMod base path not configured.");
}
}
// If there is no "MyMod" selected, extract normally.
Mode::Normal => {
// Create the `FileChooser`.
let file_chooser_extract =
// If we have selected a file...
if selection_type == TreePathType::File((vec![String::new()], 1)) {
// Create a `FileChooser` to extract files.
let file_chooser = FileChooserNative::new(
"Select File destination...",
&app_ui.window,
FileChooserAction::Save,
"Extract",
"Cancel"
);
// We want to ask before overwriting files. Just in case. Otherwise, there can be an accident.
file_chooser.set_do_overwrite_confirmation(true);
// Return it.
file_chooser
}
// If we have selected a folder or the PackFile...
else if selection_type == TreePathType::Folder(vec![String::new()]) ||
selection_type == TreePathType::PackFile {
// Create a `FileChooser` to extract folders.
FileChooserNative::new(
"Select Folder destination...",
&app_ui.window,
FileChooserAction::CreateFolder,
"Extract",
"Cancel"
)
}
// Otherwise, return an error.
else {
return show_dialog(&app_ui.window, false, "You can't extract non-existent files.");
};
// If we have selected a file...
if selection_type == TreePathType::File((vec![String::new()], 1)) {
// Set the `FileChooser` current name to the PackFile's name.
file_chooser_extract.set_current_name(&tree_path.last().unwrap());
}
// If we hit "Extract"...
if file_chooser_extract.run() == gtk_response_accept {
// Get the extraction path.
let mut extraction_path = file_chooser_extract.get_filename().unwrap();
// If we have selected the PackFile...
if selection_type == TreePathType::PackFile {
// Add the PackFile's name to the path.
extraction_path.push(&app_ui.folder_tree_store.get_value(&app_ui.folder_tree_store.get_iter_first().unwrap(), 0).get::<String>().unwrap());
// We check that path exists, and create it if it doesn't.
if !extraction_path.is_dir() {
match DirBuilder::new().create(&extraction_path) {
Ok(_) | Err(_) => { /* This returns ok if it created the folder and err if it already exist. */ }
};
}
}
// Try to extract the PackFile.
match packfile::extract_from_packfile(
&*pack_file_decoded.borrow(),
&tree_path,
&extraction_path
) {
Ok(result) => show_dialog(&app_ui.window, true, result),
Err(error) => show_dialog(&app_ui.window, false, error.cause())
}
}
}
}
}
}
));
/*
--------------------------------------------------------
Special Events
--------------------------------------------------------
*/
// When we press "->", we expand the selected folder (if it's a folder). We do the oposite thing with "<-".
app_ui.folder_tree_view.connect_key_release_event(clone!(
pack_file_decoded,
app_ui => move |_, key| {
// We only do something in case the focus is in the TreeView. This should stop problems with
// the accels working everywhere.
if app_ui.folder_tree_view.has_focus() {
// Get the pressed key.
let key_val = key.get_keyval();
// If we press "->"...
if key_val == 65363 {
// We get whatever is selected.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
// We get the type of the selected thing.
match get_type_of_selected_tree_path(&tree_path, &*pack_file_decoded.borrow()) {
// If the selected thing it's `PackFile` or `Folder`...
TreePathType::PackFile | TreePathType::Folder(_) => {
// Get his `TreePath`.
let tree_path: TreePath = app_ui.folder_tree_selection.get_selected_rows().0[0].clone();
// And expand it.
app_ui.folder_tree_view.expand_row(&tree_path, false);
},
_ => {},
}
}
// If we press "<-"...
else if key_val == 65361 {
// We get whatever is selected.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
// We get the type of the selected thing.
match get_type_of_selected_tree_path(&tree_path, &*pack_file_decoded.borrow()) {
// If the selected thing it's `PackFile` or `Folder`...
TreePathType::PackFile | TreePathType::Folder(_) => {
// Get his `TreePath`.
let tree_path: TreePath = app_ui.folder_tree_selection.get_selected_rows().0[0].clone();
// And collapse it.
app_ui.folder_tree_view.collapse_row(&tree_path);
},
_ => {},
}
}
}
Inhibit(false)
}
));
// When we double-click a file in the `TreeView`, try to decode it with his codec, if it's implemented.
app_ui.folder_tree_view.connect_row_activated(clone!(
game_selected,
application,
schema,
app_ui,
settings,
rpfm_path,
supported_games,
pack_file_decoded,
dependency_database,
is_packedfile_opened,
is_folder_tree_view_locked => move |_,_,_| {
// Before anything else, we need to check if the `TreeView` is unlocked. Otherwise we don't do anything from here.
if !(*is_folder_tree_view_locked.borrow()) {
// First, we destroy any children that the `packed_file_data_display` we use may have, cleaning it.
let childrens_to_utterly_destroy = app_ui.packed_file_data_display.get_children();
if !childrens_to_utterly_destroy.is_empty() {
for i in &childrens_to_utterly_destroy {
i.destroy();
}
}
// Then, we get the `tree_path` selected, and check what it is.
let tree_path = get_tree_path_from_selection(&app_ui.folder_tree_selection, true);
let path_type = get_type_of_selected_tree_path(&tree_path, &pack_file_decoded.borrow());
// We act, depending on his type.
match path_type {
// Only in case it's a file, we do something.
TreePathType::File((tree_path, index)) => {
// Get the name of the PackedFile (we are going to use it a lot).
let packedfile_name = tree_path.last().unwrap().clone();
// First, we get his type to decode it properly
let mut packed_file_type: &str =
// If it's in the "db" folder, it's a DB PackedFile (or you put something were it shouldn't be).
if tree_path[0] == "db" { "DB" }
// If it ends in ".loc", it's a localisation PackedFile.
else if packedfile_name.ends_with(".loc") { "LOC" }
// If it ends in ".rigid_model_v2", it's a RigidModel PackedFile.
else if packedfile_name.ends_with(".rigid_model_v2") { "RIGIDMODEL" }
// If it ends in any of these, it's a plain text PackedFile.
else if packedfile_name.ends_with(".lua") ||
packedfile_name.ends_with(".xml") ||
packedfile_name.ends_with(".xml.shader") ||
packedfile_name.ends_with(".xml.material") ||
packedfile_name.ends_with(".variantmeshdefinition") ||
packedfile_name.ends_with(".environment") ||
packedfile_name.ends_with(".lighting") ||
packedfile_name.ends_with(".wsmodel") ||
packedfile_name.ends_with(".csv") ||
packedfile_name.ends_with(".tsv") ||
packedfile_name.ends_with(".inl") ||
packedfile_name.ends_with(".battle_speech_camera") ||
packedfile_name.ends_with(".bob") ||
//packedfile_name.ends_with(".benchmark") || // This one needs special decoding/encoding.
packedfile_name.ends_with(".txt") { "TEXT" }
// If it ends in any of these, it's an image.
else if packedfile_name.ends_with(".jpg") ||
packedfile_name.ends_with(".jpeg") ||
packedfile_name.ends_with(".tga") ||
packedfile_name.ends_with(".png") { "IMAGE" }
// Otherwise, we don't have a decoder for that PackedFile... yet.
else { "None" };
// Then, depending of his type we decode it properly (if we have it implemented support
// for his type).
match packed_file_type {
// If the file is a Loc PackedFile...
"LOC" => {
if let Err(error) = PackedFileLocTreeView::create_tree_view(
&application,
&app_ui,
&pack_file_decoded,
&index,
&is_packedfile_opened,
&settings.borrow()
) { return show_dialog(&app_ui.window, false, error.cause()) };
// Tell the program there is an open PackedFile.
*is_packedfile_opened.borrow_mut() = true;
}
// If the file is a DB PackedFile...
"DB" => {
if let Err(error) = create_db_view(
&application,
&app_ui,
&rpfm_path,
&pack_file_decoded,
&index,
&is_packedfile_opened,
&schema,
&dependency_database,
&game_selected,
&supported_games,
&settings.borrow()
) { return show_dialog(&app_ui.window, false, error.cause()) };
// Tell the program there is an open PackedFile.
*is_packedfile_opened.borrow_mut() = true;
}
// If it's a plain text file, we create a source view and try to get highlighting for
// his language, if it's an specific language file.
"TEXT" => {
if let Err(error) = create_text_view(
&app_ui,
&pack_file_decoded,
&index,
&is_packedfile_opened,
) { return show_dialog(&app_ui.window, false, error.cause()) };
// Tell the program there is an open PackedFile.
*is_packedfile_opened.borrow_mut() = true;
}
// If it's an image it doesn't require any extra interaction. Just create the View
// and show the Image.
"IMAGE" => {
if let Err(error) = create_image_view(
&app_ui,
&pack_file_decoded,
&index
) { return show_dialog(&app_ui.window, false, error.cause()) };
}
// If it's a rigidmodel, we decode it and take care of his update events.
"RIGIDMODEL" => {
if let Err(error) = PackedFileRigidModelDataView::create_data_view(
&app_ui,
&pack_file_decoded,
&index,
&is_packedfile_opened,
) { return show_dialog(&app_ui.window, false, error.cause()) };
// Tell the program there is an open PackedFile.
*is_packedfile_opened.borrow_mut() = true;
}
// If we reach this point, the coding to implement this type of file is not done yet,
// so we ignore the file.
_ => {
display_help_tips(&app_ui.packed_file_data_display);
}
}
}
// If it's anything else, then we just show the "Tips" list.
_ => display_help_tips(&app_ui.packed_file_data_display),
}
}
}));
// This allow us to open a PackFile by "Drag&Drop" it into the folder_tree_view.
app_ui.folder_tree_view.connect_drag_data_received(clone!(
app_ui,
settings,
schema,
rpfm_path,
mode,
game_selected,
supported_games,
dependency_database,
pack_file_decoded_extra,
pack_file_decoded => move |_, _, _, _, selection_data, info, _| {
// If the current PackFile has been changed in any way, we pop up the "Are you sure?" message.
if are_you_sure(&app_ui.window, pack_file_decoded.borrow().extra_data.is_modified, false) {
// If we got confirmation...
match info {
0 => {
let pack_file_path = Url::parse(&selection_data.get_uris()[0]).unwrap().to_file_path().unwrap();
// Open the PackFile (or die trying it!).
if let Err(error) = open_packfile(
pack_file_path,
&rpfm_path,
&app_ui,
&settings.borrow(),
&mode,
&schema,
&supported_games.borrow(),
&game_selected,
&dependency_database,
&(false, None),
&pack_file_decoded,
&pack_file_decoded_extra
) { show_dialog(&app_ui.window, false, error.cause()) };
}
_ => show_dialog(&app_ui.window, false, "This type of event is not yet used."),
}
}
}
));
// If we have an argument (we open RPFM by clicking in a PackFile directly)...
if arguments.len() > 1 {
// Get the PackFile's path and...
let pack_file_path = PathBuf::from(&arguments[1]);
// Open the PackFile (or die trying it!).
if let Err(error) = open_packfile(
pack_file_path,
&rpfm_path,
&app_ui,
&settings.borrow(),
&mode,
&schema,
&supported_games.borrow(),
&game_selected,
&dependency_database,
&(false, None),
&pack_file_decoded,
&pack_file_decoded_extra
) { show_dialog(&app_ui.window, false, error.cause()) };
}
}
//-----------------------------------------------------------------------------
// From here, there is code that was in the build_ui function, but it was
// becoming a mess to maintain, and was needed to be split.
//-----------------------------------------------------------------------------
/// This function opens the PackFile at the provided Path, and sets all the stuff needed, depending
/// on the situation.
///
/// On success it: loads the PackFile into `pack_file_decoded`, rebuilds the `TreeView`,
/// syncs the "PackFile Type" menu entry, picks the right `GameSelected` (from the "MyMod"
/// folder when `is_my_mod.0` is true, otherwise from the PackFile header's id), reloads the
/// `dependency_database` and the `Schema`, and sets the current "Operational Mode".
///
/// NOTE(review): parameter types are project-declared; descriptions are inferred from usage here.
/// - `pack_file_path`: path of the PackFile to open.
/// - `rpfm_path`: RPFM's own base path, used to locate the Schema files.
/// - `app_ui`: all the UI widgets/actions this function updates.
/// - `settings`: current settings (per-game paths).
/// - `mode`: "Operational Mode" slot, set to `MyMod` or `Normal` here.
/// - `schema`: Schema slot, reloaded for the newly selected game.
/// - `supported_games`: static info about every supported game.
/// - `game_selected`: currently selected game, possibly changed here.
/// - `dependency_database`: dependency PackedFiles slot, reloaded here (non-MyMod branch only).
/// - `is_my_mod`: `(true, Some(game_folder_name))` when opening a "MyMod"; `(false, None)` otherwise.
/// - `pack_file_decoded`: destination slot for the opened PackFile.
/// - `pack_file_decoded_extra`: secondary PackFile of the right-side "Data View" (only read here).
fn open_packfile(
    pack_file_path: PathBuf,
    rpfm_path: &PathBuf,
    app_ui: &AppUI,
    settings: &Settings,
    mode: &Rc<RefCell<Mode>>,
    schema: &Rc<RefCell<Option<Schema>>>,
    supported_games: &[GameInfo],
    game_selected: &Rc<RefCell<GameSelected>>,
    dependency_database: &Rc<RefCell<Option<Vec<PackedFile>>>>,
    is_my_mod: &(bool, Option<String>),
    pack_file_decoded: &Rc<RefCell<PackFile>>,
    pack_file_decoded_extra: &Rc<RefCell<PackFile>>,
) -> Result<(), Error> {
    match packfile::open_packfile(pack_file_path.to_path_buf()) {
        Ok(pack_file_opened) => {

            // If there is no secondary PackFile opened using the "Data View" at the right side...
            if pack_file_decoded_extra.borrow().extra_data.file_name.is_empty() {

                // We need to destroy any children that the packed_file_data_display we use may have, cleaning it.
                let children_to_utterly_destroy = app_ui.packed_file_data_display.get_children();
                if !children_to_utterly_destroy.is_empty() {
                    for i in &children_to_utterly_destroy {
                        i.destroy();
                    }
                }

                // Show the "Tips".
                display_help_tips(&app_ui.packed_file_data_display);
            }

            // Get the PackFile into our main PackFile...
            *pack_file_decoded.borrow_mut() = pack_file_opened;

            // Update the Window and the TreeView with his data... (a freshly opened PackFile is unmodified)
            set_modified(false, &app_ui.window, &mut pack_file_decoded.borrow_mut());

            // Clear the `TreeView` before updating it (fixes CTD with borrowed PackFile).
            app_ui.folder_tree_store.clear();

            // Build the `TreeView`.
            update_treeview(
                &app_ui.folder_tree_store,
                &pack_file_decoded.borrow(),
                &app_ui.folder_tree_selection,
                TreeViewOperation::Build,
                &TreePathType::None,
            );

            // We choose the right option, depending on our PackFile (header types 0-4; anything else is "other").
            match pack_file_decoded.borrow().header.pack_file_type {
                0 => app_ui.menu_bar_change_packfile_type.change_state(&"boot".to_variant()),
                1 => app_ui.menu_bar_change_packfile_type.change_state(&"release".to_variant()),
                2 => app_ui.menu_bar_change_packfile_type.change_state(&"patch".to_variant()),
                3 => app_ui.menu_bar_change_packfile_type.change_state(&"mod".to_variant()),
                4 => app_ui.menu_bar_change_packfile_type.change_state(&"movie".to_variant()),
                _ => app_ui.menu_bar_change_packfile_type.change_state(&"other".to_variant()),
            }

            // Disable the "PackFile Management" actions while we reconfigure the game selection below.
            enable_packfile_actions(app_ui, game_selected, false);

            // If it's a "MyMod", we choose the game selected depending on his folder's name.
            if is_my_mod.0 {

                // Set `GameSelected` depending on the folder of the "MyMod".
                // NOTE(review): `is_my_mod.1` is assumed to always be `Some` when `is_my_mod.0` is true.
                let game_name = is_my_mod.1.clone().unwrap();
                game_selected.borrow_mut().change_game_selected(&game_name, &settings.paths.game_paths.iter().filter(|x| x.game == game_name).map(|x| x.path.clone()).collect::<Option<PathBuf>>(), supported_games);
                app_ui.menu_bar_change_game_selected.change_state(&game_name.to_variant());

                // Set the current "Operational Mode" to `MyMod`.
                set_my_mod_mode(app_ui, mode, Some(pack_file_path));
            }

            // If it's not a "MyMod", we choose the new GameSelected depending on what the open mod id is.
            else {

                // Set `GameSelected` depending on the ID of the PackFile.
                match &*pack_file_decoded.borrow().header.id {

                    // "PFH5" is the Warhammer 2 PackFile format.
                    "PFH5" => {
                        game_selected.borrow_mut().change_game_selected("warhammer_2", &settings.paths.game_paths.iter().filter(|x| &x.game == "warhammer_2").map(|x| x.path.clone()).collect::<Option<PathBuf>>(), supported_games);
                        app_ui.menu_bar_change_game_selected.change_state(&"warhammer_2".to_variant());
                    },

                    // "PFH4" (and anything unrecognised) is shared between Warhammer and Attila.
                    "PFH4" | _ => {

                        // If we have Warhammer selected, we keep Warhammer. If we have Attila, we keep Attila.
                        // In any other case, we select Attila by default.
                        match &*(app_ui.menu_bar_change_game_selected.get_state().unwrap().get::<String>().unwrap()) {
                            "warhammer" => {
                                game_selected.borrow_mut().change_game_selected("warhammer", &settings.paths.game_paths.iter().filter(|x| &x.game == "warhammer").map(|x| x.path.clone()).collect::<Option<PathBuf>>(), supported_games);
                                app_ui.menu_bar_change_game_selected.change_state(&"warhammer".to_variant());
                            }
                            "attila" | _ => {
                                game_selected.borrow_mut().change_game_selected("attila", &settings.paths.game_paths.iter().filter(|x| &x.game == "attila").map(|x| x.path.clone()).collect::<Option<PathBuf>>(), supported_games);
                                app_ui.menu_bar_change_game_selected.change_state(&"attila".to_variant());
                            }
                        }
                    },
                }

                // Change the `dependency_database` for that game.
                // NOTE(review): this reload only happens in the non-MyMod branch — confirm that's intended.
                *dependency_database.borrow_mut() = match packfile::open_packfile(game_selected.borrow().game_dependency_packfile_path.to_path_buf()) {
                    Ok(data) => Some(data.data.packed_files),
                    Err(_) => None,
                };

                // Set the current "Operational Mode" to `Normal`.
                set_my_mod_mode(app_ui, mode, None);
            }

            // Re-enable the "PackFile Management" actions for the (possibly new) selected game.
            enable_packfile_actions(app_ui, game_selected, true);

            // Try to load the Schema for this PackFile's game.
            *schema.borrow_mut() = Schema::load(rpfm_path, &supported_games.iter().filter(|x| x.folder_name == *game_selected.borrow().game).map(|x| x.schema.to_owned()).collect::<String>()).ok();

            // Test to see if every DB Table can be decoded.
            // let mut counter = 0;
            // for i in pack_file_decoded.borrow().pack_file_data.packed_files.iter() {
            //     if i.packed_file_path.starts_with(&["db".to_owned()]) {
            //         if let Some(ref schema) = *schema {
            //             if let Err(_) = DB::read(&i.packed_file_data, &i.packed_file_path[1], &schema) {
            //                 match DBHeader::read(&i.packed_file_data) {
            //                     Ok(db_header) => {
            //                         if db_header.0.packed_file_header_packed_file_entry_count > 0 {
            //                             counter += 1;
            //                             println!("{}, {:?}", counter, i.packed_file_path);
            //                         }
            //                     }
            //                     Err(_) => println!("Error in {:?}", i.packed_file_path),
            //                 }
            //             }
            //         }
            //     }
            // }

            // Return success.
            Ok(())
        }

        // In case of error while opening the PackFile, return the error.
        Err(error) => Err(error),
    }
}
/// This function takes care of the re-creation of the "MyMod" list in the following moments:
/// - At the start of the program (here).
/// - At the end of MyMod deletion.
/// - At the end of MyMod creation.
/// - At the end of settings update.
///
/// It scans `settings.paths.my_mods_base_path` for one folder per supported game and, inside
/// each, for `*.pack` files. Every PackFile found gets a menu entry wired to a `SimpleAction`
/// named "my-mod-open-<game>-<index>" that opens it as a "MyMod".
///
/// NOTE(review): parameter types are project-declared; they are the shared app state that the
/// generated actions forward to `open_packfile`.
fn build_my_mod_menu(
    application: &Application,
    app_ui: &AppUI,
    settings: &Settings,
    mode: &Rc<RefCell<Mode>>,
    schema: &Rc<RefCell<Option<Schema>>>,
    game_selected: &Rc<RefCell<GameSelected>>,
    supported_games: &Rc<RefCell<Vec<GameInfo>>>,
    dependency_database: &Rc<RefCell<Option<Vec<PackedFile>>>>,
    pack_file_decoded: &Rc<RefCell<PackFile>>,
    pack_file_decoded_extra: &Rc<RefCell<PackFile>>,
    rpfm_path: &PathBuf,
) {
    // First, we clear the list, so we can rebuild it from scratch.
    app_ui.my_mod_list.remove_all();

    // If we have the "MyMod" path configured...
    if let Some(ref my_mod_base_path) = settings.paths.my_mods_base_path {

        // And can get without errors the folders in that path...
        if let Ok(game_folder_list) = my_mod_base_path.read_dir() {

            // We get all the games that have mods created (Folder exists and has at least a *.pack file inside).
            for game_folder in game_folder_list {

                // If the file/folder is valid, we see if it's one of our supported game's folder.
                if let Ok(game_folder) = game_folder {

                    let supported_folders = supported_games.borrow().iter().map(|x| x.folder_name.to_owned()).collect::<Vec<String>>();
                    if game_folder.path().is_dir() && supported_folders.contains(&game_folder.file_name().to_string_lossy().as_ref().to_owned()) {

                        // We create that game's menu here.
                        let game_submenu: Menu = Menu::new();
                        let game_folder_name = game_folder.file_name().to_string_lossy().as_ref().to_owned();

                        // If there were no errors while reading the path...
                        if let Ok(game_folder_files) = game_folder.path().read_dir() {

                            // Index to count the valid packfiles; it's part of each entry's action name.
                            let mut valid_mod_index = 0;

                            // We need to sort these files, so they appear sorted in the menu.
                            // FIXME: remove this unwrap.
                            let mut game_folder_files_sorted: Vec<_> = game_folder_files.map(|res| res.unwrap().path()).collect();
                            game_folder_files_sorted.sort();

                            // We get all the stuff in that game's folder...
                            for game_folder_file in game_folder_files_sorted {

                                // And it's a file that ends in .pack...
                                if game_folder_file.is_file() &&
                                    game_folder_file.extension().unwrap_or_else(||OsStr::new("invalid")).to_string_lossy() =="pack" {

                                    // That means our game_folder is a valid folder and it needs to be added to the menu.
                                    let mod_name = game_folder_file.file_name().unwrap_or_else(||OsStr::new("invalid")).to_string_lossy().as_ref().to_owned();
                                    let mod_action = &*format!("my-mod-open-{}-{}", game_folder_name, valid_mod_index);

                                    // GTK have... behavior that needs to be changed when showing "_":
                                    // a literal "_" in a menu label marks a mnemonic, so we escape it as "__".
                                    let mod_name_visual = mod_name.replace('_', "__");
                                    game_submenu.append(Some(&*mod_name_visual), Some(&*format!("app.{}", mod_action)));

                                    // We create the action for the new button.
                                    let open_mod = SimpleAction::new(mod_action, None);
                                    application.add_action(&open_mod);

                                    // And when activating the mod button, we open it and set it as selected (chaos incoming).
                                    // The `clone!` macro clones the shared state into the closure.
                                    let game_folder_name = Rc::new(RefCell::new(game_folder_name.clone()));
                                    open_mod.connect_activate(clone!(
                                        app_ui,
                                        settings,
                                        schema,
                                        mode,
                                        game_folder_name,
                                        rpfm_path,
                                        supported_games,
                                        game_selected,
                                        dependency_database,
                                        pack_file_decoded_extra,
                                        pack_file_decoded => move |_,_| {

                                            // If the current PackFile has been changed in any way, we pop up the "Are you sure?" message.
                                            if are_you_sure(&app_ui.window, pack_file_decoded.borrow().extra_data.is_modified, false) {

                                                // If we got confirmation...
                                                let pack_file_path = game_folder_file.to_path_buf();

                                                // Open the PackFile (or die trying it!). `(true, Some(...))` marks it as a "MyMod".
                                                if let Err(error) = open_packfile(
                                                    pack_file_path,
                                                    &rpfm_path,
                                                    &app_ui,
                                                    &settings,
                                                    &mode,
                                                    &schema,
                                                    &supported_games.borrow(),
                                                    &game_selected,
                                                    &dependency_database,
                                                    &(true, Some(game_folder_name.borrow().to_owned())),
                                                    &pack_file_decoded,
                                                    &pack_file_decoded_extra
                                                ) { show_dialog(&app_ui.window, false, error.cause()) };
                                            }
                                        }
                                    ));

                                    valid_mod_index += 1;
                                }
                            }
                        }

                        // Only if the submenu has items, we add it to the big menu.
                        if game_submenu.get_n_items() > 0 {
                            let game_submenu_name = supported_games.borrow().iter().filter(|x| game_folder_name == x.folder_name).map(|x| x.display_name.to_owned()).collect::<String>();
                            app_ui.my_mod_list.append_submenu(Some(&*format!("{}", game_submenu_name)), &game_submenu);
                        }
                    }
                }
            }
        }
    }
}
/// This function serves as a common function for all the "Patch SiegeAI" buttons from "Special Stuff".
fn patch_siege_ai(
app_ui: &AppUI,
pack_file_decoded: &Rc<RefCell<PackFile>>,
) {
// First, we try to patch the PackFile. If there are no errors, we save the result in a tuple.
// Then we check that tuple and, if it's a success, we save the PackFile and update the TreeView.
let mut sucessful_patching = (false, String::new());
match packfile::patch_siege_ai(&mut *pack_file_decoded.borrow_mut()) {
Ok(result) => sucessful_patching = (true, result),
Err(error) => show_dialog(&app_ui.window, false, error.cause())
}
if sucessful_patching.0 {
let mut success = false;
match packfile::save_packfile( &mut *pack_file_decoded.borrow_mut(), None) {
Ok(_) => {
success = true;
show_dialog(&app_ui.window, true, format!("{}\n\n{}", sucessful_patching.1, "PackFile succesfully saved."));
},
Err(error) => show_dialog(&app_ui.window, false, error.cause())
}
// If it succeed...
if success {
// Clear the `TreeView` before updating it (fixes CTD with borrowed PackFile).
app_ui.folder_tree_store.clear();
// TODO: Make this update, not rebuild.
// Rebuild the `TreeView`.
update_treeview(
&app_ui.folder_tree_store,
&*pack_file_decoded.borrow(),
&app_ui.folder_tree_selection,
TreeViewOperation::Build,
&TreePathType::None,
);
}
}
}
/// This function serves as a common function for all the "Generate Dependency Pack" buttons from "Special Stuff".
fn generate_dependency_pack(
app_ui: &AppUI,
rpfm_path: &PathBuf,
game_selected: &Rc<RefCell<GameSelected>>,
) {
// Get the data folder of game_selected and try to create our dependency PackFile.
match game_selected.borrow().game_data_path {
Some(ref path) => {
let mut data_pack_path = path.to_path_buf();
data_pack_path.push("data.pack");
match packfile::open_packfile(data_pack_path) {
Ok(ref mut data_packfile) => {
data_packfile.data.packed_files.retain(|packed_file| packed_file.path.starts_with(&["db".to_owned()]));
data_packfile.header.packed_file_count = data_packfile.data.packed_files.len() as u32;
// Just in case the folder doesn't exists, we try to create it.
let mut dep_packs_path = rpfm_path.clone();
dep_packs_path.push("dependency_packs");
match DirBuilder::new().create(&dep_packs_path) { Ok(_) | Err(_) => {}, }
let pack_file_path = game_selected.borrow().game_dependency_packfile_path.to_path_buf();
match packfile::save_packfile(data_packfile, Some(pack_file_path)) {
Ok(_) => show_dialog(&app_ui.window, true, "Dependency pack created. Remember to re-create it if you update the game ;)."),
Err(error) => show_dialog(&app_ui.window, false, format_err!("Error: generated dependency pack couldn't be saved. {:?}", error)),
}
}
Err(_) => show_dialog(&app_ui.window, false, "Error: data.pack couldn't be open.")
}
},
None => show_dialog(&app_ui.window, false, "Error: data path of the game not found.")
}
}
/// Common handler for every "Create Prefab" button under "Special Stuff".
///
/// Looks for "catchment*.bin" PackedFiles inside the exported map folder
/// ("terrain/tiles/battle/_assembly_kit") and, if any are found, locks the main
/// window and opens the "New Prefabs" window for them. Otherwise it reports that
/// there is nothing suitable to turn into a prefab.
fn create_prefab(
    application: &Application,
    app_ui: &AppUI,
    game_selected: &Rc<RefCell<GameSelected>>,
    pack_file_decoded: &Rc<RefCell<PackFile>>,
) {

    // Folder where the Assembly Kit drops the exported map layers.
    let exported_maps_folder = ["terrain".to_owned(), "tiles".to_owned(), "battle".to_owned(), "_assembly_kit".to_owned()];

    // Indexes of every exported "catchment" layer inside that folder.
    let prefab_catchments: Vec<usize> = pack_file_decoded.borrow().data.packed_files
        .iter()
        .enumerate()
        .filter(|&(_, packed_file)| {
            packed_file.path.starts_with(&exported_maps_folder) &&
            packed_file.path.last().unwrap().starts_with("catchment") &&
            packed_file.path.last().unwrap().ends_with(".bin")
        })
        .map(|(index, _)| index)
        .collect();

    // If there are no suitable PackedFiles, just tell the user.
    if prefab_catchments.is_empty() {
        show_dialog(&app_ui.window, false, "There are no catchment PackedFiles in this PackFile.");
    }

    // Otherwise, lock the main window until the prefabs are processed, and open the "New Prefabs" window.
    else {
        app_ui.window.set_sensitive(false);
        NewPrefabWindow::create_new_prefab_window(
            &app_ui,
            application,
            game_selected,
            pack_file_decoded,
            &prefab_catchments
        );
    }
}
/// This function is used to set the current "Operational Mode". It not only sets the "Operational Mode",
/// but it also takes care of disabling or enabling all the signals related with the "MyMod" Mode.
/// If `my_mod_path` is None, we want to set the `Normal` mode. Otherwise set the `MyMod` mode.
///
/// The path, when present, is expected to look like ".../<game_folder_name>/<mod_name>.pack":
/// the file name becomes `mod_name` and the parent folder's name becomes `game_folder_name`.
fn set_my_mod_mode(
    app_ui: &AppUI,
    mode: &Rc<RefCell<Mode>>,
    my_mod_path: Option<PathBuf>,
) {
    // Check if we provided a "my_mod_path".
    match my_mod_path {

        // If we have a `my_mod_path`, switch to "MyMod" mode.
        // We own the path here, so bind it mutably instead of cloning it (the old code
        // did a redundant `.clone()` of an already-owned value).
        Some(mut path) => {

            // The file name of the PackFile is the "MyMod" name.
            let mod_name = path.file_name().unwrap().to_string_lossy().as_ref().to_owned();

            // The parent folder's name tells us which game this "MyMod" belongs to.
            path.pop();
            let game_folder_name = path.file_name().unwrap().to_string_lossy().as_ref().to_owned();

            // Set the current mode to `MyMod`.
            *mode.borrow_mut() = Mode::MyMod {
                game_folder_name,
                mod_name,
            };

            // Enable the controls for "MyMod".
            app_ui.menu_bar_my_mod_delete.set_enabled(true);
            app_ui.menu_bar_my_mod_install.set_enabled(true);
            app_ui.menu_bar_my_mod_uninstall.set_enabled(true);
        }

        // If `None` has been provided, switch back to `Normal` mode.
        None => {

            // Set the current mode to `Normal`.
            *mode.borrow_mut() = Mode::Normal;

            // Disable all "MyMod" related actions, except "New MyMod".
            app_ui.menu_bar_my_mod_delete.set_enabled(false);
            app_ui.menu_bar_my_mod_install.set_enabled(false);
            app_ui.menu_bar_my_mod_uninstall.set_enabled(false);
        }
    }
}
/// Enables (`enable == true`) or disables the `MenuBar` actions that only make sense
/// while a PackFile is open.
///
/// The "PackFile" submenu actions follow the flag directly. The per-game "Special Stuff"
/// actions are all cleared when disabling, and only the selected game's ones are turned
/// on when enabling.
fn enable_packfile_actions(app_ui: &AppUI, game_selected: &Rc<RefCell<GameSelected>>, enable: bool) {

    // "PackFile" submenu actions follow the flag for every game.
    app_ui.menu_bar_save_packfile.set_enabled(enable);
    app_ui.menu_bar_save_packfile_as.set_enabled(enable);
    app_ui.menu_bar_change_packfile_type.set_enabled(enable);

    // When disabling, clear the "Special Stuff" actions of every game and we are done.
    if !enable {

        // Warhammer 2 actions...
        app_ui.menu_bar_generate_dependency_pack_wh2.set_enabled(false);
        app_ui.menu_bar_patch_siege_ai_wh2.set_enabled(false);
        app_ui.menu_bar_create_map_prefab_wh2.set_enabled(false);

        // Warhammer actions...
        app_ui.menu_bar_generate_dependency_pack_wh.set_enabled(false);
        app_ui.menu_bar_patch_siege_ai_wh.set_enabled(false);
        app_ui.menu_bar_create_map_prefab_wh.set_enabled(false);

        // Attila actions...
        app_ui.menu_bar_generate_dependency_pack_att.set_enabled(false);
        return;
    }

    // When enabling, only the "Special Stuff" actions of the currently selected game go on.
    match &*game_selected.borrow().game {
        "warhammer_2" => {
            app_ui.menu_bar_generate_dependency_pack_wh2.set_enabled(true);
            app_ui.menu_bar_patch_siege_ai_wh2.set_enabled(true);
            app_ui.menu_bar_create_map_prefab_wh2.set_enabled(true);
        },
        "warhammer" => {
            app_ui.menu_bar_generate_dependency_pack_wh.set_enabled(true);
            app_ui.menu_bar_patch_siege_ai_wh.set_enabled(true);
            app_ui.menu_bar_create_map_prefab_wh.set_enabled(true);
        },
        "attila" => app_ui.menu_bar_generate_dependency_pack_att.set_enabled(true),
        _ => {},
    }
}
/// This function concatenates the last two messages of the status_bar and shows them like one.
///
/// It reads the newest message's text, pops it so the previous message resurfaces, reads that
/// one too, and pushes "<older> <newer>" back under the same context id.
/// NOTE(review): the `unwrap()`s assume the status bar's message area always holds at least one
/// `Label` child with text — confirm GTK guarantees this at every call site.
fn concatenate_check_update_messages(status_bar: &Statusbar) {
    // Get the ID of all messages passed to the status_bar with the helper function.
    // ("Yekaterina" is just the arbitrary context-description string used app-wide here.)
    let context_id = status_bar.get_context_id("Yekaterina");

    // Get the current (newest) text, if any.
    let current_text = status_bar.get_message_area().unwrap().get_children()[0].clone().downcast::<Label>().unwrap().get_text().unwrap();

    // Remove it from the status_bar, so the previous message becomes visible.
    status_bar.pop(context_id);

    // Get the new current (i.e. older) text, if any.
    let old_text = status_bar.get_message_area().unwrap().get_children()[0].clone().downcast::<Label>().unwrap().get_text().unwrap();

    // Concatenate both texts and push them as a single message.
    let new_text = format!("{} {}", old_text, current_text);
    status_bar.push(context_id, &new_text);
}
/// Main function: builds the GTK application and hands control to its main loop.
fn main() {

    // Build the application object. `NON_UNIQUE` allows several RPFM instances at once.
    let application = Application::new("com.github.frodo45127.rpfm", gio::ApplicationFlags::NON_UNIQUE).expect("Initialization failed...");

    // The whole UI is built once GTK fires the "startup" signal.
    application.connect_startup(move |app| build_ui(app));

    // "activate" still needs a handler, even though all the work happens on startup.
    application.connect_activate(|_| {});

    // Run the GTK main loop, forwarding any command-line arguments.
    let arguments = args().collect::<Vec<_>>();
    application.run(&arguments);
}
|
use cosmwasm_std::{
from_binary, log, to_binary, Api, Binary, CanonicalAddr, Decimal, Env, Extern, HandleResponse,
HandleResult, HumanAddr, InitResponse, MigrateResponse, MigrateResult, Order, Querier,
StdError, StdResult, Storage, Uint128,
};
use crate::{
bond::bond,
compound::{compound, stake},
state::{read_config, state_store, store_config, Config, PoolInfo, State},
};
use cw20::Cw20ReceiveMsg;
use crate::bond::{deposit_spec_reward, query_reward_info, unbond, withdraw};
use crate::state::{pool_info_read, pool_info_store, read_state};
use spectrum_protocol::anchor_farm::{QueryMsg, PoolsResponse, HandleMsg, ConfigInfo, Cw20HookMsg, PoolItem, StateInfo, MigrateMsg};
/// Checks that a fee/ratio parameter is within the inclusive range [0, 1].
///
/// # Errors
/// Returns a `StdError::GenericErr` with the message "<field> must be 0 to 1"
/// when `value` exceeds `Decimal::one()`.
fn validate_percentage(value: Decimal, field: &str) -> StdResult<()> {
    if value > Decimal::one() {
        // `format!` instead of `String + &str` concatenation; the message text is
        // kept byte-identical since callers/tests may match on it.
        Err(StdError::generic_err(format!("{} must be 0 to 1", field)))
    } else {
        Ok(())
    }
}
/// Contract instantiation entry point: validates the fee ratios, persists the
/// initial `Config`, and seeds the global `State` with zeroed counters.
pub fn init<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    msg: ConfigInfo,
) -> StdResult<InitResponse> {
    // Every fee ratio must lie within [0, 1].
    validate_percentage(msg.community_fee, "community_fee")?;
    validate_percentage(msg.platform_fee, "platform_fee")?;
    validate_percentage(msg.controller_fee, "controller_fee")?;

    // Copy the Api handle out so it can be used while `deps.storage` is mutably borrowed.
    let api = deps.api;
    store_config(
        &mut deps.storage,
        &Config {
            owner: api.canonical_address(&msg.owner)?,
            terraswap_factory: api.canonical_address(&msg.terraswap_factory)?,
            spectrum_token: api.canonical_address(&msg.spectrum_token)?,
            spectrum_gov: api.canonical_address(&msg.spectrum_gov)?,
            anchor_token: api.canonical_address(&msg.anchor_token)?,
            anchor_staking: api.canonical_address(&msg.anchor_staking)?,
            anchor_gov: api.canonical_address(&msg.anchor_gov)?,
            // Optional addresses fall back to the empty canonical address.
            platform: match msg.platform {
                Some(platform) => api.canonical_address(&platform)?,
                None => CanonicalAddr::default(),
            },
            controller: match msg.controller {
                Some(controller) => api.canonical_address(&controller)?,
                None => CanonicalAddr::default(),
            },
            base_denom: msg.base_denom,
            community_fee: msg.community_fee,
            platform_fee: msg.platform_fee,
            controller_fee: msg.controller_fee,
            deposit_fee: msg.deposit_fee,
            lock_start: msg.lock_start,
            lock_end: msg.lock_end,
        },
    )?;

    // Seed the global state: no shares, no weight, no earnings yet.
    state_store(&mut deps.storage).save(&State {
        contract_addr: api.canonical_address(&env.contract.address)?,
        previous_spec_share: Uint128::zero(),
        spec_share_index: Decimal::zero(),
        total_farm_share: Uint128::zero(),
        total_weight: 0u32,
        earning: Uint128::zero(),
    })?;

    Ok(InitResponse::default())
}
/// Top-level execute dispatcher: routes each `HandleMsg` variant to its
/// handler.
pub fn handle<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    msg: HandleMsg,
) -> StdResult<HandleResponse> {
    match msg {
        // CW20 transfer carrying an embedded hook message (bonding).
        HandleMsg::receive(msg) => receive_cw20(deps, env, msg),
        HandleMsg::update_config {
            owner,
            platform,
            controller,
            community_fee,
            platform_fee,
            controller_fee,
            deposit_fee,
            lock_start,
            lock_end,
        } => update_config(
            deps,
            env,
            owner,
            platform,
            controller,
            community_fee,
            platform_fee,
            controller_fee,
            deposit_fee,
            lock_start,
            lock_end,
        ),
        HandleMsg::register_asset {
            asset_token,
            staking_token,
            weight,
            auto_compound,
        } => register_asset(deps, env, asset_token, staking_token, weight, auto_compound),
        HandleMsg::unbond {
            asset_token,
            amount,
        } => unbond(deps, env, asset_token, amount),
        HandleMsg::withdraw { asset_token } => withdraw(deps, env, asset_token),
        HandleMsg::stake { asset_token } => stake(deps, env, asset_token),
        HandleMsg::compound {} => compound(deps, env)
    }
}
/// Entry point for CW20 `send` hooks: decodes the embedded message and
/// forwards bonding requests.
pub fn receive_cw20<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    cw20_msg: Cw20ReceiveMsg,
) -> HandleResult {
    // A token transfer without an attached hook payload is rejected.
    let msg = match cw20_msg.msg {
        Some(msg) => msg,
        None => return Err(StdError::generic_err("data should be given")),
    };
    match from_binary(&msg)? {
        Cw20HookMsg::bond {
            staker_addr,
            asset_token,
            compound_rate,
        } => bond(
            deps,
            env,
            // The staker defaults to whoever sent the tokens.
            staker_addr.unwrap_or(cw20_msg.sender),
            asset_token,
            cw20_msg.amount,
            compound_rate,
        ),
    }
}
/// Owner-only update of any subset of the configuration; `None` parameters
/// leave the corresponding field unchanged. Fee ratios are re-validated.
pub fn update_config<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    owner: Option<HumanAddr>,
    platform: Option<HumanAddr>,
    controller: Option<HumanAddr>,
    community_fee: Option<Decimal>,
    platform_fee: Option<Decimal>,
    controller_fee: Option<Decimal>,
    deposit_fee: Option<Decimal>,
    lock_start: Option<u64>,
    lock_end: Option<u64>,
) -> StdResult<HandleResponse> {
    let mut config: Config = read_config(&deps.storage)?;
    // Only the current owner may reconfigure the contract.
    if deps.api.canonical_address(&env.message.sender)? != config.owner {
        return Err(StdError::unauthorized());
    }
    if let Some(owner) = owner {
        config.owner = deps.api.canonical_address(&owner)?;
    }
    if let Some(platform) = platform {
        config.platform = deps.api.canonical_address(&platform)?;
    }
    if let Some(controller) = controller {
        config.controller = deps.api.canonical_address(&controller)?;
    }
    // Each fee is validated to be in 0..=1 before being stored.
    if let Some(community_fee) = community_fee {
        validate_percentage(community_fee, "community_fee")?;
        config.community_fee = community_fee;
    }
    if let Some(platform_fee) = platform_fee {
        validate_percentage(platform_fee, "platform_fee")?;
        config.platform_fee = platform_fee;
    }
    if let Some(controller_fee) = controller_fee {
        validate_percentage(controller_fee, "controller_fee")?;
        config.controller_fee = controller_fee;
    }
    if let Some(deposit_fee) = deposit_fee {
        validate_percentage(deposit_fee, "deposit_fee")?;
        config.deposit_fee = deposit_fee;
    }
    if let Some(lock_start) = lock_start {
        config.lock_start = lock_start;
    }
    if let Some(lock_end) = lock_end {
        config.lock_end = lock_end;
    }
    store_config(&mut deps.storage, &config)?;
    Ok(HandleResponse {
        messages: vec![],
        log: vec![log("action", "update_config")],
        data: None,
    })
}
/// Owner-only registration of the staking asset and its reward weight.
/// This farm supports exactly one registered asset.
pub fn register_asset<S: Storage, A: Api, Q: Querier>(
    deps: &mut Extern<S, A, Q>,
    env: Env,
    asset_token: HumanAddr,
    staking_token: HumanAddr,
    weight: u32,
    auto_compound: bool,
) -> HandleResult {
    let config: Config = read_config(&deps.storage)?;
    let asset_token_raw = deps.api.canonical_address(&asset_token)?;
    if config.owner != deps.api.canonical_address(&env.message.sender)? {
        return Err(StdError::unauthorized());
    }
    // Reject a second registration: only one pool may exist.
    let pool_count = pool_info_read(&deps.storage)
        .range(None, None, Order::Descending).count();
    if pool_count >= 1 {
        return Err(StdError::generic_err("Already registered one asset"))
    }
    // Settle pending SPEC rewards before the weight change takes effect.
    let mut state = read_state(&deps.storage)?;
    deposit_spec_reward(deps, &mut state, &config, env.block.height, false)?;
    // Load the existing pool, or start from a zeroed PoolInfo.
    let mut pool_info = pool_info_read(&deps.storage)
        .may_load(asset_token_raw.as_slice())?
        .unwrap_or_else(|| PoolInfo {
            staking_token: deps.api.canonical_address(&staking_token).unwrap(),
            total_auto_bond_share: Uint128::zero(),
            total_stake_bond_share: Uint128::zero(),
            total_stake_bond_amount: Uint128::zero(),
            weight: 0u32,
            auto_compound: false,
            farm_share: Uint128::zero(),
            farm_share_index: Decimal::zero(),
            state_spec_share_index: state.spec_share_index,
            auto_spec_share_index: Decimal::zero(),
            stake_spec_share_index: Decimal::zero(),
            reinvest_allowance: Uint128::zero(),
        });
    // Keep the aggregate weight consistent with this pool's new weight.
    state.total_weight = state.total_weight + weight - pool_info.weight;
    pool_info.weight = weight;
    pool_info.auto_compound = auto_compound;
    pool_info_store(&mut deps.storage).save(&asset_token_raw.as_slice(), &pool_info)?;
    state_store(&mut deps.storage).save(&state)?;
    Ok(HandleResponse {
        messages: vec![],
        log: vec![
            log("action", "register_asset"),
            log("asset_token", asset_token.as_str()),
        ],
        data: None,
    })
}
/// Read-only query dispatcher: serializes each sub-query's answer to binary.
pub fn query<S: Storage, A: Api, Q: Querier>(
    deps: &Extern<S, A, Q>,
    msg: QueryMsg,
) -> StdResult<Binary> {
    match msg {
        QueryMsg::config {} => to_binary(&query_config(deps)?),
        QueryMsg::pools {} => to_binary(&query_pools(deps)?),
        QueryMsg::reward_info {
            staker_addr,
            height,
        } => to_binary(&query_reward_info(deps, staker_addr, height)?),
        QueryMsg::state {} => to_binary(&query_state(deps)?),
    }
}
/// Returns the stored configuration with all canonical addresses converted
/// back to human-readable form.
pub fn query_config<S: Storage, A: Api, Q: Querier>(
    deps: &Extern<S, A, Q>,
) -> StdResult<ConfigInfo> {
    let config = read_config(&deps.storage)?;
    let resp = ConfigInfo {
        owner: deps.api.human_address(&config.owner)?,
        terraswap_factory: deps.api.human_address(&config.terraswap_factory)?,
        spectrum_token: deps.api.human_address(&config.spectrum_token)?,
        anchor_token: deps.api.human_address(&config.anchor_token)?,
        anchor_staking: deps.api.human_address(&config.anchor_staking)?,
        spectrum_gov: deps.api.human_address(&config.spectrum_gov)?,
        anchor_gov: deps.api.human_address(&config.anchor_gov)?,
        // The default (empty) canonical address marks "not set" (see init),
        // and maps back to None here.
        platform: if config.platform == CanonicalAddr::default() {
            None
        } else {
            Some(deps.api.human_address(&config.platform)?)
        },
        controller: if config.controller == CanonicalAddr::default() {
            None
        } else {
            Some(deps.api.human_address(&config.controller)?)
        },
        base_denom: config.base_denom,
        community_fee: config.community_fee,
        platform_fee: config.platform_fee,
        controller_fee: config.controller_fee,
        deposit_fee: config.deposit_fee,
        lock_start: config.lock_start,
        lock_end: config.lock_end,
    };
    Ok(resp)
}
/// Lists every registered pool, converting stored canonical addresses back to
/// human-readable form.
fn query_pools<S: Storage, A: Api, Q: Querier>(deps: &Extern<S, A, Q>) -> StdResult<PoolsResponse> {
    let pools = pool_info_read(&deps.storage)
        .range(None, None, Order::Descending)
        .map(|item| {
            // Storage keys are the raw canonical asset-token addresses.
            let (asset_token, pool_info) = item?;
            Ok(PoolItem {
                asset_token: deps.api.human_address(&CanonicalAddr::from(asset_token))?,
                staking_token: deps.api.human_address(&pool_info.staking_token)?,
                weight: pool_info.weight,
                auto_compound: pool_info.auto_compound,
                total_auto_bond_share: pool_info.total_auto_bond_share,
                total_stake_bond_share: pool_info.total_stake_bond_share,
                total_stake_bond_amount: pool_info.total_stake_bond_amount,
                farm_share: pool_info.farm_share,
                state_spec_share_index: pool_info.state_spec_share_index,
                farm_share_index: pool_info.farm_share_index,
                stake_spec_share_index: pool_info.stake_spec_share_index,
                auto_spec_share_index: pool_info.auto_spec_share_index,
                reinvest_allowance: pool_info.reinvest_allowance,
            })
        })
        // Short-circuits on the first storage/address error.
        .collect::<StdResult<Vec<PoolItem>>>()?;
    Ok(PoolsResponse { pools })
}
/// Returns a snapshot of the global reward-share accumulators.
fn query_state<S: Storage, A: Api, Q: Querier>(deps: &Extern<S, A, Q>) -> StdResult<StateInfo> {
    let state = read_state(&deps.storage)?;
    Ok(StateInfo {
        spec_share_index: state.spec_share_index,
        previous_spec_share: state.previous_spec_share,
        total_farm_share: state.total_farm_share,
        total_weight: state.total_weight,
    })
}
/// Contract migration entry point; no state transformation is required.
pub fn migrate<S: Storage, A: Api, Q: Querier>(
    _deps: &mut Extern<S, A, Q>,
    _env: Env,
    _msg: MigrateMsg,
) -> MigrateResult {
    Ok(MigrateResponse::default())
}
|
use std::{
fmt::{Debug, Display},
str::FromStr,
};
use lazy_static::lazy_static;
use never::Never;
use regex::Regex;
use super::{Field, index::Index};
/// Canonical classification of a catalogued document.
///
/// Variant names are Portuguese document categories (identifiers written
/// without accents); `Other` preserves a non-standard type string verbatim.
#[derive(PartialEq, Eq, Hash)]
pub enum DocumentType {
    Almanaque,
    Anais,
    Anuario,
    Artigo,
    Ata,
    Atlas,
    Biografia,
    Boletim,
    Carta,
    CartaTopografica,
    Catalogo,
    Conferencia,
    Dicionario,
    Discurso,
    DocumentoJuridico,
    DocumentoOficial,
    Estatuto,
    Folheto,
    Jornal,
    Literatura,
    Livro,
    Manifesto,
    Manuscristo,
    Mapa,
    Medalha,
    Memorias,
    Moeda,
    Obra,
    Periodico,
    Poesia,
    Relatorio,
    Revista,
    Tese,
    /// Free-form type that did not match any known category.
    Other(String),
}
/// Canonical human-readable (accented) name for each variant; `Other` yields
/// the stored string unchanged.
impl AsRef<str> for DocumentType {
    fn as_ref(&self) -> &str {
        match self {
            DocumentType::Almanaque => "Almanaque",
            DocumentType::Anais => "Anais",
            DocumentType::Anuario => "Anuário",
            DocumentType::Artigo => "Artigo",
            DocumentType::Ata => "Ata",
            DocumentType::Atlas => "Atlas",
            DocumentType::Biografia => "Biografia",
            DocumentType::Boletim => "Boletim",
            DocumentType::Carta => "Carta",
            DocumentType::CartaTopografica => "Carta Topográfica",
            DocumentType::Catalogo => "Catalogo",
            DocumentType::Conferencia => "Conferência",
            DocumentType::Dicionario => "Dicionário",
            DocumentType::Discurso => "Discurso",
            DocumentType::DocumentoJuridico => "Documento Juridico",
            DocumentType::DocumentoOficial => "Documento Oficial",
            DocumentType::Estatuto => "Estatuto",
            DocumentType::Folheto => "Folheto",
            DocumentType::Jornal => "Jornal",
            DocumentType::Literatura => "Literatura",
            DocumentType::Livro => "Livro",
            DocumentType::Manifesto => "Manifesto",
            DocumentType::Manuscristo => "Manuscristo",
            DocumentType::Mapa => "Mapa",
            DocumentType::Medalha => "Medalha",
            DocumentType::Memorias => "Memórias",
            DocumentType::Moeda => "Moeda",
            DocumentType::Obra => "Obra",
            DocumentType::Periodico => "Periódico",
            DocumentType::Poesia => "Poesia",
            DocumentType::Relatorio => "Relatório",
            DocumentType::Revista => "Revista",
            DocumentType::Tese => "Tese",
            DocumentType::Other(s) => s,
        }
    }
}
impl Display for DocumentType {
    /// Known variants print their canonical name; `Other` strings are printed
    /// with their first character uppercased.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DocumentType::Other(s) => match s.chars().next() {
                // Empty string: nothing to print.
                None => Ok(()),
                Some(first) => {
                    // Split after the first char's UTF-8 length, so multi-byte
                    // characters are handled correctly.
                    let tail = &s[first.len_utf8()..];
                    write!(f, "{}{}", first.to_uppercase(), tail)
                }
            },
            _ => write!(f, "{}", self.as_ref()),
        }
    }
}
impl Debug for DocumentType {
#[inline]
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
<Self as Display>::fmt(&self, f)
}
}
impl PartialOrd for DocumentType {
    /// Delegates to `Ord`, which is total, so this never returns `None`.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for DocumentType {
    /// Orders types lexicographically by their canonical `as_ref` string.
    #[inline]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_ref().cmp(other.as_ref())
    }
}
impl DocumentType {
    /// Derives this type's index marker from its canonical name.
    fn index(&self) -> Index {
        self.as_ref().into()
    }
}
impl FromStr for DocumentType {
    type Err = Never;
    /// Normalizes the many free-form spellings (including frequent typos)
    /// found in the source data into canonical variants. Unrecognized strings
    /// are preserved verbatim in `Other`; parsing never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(match s {
            "periódico" | "jornais e revistas" | "jornal e revistas" => Self::Periodico,
            "jornal" | "jornais" => Self::Jornal,
            "revista" | "revivsta" | "revistas" => Self::Revista,
            "livro" | "livros" => Self::Livro,
            // NOTE: "documento oficial" was listed twice in this or-pattern;
            // the unreachable duplicate alternative has been removed.
            "documento oficial"
            | "documentos oficiais"
            | "documento official" => Self::DocumentoOficial,
            "relatório" | "relatorio" => Self::Relatorio,
            "boletim" => Self::Boletim,
            "medalha comemorativa" | "medalha" => Self::Medalha,
            "catálogo" | "catalogo" => Self::Catalogo,
            "anuário" | "anuario" => Self::Anuario,
            "manuscrito" | "manuscritos" | "manuscristo" => Self::Manuscristo,
            "anais" => Self::Anais,
            "discurso" | "discuro" | "discurso oficial" => Self::Discurso,
            "dicionário" | "dicionario" => Self::Dicionario,
            "moeda" => Self::Moeda,
            "ata" | "acta" => Self::Ata,
            "mapa" | "mapas" => Self::Mapa,
            "carta" | "cartas" => Self::Carta,
            "obra" => Self::Obra,
            "biografia" => Self::Biografia,
            "almanaque" => Self::Almanaque,
            "estatuto" | "estatutos" => Self::Estatuto,
            "conferência" => Self::Conferencia,
            "folheto" => Self::Folheto,
            "manifesto" | "folhetos" => Self::Manifesto,
            "tese" => Self::Tese,
            "atlas" => Self::Atlas,
            "documento jurídico" => Self::DocumentoJuridico,
            "folha topographica" | "planta topográfica" | "carta topográfica" => {
                Self::CartaTopografica
            }
            "artigos" | "artigo" | "artigos de jornal" => Self::Artigo,
            "memórias" | "memória" => Self::Memorias,
            "textos literários" | "literatura" => Self::Literatura,
            "poesias" | "poema" => Self::Poesia,
            // Anything else is kept verbatim.
            s => Self::Other(s.to_owned()),
        })
    }
}
/// An ordered list of parsed document-type fields for one record.
#[derive(PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct DocumentTypes {
    types: Vec<Field<DocumentType>>,
}
/// Conversion into the underlying list of parsed fields.
///
/// Implemented as `From` (rather than a hand-written `Into`) so the standard
/// blanket impl still provides `Into<Vec<Field<DocumentType>>>` for existing
/// callers of `.into()`.
impl From<DocumentTypes> for Vec<Field<DocumentType>> {
    fn from(value: DocumentTypes) -> Self {
        value.types
    }
}
impl Display for DocumentTypes {
    /// Formats the entries separated by `/`, each followed by its index
    /// marker.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Writes one field followed by its index marker (if present).
        fn write_one(
            field: &Field<DocumentType>,
            f: &mut std::fmt::Formatter<'_>,
        ) -> std::fmt::Result {
            write!(f, "{}", field)?;
            match field {
                Field::Absent => Ok(()),
                Field::Present(kind) => write!(f, "{}", kind.index()),
                Field::NotStandard(_) => unreachable!(),
            }
        }
        let mut parts = self.types.iter();
        // First entry without a separator.
        if let Some(first) = parts.next() {
            write_one(first, f)?;
        }
        // Remaining entries each preceded by '/'.
        for rest in parts {
            f.write_str("/")?;
            write_one(rest, f)?;
        }
        Ok(())
    }
}
impl Debug for DocumentTypes {
#[inline]
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
<Self as Display>::fmt(&self, f)
}
}
impl FromStr for DocumentTypes {
    type Err = Never;
    /// Parses a raw annotation such as `"boletim / documentos oficiais"` or
    /// `"periódico (drama)"`: entries are separated by `/`, and parenthesized
    /// text is parsed as an additional entry. Never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        lazy_static! {
            // Group 1: text before any '(' or '?', group 2: parenthesized
            // text, group 3: a trailing question mark.
            static ref RE: Regex = Regex::new(r"([^(?]+)?(?:\(([^)]+)\))?(\?)?").unwrap();
        }
        let mut types = Vec::new();
        for part in s.split('/') {
            for part in RE.captures_iter(part) {
                // skip(1) drops the first present capture — the whole-match
                // group 0 — so only the numbered groups are parsed, each as
                // one Field<DocumentType>.
                for cap in part.iter().filter(Option::is_some).flatten().skip(1) {
                    types.push(cap.as_str().trim().parse().unwrap());
                }
            }
        }
        Ok(DocumentTypes { types })
    }
}
#[test]
fn parse_string() {
    // Parenthesized suffix becomes a second (free-form) entry.
    let types: DocumentTypes = "periódico (drama)".parse().unwrap();
    assert_eq!(
        types,
        DocumentTypes {
            types: vec![
                Field::Present(DocumentType::Periodico),
                Field::Present(DocumentType::Other("drama".into()))
            ]
        }
    );
    // Slash-separated entries parse independently.
    let types: DocumentTypes = "boletim / documentos oficiais".parse().unwrap();
    assert_eq!(
        types,
        DocumentTypes {
            types: vec![
                Field::Present(DocumentType::Boletim),
                Field::Present(DocumentType::DocumentoOficial)
            ]
        }
    );
    // "a conferir" (to be checked) parses to an Absent field.
    let types: DocumentTypes = "boletim ( a conferir )".parse().unwrap();
    assert_eq!(
        types,
        DocumentTypes {
            types: vec![Field::Present(DocumentType::Boletim), Field::Absent]
        }
    );
}
|
use std::fmt;
/// Write a hexdump of the provided byte slice.
///
/// Each row shows `prefix`, the row offset in hex, up to 16 bytes as
/// two-digit hex, padding so the ASCII column stays aligned on a short final
/// row, and the printable-ASCII rendering (non-printable bytes as `.`).
/// Rows are separated by newlines; no trailing newline is emitted.
pub fn hexdump(
    f: &mut fmt::Formatter,
    prefix: &str,
    buffer: &[u8],
) -> std::result::Result<(), std::fmt::Error> {
    const COLUMNS: usize = 16;
    // For a zero-length buffer, at least print an offset instead of nothing.
    if buffer.is_empty() {
        return write!(f, "{}{:04x}: ", prefix, 0);
    }
    // `chunks` replaces the original manual offset arithmetic: each chunk is
    // one row of at most COLUMNS bytes.
    for (row_idx, row) in buffer.chunks(COLUMNS).enumerate() {
        // Newline only *between* rows, never after the last one.
        if row_idx > 0 {
            writeln!(f)?;
        }
        write!(f, "{}{:04x}: ", prefix, row_idx * COLUMNS)?;
        // Hex column.
        for b in row {
            write!(f, "{:02x} ", b)?;
        }
        // Pad a short final row (3 chars per missing byte).
        for _ in row.len()..COLUMNS {
            write!(f, "   ")?;
        }
        // ASCII column: printable range 0x20..=0x7E, '.' otherwise.
        for b in row {
            write!(
                f,
                "{}",
                match *b {
                    c @ 0x20..=0x7E => c as char,
                    _ => '.',
                }
            )?;
        }
    }
    Ok(())
}
/// Wrapper that hexdumps a byte slice through its `Display` impl.
#[allow(dead_code)]
pub struct Hex<'a>(pub &'a [u8]);
impl<'a> fmt::Display for Hex<'a> {
    /// Formats the wrapped bytes with `hexdump` and an empty prefix.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        hexdump(f, "", self.0)
    }
}
/// Convenience constructor: `hex(bytes)` instead of `Hex(bytes)`.
pub fn hex(bytes: &[u8]) -> Hex {
    Hex(bytes)
}
/// Provide a convenience function for getting a byte slice based on its length
/// instead of its ending position.
pub trait Slice {
    /// Returns `size` bytes starting at `offset`.
    ///
    /// Panics (like ordinary slicing) when `offset + size` is out of bounds.
    fn slice(&self, offset: usize, size: usize) -> &[u8];
}
impl<'a> Slice for &'a [u8] {
    #[inline]
    fn slice(&self, offset: usize, size: usize) -> &[u8] {
        let end = offset + size;
        &self[offset..end]
    }
}
/// Provide a convenience function for getting a mutable byte slice based on
/// its length instead of its ending position.
pub trait SliceMut {
    /// Returns a mutable view of `size` bytes starting at `offset`.
    ///
    /// Panics (like ordinary slicing) when `offset + size` is out of bounds.
    fn slice_mut(&mut self, offset: usize, size: usize) -> &mut [u8];
}
impl<'a> SliceMut for &'a mut [u8] {
    #[inline]
    fn slice_mut(&mut self, offset: usize, size: usize) -> &mut [u8] {
        let end = offset + size;
        &mut self[offset..end]
    }
}
|
#![cfg_attr(
any(feature = "print_attributes", feature = "hacspec_unsafe"),
feature(proc_macro_diagnostic)
)]
#![cfg_attr(
any(feature = "print_attributes", feature = "hacspec_unsafe"),
feature(proc_macro_span)
)]
extern crate ansi_term;
extern crate hacspec_util;
extern crate quote;
extern crate serde;
extern crate serde_json;
extern crate syn;
#[cfg(feature = "print_attributes")]
use ansi_term::Colour::{Blue, Green, Purple, Yellow};
#[cfg(feature = "print_attributes")]
use hacspec_util::{syn_sig_to_reduced, Signature};
#[cfg(feature = "hacspec_unsafe")]
use proc_macro::TokenTree::Ident;
#[cfg(any(feature = "print_attributes", feature = "hacspec_unsafe"))]
use proc_macro::*;
#[cfg(feature = "print_attributes")]
use quote::quote;
#[cfg(feature = "print_attributes")]
use std::collections::{HashMap, HashSet};
#[cfg(feature = "print_attributes")]
use std::fs::OpenOptions;
#[cfg(any(feature = "print_attributes", feature = "hacspec_unsafe"))]
use syn::{parse_macro_input, spanned::Spanned, ItemFn};
// Path of the JSON file recording which item signatures are allowed per key.
#[cfg(feature = "print_attributes")]
const ITEM_LIST_LOCATION: &str = "./allowed_item_list.json";
// Generates one proc-macro attribute (e.g. `in_hacspec`) that, under the
// `print_attributes` feature, records/reports the annotated function and
// re-emits it with a doc line prepended.
//
// $id: attribute name; $key: key in the allowed-item JSON; $msg: colored
// diagnostic label; $doc: doc text attached to annotated items;
// $allowed_item: whether the function's signature is recorded in the list.
macro_rules! declare_attribute {
    ($id:ident, $key: expr, $msg: expr, $doc:tt, $allowed_item: expr) => {
        #[cfg(feature = "print_attributes")]
        #[doc=$doc]
        #[proc_macro_attribute]
        pub fn $id(attr: TokenStream, item: TokenStream) -> TokenStream {
            let item_copy = proc_macro2::TokenStream::from(item.clone());
            let func = parse_macro_input!(item as ItemFn);
            let mut attr_args_iter = attr.into_iter();
            // Optional first attribute argument: an impl type name (unused
            // here beyond consuming it).
            let _impl_type_name: Option<String> =
                attr_args_iter.next().map(|arg| format!("{}", arg));
            // A following argument pair marks the item as generic; its value
            // is consumed but otherwise ignored.
            let _is_generic: bool = attr_args_iter.next().map_or(false, |_| {
                let _ = attr_args_iter.next().expect("Error 7");
                true
            });
            if cfg!(feature = "print_attributes") {
                if $allowed_item {
                    // Record this function's reduced signature under $key.
                    let file = OpenOptions::new()
                        .read(true)
                        .open(ITEM_LIST_LOCATION)
                        .expect("Error 1");
                    let key_s = String::from($key);
                    let mut item_list: HashMap<String, HashSet<Signature>> =
                        serde_json::from_reader(&file).unwrap();
                    let item_list_type = match item_list.get_mut(&key_s) {
                        None => {
                            item_list.insert(key_s.clone(), HashSet::new());
                            item_list.get_mut(&key_s).expect("Error 2")
                        }
                        Some(items) => items,
                    };
                    item_list_type.insert(syn_sig_to_reduced(&func.sig));
                    // NOTE(review): the updated map is never written back to
                    // the file in this block — confirm persistence happens
                    // elsewhere.
                }
                // Emit a compiler note pointing at the annotated function.
                Diagnostic::new(
                    Level::Note,
                    format!(
                        "{}: {} {}",
                        $msg,
                        Green.paint(format!("{}", func.sig.ident)),
                        {
                            let file = func.sig.span().unwrap().source_file().path();
                            let start = func.sig.span().start();
                            format!(
                                "in file {}, line {}",
                                Yellow.paint(file.to_str().expect("Error 9")),
                                Yellow.paint(format!("{}", start.line))
                            )
                        }
                    ),
                )
                .emit()
            }
            // Re-emit the item unchanged, with the doc text prepended.
            let doc_msg = format!("_{}_\n", $doc);
            let output = quote! {
                #[doc=#doc_msg]
                #item_copy
            };
            output.into()
        }
    };
}
// Attribute for functions fully inside the hacspec subset; recorded in the
// allowed-item list.
declare_attribute!(
    in_hacspec,
    "in_hacspec",
    Blue.paint("Function in hacspec"),
    "This function is within the hacspec subset of Rust: its signature and body use only hacspec constructs and
call functions whose signatures are in hacspec.",
    true
);
// Attribute for functions callable from hacspec but implemented with
// non-hacspec Rust; also recorded in the allowed-item list.
declare_attribute!(
    unsafe_hacspec,
    "unsafe_hacspec",
    Purple.paint("Unsafe hacspec function"),
    "This function can be called from hacspec programs but its body features Rust constructs that are not part of hacspec",
    true
);
// Attribute for helpers outside the language; NOT recorded in the list.
declare_attribute!(
    not_hacspec,
    "not_hacspec",
    Yellow.paint("Function not in hacspec"),
    "Function that is not part of the language but is offered as a helper for tests, etc.",
    false
);
/// Proc-macro attribute that emits a compiler note for functions marked
/// unsafe-with-respect-to hacspec, then re-emits the item unchanged.
///
/// An optional single `outside` argument switches the note's wording.
#[cfg(feature = "hacspec_unsafe")]
#[proc_macro_attribute]
pub fn hacspec_unsafe(attr: TokenStream, item: TokenStream) -> TokenStream {
    let item_copy = item.clone();
    let func = parse_macro_input!(item_copy as ItemFn);
    // True only when the first attribute token is the identifier `outside`
    // (replaces the original `if cond { true } else { false }`).
    let outside = matches!(
        attr.into_iter().next(),
        Some(Ident(arg)) if arg.to_string() == "outside"
    );
    // Plain &str labels; the original used `format!` with no interpolation.
    let msg = if outside {
        "function outside of hacspec"
    } else {
        "unsafe hacspec function"
    };
    Diagnostic::new(
        Level::Note,
        format!("{}: {} {}", msg, func.sig.ident, {
            let file = func.sig.span().unwrap().source_file().path();
            let start = func.sig.span().start();
            format!(
                "in {}:{}",
                file.to_str().unwrap(),
                start.line
            )
        }),
    )
    .emit();
    // The annotated item is passed through untouched.
    item
}
|
use std::fs::File;
use std::mem::size_of;
use std::time::Instant;
use std::{cmp, io};
use log::debug;
use memmap::Mmap;
use crate::INITIAL_SORTER_VEC_SIZE;
use crate::{DEFAULT_COMPRESSION_LEVEL, DEFAULT_SORTER_MEMORY, MIN_SORTER_MEMORY};
use crate::{DEFAULT_NB_CHUNKS, MIN_NB_CHUNKS};
use crate::{Merger, MergerIter};
use crate::{Reader, Error};
use crate::{Writer, WriterBuilder, CompressionType};
/// Configuration builder for a `Sorter`: memory budget, chunk count limit,
/// chunk compression, and the user-provided merge function.
#[derive(Debug, Clone, Copy)]
pub struct SorterBuilder<MF> {
    pub max_memory: usize,
    pub max_nb_chunks: usize,
    pub chunk_compression_type: CompressionType,
    pub chunk_compression_level: u32,
    pub merge: MF,
}
impl<MF> SorterBuilder<MF> {
    /// Creates a builder with the default memory/chunk limits and Snappy
    /// chunk compression.
    pub fn new(merge: MF) -> Self {
        SorterBuilder {
            max_memory: DEFAULT_SORTER_MEMORY,
            max_nb_chunks: DEFAULT_NB_CHUNKS,
            chunk_compression_type: CompressionType::Snappy,
            chunk_compression_level: DEFAULT_COMPRESSION_LEVEL,
            merge,
        }
    }
    /// In-memory buffer budget in bytes (clamped up to `MIN_SORTER_MEMORY`).
    pub fn max_memory(&mut self, memory: usize) -> &mut Self {
        self.max_memory = cmp::max(memory, MIN_SORTER_MEMORY);
        self
    }
    /// The maximum number of chunks on disk, if this number of chunks is reached
    /// they will be merged into a single chunk. Merging can reduce the disk usage.
    pub fn max_nb_chunks(&mut self, nb_chunks: usize) -> &mut Self {
        self.max_nb_chunks = cmp::max(nb_chunks, MIN_NB_CHUNKS);
        self
    }
    /// Compression codec used when writing on-disk chunks.
    pub fn chunk_compression_type(&mut self, compression: CompressionType) -> &mut Self {
        self.chunk_compression_type = compression;
        self
    }
    /// Compression level handed to the chunk codec.
    pub fn chunk_compression_level(&mut self, level: u32) -> &mut Self {
        self.chunk_compression_level = level;
        self
    }
    /// Finalizes the configuration into a ready-to-use `Sorter`.
    pub fn build(self) -> Sorter<MF> {
        Sorter {
            chunks: Vec::new(),
            entries: Vec::with_capacity(INITIAL_SORTER_VEC_SIZE),
            entry_bytes: 0,
            max_memory: self.max_memory,
            max_nb_chunks: self.max_nb_chunks,
            chunk_compression_type: self.chunk_compression_type,
            chunk_compression_level: self.chunk_compression_level,
            merge: self.merge,
        }
    }
}
/// One key/value pair stored contiguously in a single allocation: the first
/// `key_len` bytes of `data` are the key, the remainder is the value.
struct Entry {
    data: Vec<u8>,
    key_len: usize,
}
impl Entry {
    /// Builds an entry by concatenating `key` and `val` into one
    /// exactly-sized buffer.
    pub fn new(key: &[u8], val: &[u8]) -> Entry {
        // `with_capacity` replaces the original `Vec::new` + `reserve_exact`
        // pair: one call, still a single allocation.
        let mut data = Vec::with_capacity(key.len() + val.len());
        data.extend_from_slice(key);
        data.extend_from_slice(val);
        Entry { data, key_len: key.len() }
    }
    /// The key bytes (first `key_len` bytes of `data`).
    pub fn key(&self) -> &[u8] {
        &self.data[..self.key_len]
    }
    /// The value bytes (everything after the key).
    pub fn val(&self) -> &[u8] {
        &self.data[self.key_len..]
    }
}
/// External sorter: buffers entries in memory, spills them to sorted
/// compressed chunks on disk, and merges chunks on demand.
pub struct Sorter<MF> {
    chunks: Vec<File>,
    entries: Vec<Entry>,
    /// The number of bytes allocated by the entries.
    entry_bytes: usize,
    max_memory: usize,
    max_nb_chunks: usize,
    chunk_compression_type: CompressionType,
    chunk_compression_level: u32,
    merge: MF,
}
impl<MF> Sorter<MF> {
    /// Starts a builder so limits and compression can be customized.
    pub fn builder(merge: MF) -> SorterBuilder<MF> {
        SorterBuilder::new(merge)
    }
    /// Creates a sorter with all default settings.
    pub fn new(merge: MF) -> Sorter<MF> {
        SorterBuilder::new(merge).build()
    }
}
impl<MF, U> Sorter<MF>
where MF: Fn(&[u8], &[Vec<u8>]) -> Result<Vec<u8>, U>
{
    /// Buffers one key/value pair in memory. When the estimated memory use
    /// reaches `max_memory` the entries are flushed to a sorted on-disk
    /// chunk; when more than `max_nb_chunks` chunks exist they are merged.
    pub fn insert<K, V>(&mut self, key: K, val: V) -> Result<(), Error<U>>
    where K: AsRef<[u8]>,
          V: AsRef<[u8]>,
    {
        let key = key.as_ref();
        let val = val.as_ref();
        let ent = Entry::new(key, val);
        self.entry_bytes += ent.data.len();
        self.entries.push(ent);
        // Estimated footprint: entry payload bytes plus the entries vector's
        // own allocation.
        let entries_vec_size = self.entries.capacity() * size_of::<Entry>();
        if self.entry_bytes + entries_vec_size >= self.max_memory {
            self.write_chunk()?;
            if self.chunks.len() > self.max_nb_chunks {
                self.merge_chunks()?;
            }
        }
        Ok(())
    }
    /// Sorts the in-memory entries, merges runs of equal keys with the user
    /// `merge` function, and writes the result to a new temp-file chunk.
    fn write_chunk(&mut self) -> Result<(), Error<U>> {
        debug!("writing a chunk...");
        let before_write = Instant::now();
        let file = tempfile::tempfile()?;
        let mut writer = WriterBuilder::new()
            .compression_type(self.chunk_compression_type)
            .compression_level(self.chunk_compression_level)
            .build(file);
        self.entries.sort_unstable_by(|a, b| a.key().cmp(&b.key()));
        // `current` holds the key of the run being scanned plus all of its
        // values; the buffers are reused from run to run.
        let mut current = None;
        for entry in self.entries.drain(..) {
            match current.as_mut() {
                None => {
                    let key = entry.key().to_vec();
                    let val = entry.val().to_vec();
                    current = Some((key, vec![val]));
                },
                Some((key, vals)) => {
                    if key == &entry.key() {
                        vals.push(entry.val().to_vec());
                    } else {
                        // Key changed: merge (only when needed) and flush the
                        // finished run, then start a new run in place.
                        let merged_val = if vals.len() == 1 {
                            vals.pop().unwrap()
                        } else {
                            (self.merge)(&key, &vals).map_err(Error::Merge)?
                        };
                        writer.insert(&key, &merged_val)?;
                        key.clear();
                        vals.clear();
                        key.extend_from_slice(entry.key());
                        vals.push(entry.val().to_vec());
                    }
                }
            }
        }
        // Flush the final run, if any.
        if let Some((key, mut vals)) = current.take() {
            let merged_val = if vals.len() == 1 {
                vals.pop().unwrap()
            } else {
                (self.merge)(&key, &vals).map_err(Error::Merge)?
            };
            writer.insert(&key, &merged_val)?;
        }
        let file = writer.into_inner()?;
        self.chunks.push(file);
        self.entry_bytes = 0;
        debug!("writing a chunk took {:.02?}", before_write.elapsed());
        Ok(())
    }
    /// Merges every on-disk chunk into a single fresh chunk.
    fn merge_chunks(&mut self) -> Result<(), Error<U>> {
        debug!("merging {} chunks...", self.chunks.len());
        let before_merge = Instant::now();
        let original_num_chunks = self.chunks.len();
        let file = tempfile::tempfile()?;
        let mut writer = WriterBuilder::new()
            .compression_type(self.chunk_compression_type)
            .compression_level(self.chunk_compression_level)
            .build(file);
        // Drain the chunks to mmap them and store them into a vector.
        let sources: Result<Vec<_>, Error<U>> = self.chunks.drain(..).map(|f| unsafe {
            // SAFETY(review): mapping a private tempfile; assumes nothing
            // truncates it while mapped — confirm.
            let mmap = Mmap::map(&f)?;
            Reader::new(mmap).map_err(Error::convert_merge_error)
        }).collect();
        // Create a merger to merge all those chunks.
        let mut builder = Merger::builder(&self.merge);
        builder.extend(sources?);
        let merger = builder.build();
        let mut iter = merger.into_merge_iter().map_err(Error::convert_merge_error)?;
        while let Some(result) = iter.next() {
            let (key, val) = result?;
            writer.insert(key, val)?;
        }
        let file = writer.into_inner()?;
        self.chunks.push(file);
        debug!("merging {} chunks took {:.02?}", original_num_chunks, before_merge.elapsed());
        Ok(())
    }
    /// Consumes the sorter and streams the fully merged entries into
    /// `writer`.
    pub fn write_into<W: io::Write>(self, writer: &mut Writer<W>) -> Result<(), Error<U>> {
        let mut iter = self.into_iter()?;
        while let Some(result) = iter.next() {
            let (key, val) = result?;
            writer.insert(key, val)?;
        }
        Ok(())
    }
    /// Consumes the sorter and returns a merged iterator over all chunks.
    pub fn into_iter(mut self) -> Result<MergerIter<Mmap, MF>, Error<U>> {
        // Flush the pending unordered entries.
        self.write_chunk()?;
        let sources: Result<Vec<_>, Error<U>> = self.chunks.into_iter().map(|f| unsafe {
            let mmap = Mmap::map(&f)?;
            Reader::new(mmap).map_err(Error::convert_merge_error)
        }).collect();
        let mut builder = Merger::builder(self.merge);
        builder.extend(sources?);
        builder.build().into_merge_iter().map_err(Error::convert_merge_error)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// End-to-end check: insert unsorted keys (one duplicated), write out,
    /// and verify the duplicate's values were concatenated by `merge`.
    #[test]
    fn simple() {
        // Concatenates all values of a duplicated key.
        fn merge(_key: &[u8], vals: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
            // merge must only be called for keys with two or more values.
            assert_ne!(vals.len(), 1);
            Ok(vals.iter().flatten().cloned().collect())
        }
        let mut sorter = SorterBuilder::new(merge)
            .chunk_compression_type(CompressionType::Snappy)
            .build();
        sorter.insert(b"hello", "kiki").unwrap();
        sorter.insert(b"abstract", "lol").unwrap();
        sorter.insert(b"allo", "lol").unwrap();
        sorter.insert(b"abstract", "lol").unwrap();
        // Write into an in-memory buffer, then read every entry back.
        let mut bytes = WriterBuilder::new().memory();
        sorter.write_into(&mut bytes).unwrap();
        let bytes = bytes.into_inner().unwrap();
        let rdr = Reader::new(bytes.as_slice()).unwrap();
        let mut iter = rdr.into_iter().unwrap();
        while let Some(result) = iter.next() {
            let (key, val) = result.unwrap();
            match key {
                b"hello" => assert_eq!(val, b"kiki"),
                // Duplicated key: both "lol" values concatenated.
                b"abstract" => assert_eq!(val, b"lollol"),
                b"allo" => assert_eq!(val, b"lol"),
                _ => panic!(),
            }
        }
    }
}
|
/// Route demonstrating an optional path parameter: reports whether the `id`
/// segment parsed as a `usize`.
#[get("/params/<id>")]
pub fn params(id: Option<usize>) -> String {
    // `id` is None when the segment failed to parse as a usize.
    id.map_or_else(|| String::from("Not a usize"), |n| format!("usize: {}", n))
}
|
use crossbeam_utils::thread;
use itertools::Itertools;
use std::{
io::{Read, Write},
net::TcpListener,
path::Path,
process::{Command, Stdio},
sync::mpsc,
};
/// Boxed-error result alias used throughout `main`.
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// Remote-shell server: clients open two TCP streams per command (one
/// carrying the command, one receiving output), with special handling for
/// `cd` and `vim`, plus a side channel forwarding interrupts as
/// `pkill -2 fish`.
fn main() -> Result<()> {
    // Main listener: commands and their output.
    let listener = TcpListener::bind("0.0.0.0:0")?;
    println!("server listener: {}", listener.local_addr()?);
    // Dedicated listener used only for interrupt requests.
    let sig_listener = TcpListener::bind("0.0.0.0:0")?;
    println!("signal listener: {}", sig_listener.local_addr()?);
    std::thread::spawn(move || -> std::io::Result<()> {
        for sig in sig_listener.incoming() {
            let mut sig = sig?;
            loop {
                // Each received byte is treated as one interrupt request;
                // a zero-length read means the peer closed the connection.
                let n = sig.read(&mut [0; 1])?;
                if n == 0 {
                    break;
                }
                // Forward the interrupt by SIGINT-ing fish processes.
                Command::new("fish")
                    .arg("-c")
                    .arg("pkill -2 fish")
                    .spawn()?
                    .wait()?;
            }
        }
        Ok(())
    });
    let mut input = String::new();
    // accept connections and process them serially
    // Pair up incoming connections: each client request is two streams.
    let listener = listener.incoming().chunks(2);
    let mut listener = listener.into_iter();
    loop {
        let mut stream = match listener.next() {
            Some(s) => s,
            None => break Ok(()),
        };
        // First stream carries the command text; second receives output.
        let mut stream_write = stream.next().ok_or("client should send this stream")??;
        let mut stream_read = stream.next().ok_or("client should send this stream")??;
        stream_write.read_to_string(&mut input)?;
        dbg!(&input);
        if input.starts_with("cd") {
            // `cd` must change this server process's directory directly; a
            // child shell could not affect it.
            let dir = input.split_whitespace().nth(1);
            if let Some(dir) = dir {
                std::env::set_current_dir(dir)?;
            }
        } else if input.starts_with("vim") {
            // Editing: send the file to the client, then wait for the
            // edited contents on a fresh stream pair.
            let file = Path::new(input.strip_prefix("vim").expect("already checked").trim());
            let file_name = file
                .file_name()
                .ok_or("Could not read filename")?
                .to_str()
                .ok_or("Could not read filename")?;
            // Missing files edit as empty content.
            let data = std::fs::read_to_string(&file).unwrap_or_default();
            // Wire format: "?vim" marker, filename, "???" separator, data.
            stream_read.write_all(b"?vim")?;
            stream_read.flush()?;
            stream_read.write_all(file_name.as_bytes())?;
            stream_read.write_all(b"???")?;
            stream_read.write_all(data.as_bytes())?;
            drop(stream_read);
            // get result
            let mut stream = listener.next().ok_or("client should send this stream")?;
            let mut stream_write = stream.next().ok_or("client should send this stream")??;
            let _stream_read = stream.next().ok_or("client should send this stream")??;
            let mut client_data = String::new();
            stream_write.read_to_string(&mut client_data)?;
            std::fs::write(&file, client_data)?;
        } else {
            // Any other input runs through fish with captured output.
            let mut process = Command::new("fish")
                .arg("-c")
                .arg(&input)
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .spawn()?;
            let mut stdout = process.stdout.take().expect("stdout is piped");
            let mut stderr = process.stderr.take().expect("stderr is piped");
            let (tx, rx) = mpsc::channel();
            // Reads one chunk (up to 512 bytes) from `out` and forwards it to
            // the writer thread; returns Ok(true) on EOF.
            let read_process_and_write_to_stream =
                |out: &mut dyn Read, tx: mpsc::Sender<Vec<u8>>| -> std::io::Result<bool> {
                    let mut out_buf = [0; 512];
                    let out_n = out.read(&mut out_buf)?;
                    if out_n != 0 {
                        tx.send(out_buf[..out_n].to_vec()).map_err(|_| {
                            std::io::Error::new(
                                std::io::ErrorKind::Other,
                                "failed to send data to write thread",
                            )
                        })?;
                    }
                    Ok(out_n == 0)
                };
            // Scoped threads: two producers (stdout/stderr) and one consumer
            // writing combined output back to the client.
            thread::scope(move |s| {
                let tx_c = tx.clone();
                s.spawn(move |_| {
                    while let Ok(false) =
                        read_process_and_write_to_stream(&mut stdout, tx_c.clone())
                    {
                    }
                });
                s.spawn(move |_| {
                    while let Ok(false) = read_process_and_write_to_stream(&mut stderr, tx.clone())
                    {
                    }
                });
                s.spawn(move |_| -> std::io::Result<()> {
                    // recv() errors out once both sender clones are dropped.
                    while let Ok(data) = rx.recv() {
                        stream_read.write_all(&data)?;
                    }
                    Ok(())
                });
            })
            .map_err(|e| format!("read/write threads failed {:?}", e))?;
        }
        // Reuse the input buffer for the next command.
        input.clear();
    }
}
|
mod parse;
use parse::parser::*;
use serde_json;
use std::io::{self, Read};
/// Read an HTML document from stdin, parse it as a Parler page, and emit the
/// parsed result as pretty-printed JSON on stdout.
fn main() -> io::Result<()> {
    let mut html = String::new();
    io::stdin().read_to_string(&mut html)?;
    // Parsing failures abort with a panic; only I/O errors are propagated.
    let page = parse::page::ParlerPage::from_html(&html).unwrap();
    serde_json::to_writer_pretty(io::stdout(), &page)?;
    Ok(())
}
|
use bitflags::bitflags;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
pub mod gateway;
pub const BASE_URL: &'static str = "https://discordapp.com/api";
/// Response to `GET /gateway`: the websocket URL to connect to.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct GatewayResponse {
    pub url: String,
}
/// Error payload returned by the Discord REST API.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Error {
    // for rate limits
    pub global: Option<bool>,
    // in ms
    pub retry_after: Option<u64>,
    // max error code is 90001
    pub code: Option<u32>,
    pub message: String,
}
/// A Discord user account. Optional fields are simply absent from some
/// endpoints' responses, hence the `Option`s.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct User {
    pub id: Snowflake,
    pub username: String,
    pub discriminator: String,
    pub avatar: Option<String>,
    pub bot: Option<bool>,
    pub mfa_enabled: Option<bool>,
    pub locale: Option<String>,
    pub verified: Option<bool>,
    pub email: Option<String>,
    pub phone: Option<String>,
    pub flags: Option<u64>,
    pub premium_type: Option<u64>,
}
/// A user's membership record within a specific guild.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct GuildMember {
    pub user: User,
    pub nick: Option<String>,
    pub roles: Vec<Snowflake>,
    pub joined_at: String,
    pub deaf: bool,
    pub mute: bool,
}
/// A guild role and the permission set it grants.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Role {
    pub color: u32,
    pub hoist: bool,
    pub id: Snowflake,
    pub managed: bool,
    pub mentionable: bool,
    pub name: String,
    pub permissions: Permissions,
    pub position: u64,
}
/// A 64-bit Discord unique identifier ("snowflake").
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Snowflake(u64);
impl ::std::fmt::Display for Snowflake {
    /// Render the ID as its plain decimal value by delegating to `u64`.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        self.0.fmt(f)
    }
}
use serde::de::{self, Visitor};
use std::fmt;
struct SnowflakeVisitor;
impl<'de> Visitor<'de> for SnowflakeVisitor {
type Value = Snowflake;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("A string that can be deserialized into a u64")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
value
.parse::<u64>()
.map(|v| Snowflake(v))
.map_err(|e| serde::de::Error::custom(e))
}
}
impl<'de> Deserialize<'de> for Snowflake {
    /// Deserialize from the string wire form (Discord sends IDs as strings
    /// because they exceed JavaScript's exactly-representable integer range).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_str(SnowflakeVisitor)
    }
}
impl Serialize for Snowflake {
    /// Serialize as a decimal string, mirroring `deserialize_str` above.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // `collect_str` formats via `Display` without an intermediate String;
        // the old code went through `format!("{:?}", ..)` — Debug formatting —
        // which only worked because Debug and Display agree for u64.
        serializer.collect_str(&self.0)
    }
}
/// A guild (server) as returned by e.g. the user's guild list.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Guild {
    pub id: Snowflake,
    pub name: String,
    pub icon: Option<String>,
    pub owner: bool,
    pub permissions: Permissions, // Oh no they've encoded them strangely
}
/// A channel (guild text/voice, DM, group DM, or category). `ty` selects the
/// kind — see `ChannelType` — and most other fields are kind-dependent.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Channel {
    pub id: Snowflake,
    #[serde(rename = "type")]
    pub ty: u8,
    pub guild_id: Option<Snowflake>,
    pub position: Option<u64>,
    pub permission_overwrites: Vec<Overwrite>,
    pub name: Option<String>,
    pub topic: Option<String>,
    pub nsfw: Option<bool>,
    pub last_message_id: Option<Snowflake>,
    pub bitrate: Option<u64>,
    pub user_limit: Option<u64>,
    pub rate_limit_per_user: Option<u64>,
    pub recipients: Option<Vec<User>>,
    pub icon: Option<String>,
    pub owner_id: Option<Snowflake>,
    pub application_id: Option<Snowflake>,
    pub parent_id: Option<Snowflake>,
    pub last_pin_timestamp: Option<Timestamp>,
}
/// Timestamp kept as the raw string the API sent; never parsed here.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Timestamp(String);
/// Decoded form of `Channel::ty` (wire values 0-4, see the visitor below).
#[derive(Clone, Debug)]
pub enum ChannelType {
    GuildText,
    Dm,
    GuildVoice,
    GroupDm,
    GuildCategory,
}
/// Visitor mapping the integer wire representation onto `ChannelType`.
struct ChannelTypeVisitor;
impl<'de> ::serde::de::Visitor<'de> for ChannelTypeVisitor {
    type Value = ChannelType;
    fn expecting(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        formatter.write_str("an integer")
    }
    // Only u64 input is handled: 0-4 map to variants, anything else errors.
    fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
    where
        E: ::serde::de::Error,
    {
        match value {
            0 => Ok(ChannelType::GuildText),
            1 => Ok(ChannelType::Dm),
            2 => Ok(ChannelType::GuildVoice),
            3 => Ok(ChannelType::GroupDm),
            4 => Ok(ChannelType::GuildCategory),
            _ => Err(::serde::de::Error::custom(format!(
                "invalid channel type {}",
                value
            ))),
        }
    }
}
impl<'de> ::serde::Deserialize<'de> for ChannelType {
    /// Deserialize the integer channel-type discriminant.
    fn deserialize<D>(deserializer: D) -> Result<ChannelType, D::Error>
    where
        D: ::serde::Deserializer<'de>,
    {
        // The visitor only implements `visit_u64`, so request a u64 directly.
        // `deserialize_any` worked for self-describing formats like JSON but
        // fails on non-self-describing ones; serde recommends hinting the
        // expected type when it is known.
        deserializer.deserialize_u64(ChannelTypeVisitor)
    }
}
/// A per-role or per-member permission override on a channel.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Overwrite {
    pub id: Snowflake,
    #[serde(rename = "type")]
    pub ty: OverwriteType,
    pub allow: Permissions,
    pub deny: Permissions,
}
/// Whether an `Overwrite` targets a role or an individual member.
#[derive(Clone, Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum OverwriteType {
    Role,
    Member,
}
/// Subset of message fields used for acknowledgement handling.
/// Note: private and `Deserialize`-only, unlike the public types above.
#[derive(serde::Deserialize)]
struct MessageAck {
    pub timestamp: String,
    pub id: Snowflake,
    pub author: User,
    pub content: String,
}
/// A chat message. Timestamps and (via `Cow`) the content borrow from the
/// deserializer's input buffer where the format allows it.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Message<'a> {
    pub id: Snowflake,
    pub channel_id: Snowflake,
    pub guild_id: Option<Snowflake>,
    // There's an author field but it's an untagged enum
    pub author: User,
    //pub member: Option<PartialGuild>,
    #[serde(borrow)]
    pub content: Cow<'a, str>,
    pub timestamp: &'a str,
    pub edited_timestamp: Option<&'a str>,
    pub tts: bool,
    pub mention_everyone: bool,
    pub mentions: Vec<User>,
    pub mention_roles: Vec<Snowflake>,
    pub attachments: Vec<Attachment>,
    pub embeds: Vec<Embed>,
    pub reactions: Option<Vec<Reaction>>,
    pub nonce: Option<Snowflake>,
    pub pinned: bool,
    pub webhook_id: Option<Snowflake>,
    #[serde(rename = "type")]
    pub ty: u64,
    //pub activity: Option<MessageActivity>,
    //pub application: Option<MessageApplication>,
}
/// A file attached to a message.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Attachment {
    pub id: Snowflake,
    pub filename: String,
    pub size: u64,
    pub url: String,
    pub proxy_url: String,
    pub height: Option<u64>,
    pub width: Option<u64>,
}
/// Rich embed content on a message; several fields were observed on the wire
/// despite being undocumented at the time this was written.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Embed {
    pub title: Option<String>,
    #[serde(rename = "type")]
    pub ty: Option<String>,
    pub description: Option<String>,
    pub url: Option<String>,
    pub timestamp: Option<String>,
    pub color: Option<u32>,
    pub author: Option<EmbedAuthor>, // undocumented
    pub video: Option<Video>,        // undocumented
    pub provider: Option<Provider>,  // undocumented
    pub thumbnail: Option<Image>,    //undocumented
    pub fields: Option<Vec<EmbedField>>, // undocumented
    pub footer: Option<Footer>,      // undocumented
    pub image: Option<Image>,        // undocumented
}
/// Footer line of an embed.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Footer {
    pub text: String,
    pub icon_url: Option<String>,
    pub proxy_icon_url: Option<String>,
}
/// A named key/value field inside an embed.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct EmbedField {
    pub inline: bool,
    pub name: String,
    pub value: String,
}
/// An embed image or thumbnail.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Image {
    pub height: u64,
    pub url: String,
    pub width: u64,
    pub proxy_url: Option<String>,
}
/// The external service an embed originated from.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Provider {
    pub name: String,
    pub url: Option<String>,
    pub icon_url: Option<String>,
    pub proxy_url: Option<String>,
}
/// An embedded video.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Video {
    pub height: u64,
    pub width: u64,
    pub url: String,
    pub proxy_url: Option<String>,
}
/// Author line of an embed.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct EmbedAuthor {
    pub url: Option<String>,
    pub name: String,
    pub icon_url: Option<String>,
    pub proxy_url: Option<String>,
    pub proxy_icon_url: Option<String>,
}
/// Tally of one emoji reaction on a message.
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Reaction {
    count: u64,
    me: bool,
    emoji: Emoji,
}
/// An emoji; the optional fields suggest both built-in and guild-custom
/// emoji share this shape (custom ones carrying `id`/`roles`/etc.).
#[derive(Clone, Debug, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct Emoji {
    id: Option<Snowflake>,
    name: String,
    roles: Option<Vec<Role>>,
    user: Option<User>,
    require_colons: Option<bool>,
    managed: Option<bool>,
    animated: Option<bool>,
}
// All this was taken from spacemaniac/discord-rs
/// Implements `Serialize`/`Deserialize` for a one-field wrapper struct by
/// delegating both directions to the wrapped field's type (`$inner`).
macro_rules! serial_single_field {
    ($typ:ident as $field:ident: $inner:path) => {
        impl ::serde::Serialize for $typ {
            fn serialize<S: ::serde::ser::Serializer>(
                &self,
                s: S,
            ) -> ::std::result::Result<S::Ok, S::Error> {
                self.$field.serialize(s)
            }
        }
        impl<'d> ::serde::Deserialize<'d> for $typ {
            fn deserialize<D: ::serde::de::Deserializer<'d>>(
                d: D,
            ) -> ::std::result::Result<$typ, D::Error> {
                <$inner as ::serde::de::Deserialize>::deserialize(d).map(|v| $typ { $field: v })
            }
        }
    };
}
// On the wire a permission set is just its raw u64 bit pattern.
serial_single_field!(Permissions as bits: u64);
bitflags! {
    /// Set of permissions assignable to a Role or PermissionOverwrite
    pub struct Permissions: u64 {
        const CREATE_INVITE = 1;
        const KICK_MEMBERS = 1 << 1;
        const BAN_MEMBERS = 1 << 2;
        /// Grant all permissions, bypassing channel-specific permissions
        const ADMINISTRATOR = 1 << 3;
        /// Modify roles below their own
        const MANAGE_ROLES = 1 << 28;
        /// Create channels or edit existing ones
        const MANAGE_CHANNELS = 1 << 4;
        /// Change the server's name or move regions
        const MANAGE_SERVER = 1 << 5;
        /// Change their own nickname
        const CHANGE_NICKNAMES = 1 << 26;
        /// Change the nickname of other users
        const MANAGE_NICKNAMES = 1 << 27;
        /// Manage the emojis in a server.
        const MANAGE_EMOJIS = 1 << 30;
        /// Manage channel webhooks
        const MANAGE_WEBHOOKS = 1 << 29;
        const READ_MESSAGES = 1 << 10;
        const SEND_MESSAGES = 1 << 11;
        /// Send text-to-speech messages to those focused on the channel
        const SEND_TTS_MESSAGES = 1 << 12;
        /// Delete messages by other users
        const MANAGE_MESSAGES = 1 << 13;
        const EMBED_LINKS = 1 << 14;
        const ATTACH_FILES = 1 << 15;
        const READ_HISTORY = 1 << 16;
        /// Trigger a push notification for an entire channel with "@everyone"
        const MENTION_EVERYONE = 1 << 17;
        /// Use emojis from other servers
        const EXTERNAL_EMOJIS = 1 << 18;
        /// Add emoji reactions to messages
        const ADD_REACTIONS = 1 << 6;
        const VOICE_CONNECT = 1 << 20;
        const VOICE_SPEAK = 1 << 21;
        const VOICE_MUTE_MEMBERS = 1 << 22;
        const VOICE_DEAFEN_MEMBERS = 1 << 23;
        /// Move users out of this channel into another
        const VOICE_MOVE_MEMBERS = 1 << 24;
        /// When denied, members must use push-to-talk
        const VOICE_USE_VAD = 1 << 25;
    }
}
|
use apllodb_immutable_schema_engine_domain::abstract_types::ImmutableSchemaAbstractTypes;
use super::{
sqlite_rowid::SqliteRowid,
transaction::sqlite_tx::{
version::repository_impl::VersionRepositoryImpl,
version_revision_resolver::VersionRevisionResolverImpl,
vtable::repository_impl::VTableRepositoryImpl,
},
};
/// Zero-sized marker that wires the SQLite-backed implementations into the
/// domain layer's abstract type family.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]
pub(crate) struct SqliteTypes;
impl ImmutableSchemaAbstractTypes for SqliteTypes {
    type VrrId = SqliteRowid;
    type VTableRepo = VTableRepositoryImpl;
    type VersionRepo = VersionRepositoryImpl;
    type Vrr = VersionRevisionResolverImpl;
}
// Fill structs' type parameters in domain / application layers.
pub(crate) type VrrEntriesInVersion =
    apllodb_immutable_schema_engine_domain::version_revision_resolver::vrr_entries_in_version::VrrEntriesInVersion<SqliteTypes>;
pub(crate) type VrrEntries =
    apllodb_immutable_schema_engine_domain::version_revision_resolver::vrr_entries::VrrEntries<
        SqliteTypes,
    >;
pub(crate) type VrrEntry =
    apllodb_immutable_schema_engine_domain::version_revision_resolver::vrr_entry::VrrEntry<
        SqliteTypes,
    >;
pub(crate) type RowSelectionPlan =
    apllodb_immutable_schema_engine_domain::row_selection_plan::RowSelectionPlan<SqliteTypes>;
|
use std::io::{self};
/// Read `A` on line 1 and `B C` on line 2, print `A+B+C` followed by the
/// third input line, echoed verbatim (its trailing newline included).
fn main() {
    let mut a = String::new();
    io::stdin().read_line(&mut a).unwrap();
    let mut bc = String::new();
    io::stdin().read_line(&mut bc).unwrap();
    // `trim`/`split_whitespace` are robust against CRLF line endings and
    // repeated spaces; the previous `trim_end_matches('\n')` + `split(" ")`
    // panicked on Windows-style input.
    let mut nums = bc.split_whitespace().map(|t| t.parse::<i32>().unwrap());
    let b = nums.next().unwrap();
    let c = nums.next().unwrap();
    let sum = a.trim().parse::<i32>().unwrap() + b + c;
    let mut s = String::new();
    io::stdin().read_line(&mut s).unwrap();
    print!("{} {}", sum, s);
}
|
use glium::{Frame, Surface};
use crate::math::{normalize_vector, cross_product};
/// First-person camera: `position` tracks the body, the eye sits `height`
/// above it, and `direction` is derived from `pitch_yaw` (radians) by
/// `update_direction`.
#[derive(Copy, Clone)]
pub struct Camera {
    pub position: [f32; 3],
    pub height: f32,
    pub direction: [f32; 3],
    pub pitch_yaw: (f32, f32)
}
/// World-space up vector (+Y).
pub const UP: [f32; 3] = [0.0, 1.0, 0.0];
impl Camera {
    /// Create a camera at `position` with the eye `height` above it.
    /// The direction starts zeroed; call `update_direction` before building
    /// a view matrix.
    pub fn new(position: [f32; 3], height: f32) -> Self {
        // Field-init shorthand replaces the redundant `position: position` etc.
        Camera {
            position,
            height,
            direction: [0.0, 0.0, 0.0],
            pitch_yaw: (0.0, 0.0),
        }
    }
    /// Recompute the unit view direction from the stored (pitch, yaw) angles.
    pub fn update_direction(&mut self) {
        self.direction = normalize_vector(&[
            self.pitch_yaw.1.cos() * self.pitch_yaw.0.cos(),
            self.pitch_yaw.0.sin(),
            self.pitch_yaw.1.sin() * self.pitch_yaw.0.cos(),
        ]);
    }
    /// Build a look-at style view matrix (column-major, suitable as a glium
    /// uniform) from the eye position and current direction.
    pub fn view_matrix(&self) -> [[f32; 4]; 4] {
        // Camera basis: s = side, u = up, self.direction = forward.
        let s = normalize_vector(&cross_product(&UP, &self.direction));
        let u = cross_product(&self.direction, &s);
        // The eye sits `height` above the tracked position.
        let position = [
            self.position[0],
            self.position[1] + self.height,
            self.position[2],
        ];
        // Translation expressed in the rotated basis (dot products with -eye).
        let p = [
            -position[0] * s[0] - position[1] * s[1] - position[2] * s[2],
            -position[0] * u[0] - position[1] * u[1] - position[2] * u[2],
            -position[0] * self.direction[0]
                - position[1] * self.direction[1]
                - position[2] * self.direction[2],
        ];
        [
            [s[0], u[0], self.direction[0], 0.0],
            [s[1], u[1], self.direction[1], 0.0],
            [s[2], u[2], self.direction[2], 0.0],
            [p[0], p[1], p[2], 1.0],
        ]
    }
}
/// Build an OpenGL-style perspective projection matrix sized to the current
/// frame (vertical fov = π/3, znear = 0.1, zfar = 1024).
/// NOTE(review): `&mut` is not actually needed for `get_dimensions`, but the
/// parameter type is kept for caller compatibility.
pub fn perspective_matrix(target: &mut Frame) -> [[f32; 4]; 4] {
    let (width, height) = target.get_dimensions();
    let aspect_ratio = height as f32 / width as f32;
    // Use the exact PI constant instead of the hand-typed 3.141592.
    let fov: f32 = std::f32::consts::PI / 3.0;
    let zfar = 1024.0;
    let znear = 0.1;
    // Focal length derived from the vertical field of view.
    let f = 1.0 / (fov / 2.0).tan();
    [
        [f * aspect_ratio, 0.0, 0.0, 0.0],
        [0.0, f, 0.0, 0.0],
        [0.0, 0.0, (zfar + znear) / (zfar - znear), 1.0],
        [0.0, 0.0, -(2.0 * zfar * znear) / (zfar - znear), 0.0],
    ]
}
|
extern crate rustc_serialize;
extern crate pcap;
use rustc_serialize::Encodable;
/// A batch of decoded packets tagged with the capturing host's identifier.
#[derive(RustcDecodable, RustcEncodable)]
pub struct PacketContainer<T: Encodable> {
    pub host_identifier: String,
    pub data: Vec<T>,
}
/// An ICMP echo (ping) packet reduced to the fields we report.
#[derive(RustcDecodable, RustcEncodable)]
pub struct PingPacket {
    pub timestamp: i64,
    pub srcip: String,
    pub dstip: String,
    pub id: u16,
    pub seq: u16,
}
impl<'a> From<::pcap::Packet<'a>> for PingPacket {
    /// Decode a raw captured frame into a `PingPacket`.
    ///
    /// Byte offsets assume an Ethernet II frame (14 bytes) carrying an IPv4
    /// header without options (20 bytes): source IP at offset 26, destination
    /// IP at 30, and the ICMP echo identifier/sequence (big-endian u16s) at
    /// 38/40 respectively.
    /// NOTE(review): indexes panic if the capture is shorter than 42 bytes —
    /// presumably callers apply an ICMP-over-IPv4 filter first; confirm.
    fn from(packet: ::pcap::Packet) -> PingPacket {
        let srcip = ::std::net::Ipv4Addr::new(packet.data[26],
                                              packet.data[27],
                                              packet.data[28],
                                              packet.data[29]);
        let dstip = ::std::net::Ipv4Addr::new(packet.data[30],
                                              packet.data[31],
                                              packet.data[32],
                                              packet.data[33]);
        PingPacket {
            timestamp: packet.header.ts.tv_sec,
            srcip: srcip.to_string(),
            dstip: dstip.to_string(),
            id: ((packet.data[38] as u16) << 8 | (packet.data[39] as u16)),
            seq: ((packet.data[40] as u16) << 8 | (packet.data[41] as u16)),
        }
    }
}
|
use bitwise::bitwiseops;
/// A cipher that transforms one block of bytes at a time.
pub trait BlockCipher {
    // Encrypt/decrypt `input` and return the transformed bytes.
    fn process_block(&self, input: &[u8]) -> Vec<u8>;
}
/// XOR cipher keyed by a single byte.
pub struct SingleCharXorCipher {
    key: u8,
}
impl SingleCharXorCipher {
    /// Create a cipher that XORs every input byte with `key`.
    pub fn new(key: u8) -> SingleCharXorCipher {
        // Field-init shorthand replaces the redundant `key: key`.
        SingleCharXorCipher { key }
    }
}
/// XOR cipher keyed by a repeating multi-byte key.
pub struct XorCipher {
    key: Vec<u8>,
}
impl XorCipher {
    /// Create a cipher from an owned key.
    fn new(key: Vec<u8>) -> XorCipher {
        // `key` is already owned here; the previous `key.clone()` allocated a
        // needless copy and immediately dropped the original.
        XorCipher { key }
    }
}
impl BlockCipher for SingleCharXorCipher {
    /// XOR every byte of `input` with the single-byte key.
    fn process_block(&self, input: &[u8]) -> Vec<u8> {
        bitwiseops::xor_with_char(self.key, input)
    }
}
impl BlockCipher for XorCipher {
    /// XOR `input` against the multi-byte key (repeating-key XOR).
    fn process_block(&self, input: &[u8]) -> Vec<u8> {
        bitwiseops::xor_with_key(&self.key, input)
    }
}
use std::sync::mpsc::channel;
use std::sync::mpsc::Sender;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
/// A boxed, sendable one-shot job.
type Thunk<'a> = Box<dyn FnOnce() + Send + 'a>;
/// Fixed-size pool of worker threads consuming jobs from a shared channel.
pub struct ThreadPool {
    job_sender: Sender<Thunk<'static>>,
}
impl ThreadPool {
    /// Spawn `capacity` workers that loop receiving jobs from one shared,
    /// mutex-guarded receiver. Workers exit once the pool (the only sender)
    /// is dropped.
    pub fn new(capacity: usize) -> Self {
        let (tx, rx) = channel::<Thunk<'static>>();
        let mrx = Arc::new(Mutex::new(rx));
        for _ in 0..capacity {
            let job_receiver = mrx.clone();
            thread::spawn(move || loop {
                // Hold the lock only while receiving, not while running a job,
                // so other workers can pick up jobs concurrently.
                let msg = {
                    let lock = job_receiver.lock().unwrap();
                    lock.recv()
                };
                match msg {
                    Ok(f) => f(),
                    // Bug fix: `recv` returns `Err` once the sender is gone
                    // (pool dropped). The old `_ => ()` arm looped straight
                    // back into `recv`, busy-spinning every worker forever;
                    // break so the threads terminate.
                    Err(_) => break,
                }
            });
        }
        Self { job_sender: tx }
    }
    /// Queue `job` for execution on one of the worker threads.
    ///
    /// # Panics
    /// Panics if all workers have died and the channel is disconnected.
    pub fn run<F>(&mut self, job: F)
    where
        F: FnOnce() + Send + 'static,
    {
        self.job_sender.send(Box::new(job)).unwrap();
    }
}
|
use std::collections::HashMap;
use apllodb_shared_components::{ApllodbError, ApllodbResult, SessionId};
use generational_arena::{Arena, Index};
use crate::sqlite::database::SqliteDatabase;
/// Maps sessions to their open SQLite databases: databases live in a
/// generational arena and `sess_db` resolves a session ID to its arena index.
#[derive(Debug, Default)]
pub(crate) struct SqliteDatabasePool {
    pub(crate) db_arena: Arena<SqliteDatabase>,
    pub(crate) sess_db: HashMap<SessionId, Index>,
}
impl SqliteDatabasePool {
    /// Look up the database opened by this session.
    ///
    /// # Failures
    ///
    /// - [ConnectionExceptionDatabaseNotOpen](apllodb-shared-components::SqlState::ConnectionExceptionDatabaseNotOpen) when:
    ///   - this session seems not to open any database.
    pub(crate) fn get_db(&self, sid: &SessionId) -> ApllodbResult<&SqliteDatabase> {
        let err = || {
            ApllodbError::connection_exception_database_not_open(format!(
                // Message fixed ("does not opens" -> "does not open") for
                // grammar and consistency with `remove_db`.
                "session `{:?}` does not open any database",
                sid
            ))
        };
        let db_idx = *self.sess_db.get(sid).ok_or_else(err)?;
        let db = self.db_arena.get(db_idx).ok_or_else(err)?;
        Ok(db)
    }
    /// Detach and return this session's database, removing it from the pool.
    ///
    /// # Failures
    ///
    /// - [ConnectionExceptionDatabaseNotOpen](apllodb-shared-components::SqlState::ConnectionExceptionDatabaseNotOpen) when:
    ///   - this session seems not to open any database.
    #[allow(dead_code)]
    pub(crate) fn remove_db(&mut self, sid: &SessionId) -> ApllodbResult<SqliteDatabase> {
        let err = || {
            ApllodbError::connection_exception_database_not_open(format!(
                "session `{:?}` does not open any database",
                sid
            ))
        };
        let db_idx = self.sess_db.remove(sid).ok_or_else(err)?;
        let db = self.db_arena.remove(db_idx).ok_or_else(err)?;
        Ok(db)
    }
    /// Register `db` as this session's open database.
    ///
    /// # Failures
    ///
    /// - [ConnectionExceptionDatabaseAlreadyOpen](apllodb-shared-components::SqlState::ConnectionExceptionDatabaseAlreadyOpen) when:
    ///   - this session seems to open another database.
    pub(crate) fn insert_db(&mut self, sid: &SessionId, db: SqliteDatabase) -> ApllodbResult<()> {
        // Bug fix: check for an existing binding *before* touching the arena.
        // The old code inserted first and then errored, which (a) leaked the
        // newly inserted database in the arena and (b) overwrote the session's
        // previous index, leaking the originally opened database as well.
        if self.sess_db.contains_key(sid) {
            Err(ApllodbError::connection_exception_database_already_open(
                format!("session `{:?}` already opens another database", sid),
            ))
        } else {
            let db_idx = self.db_arena.insert(db);
            self.sess_db.insert(*sid, db_idx);
            Ok(())
        }
    }
}
|
#![no_std]
use zoon::*;
blocks!{
    #[derive(Copy, Clone)]
    enum Color {
        Red,
        Blue,
    }
    // Reactive state holding the current fill color.
    #[s_var]
    fn color() -> Color {
        // Bug fix: the enum has no variant `A`; start with `Red`.
        Color::Red
    }
    // Flip between the two colors on each press.
    #[update]
    fn toggle_color() {
        use Color::{Red, Blue};
        color().update(|color| if let Red = color { Blue } else { Red });
    }
    // Canvas fill style derived from the current color.
    #[cache]
    fn fill_style() -> JsValue {
        // Bug fix: `color` is an accessor function and must be called
        // (`color()`), matching its usage in `toggle_color`.
        let color = if let Color::Red = color().inner() { "red" } else { "blue" };
        JsValue::from(color)
    }
    #[el]
    fn root() -> Row {
        let fill_style = fill_style();
        row![
            canvas![
                canvas::width(300),
                canvas::height(300),
                canvas::on_ready(|canvas| {
                    let ctx = canvas.context_2d();
                    // Bug fix: the Rust canvas bindings expose setters, not
                    // JS-style field assignment (`ctx.lineWidth = 10` is not
                    // valid Rust) — mirrors `set_fill_style` below.
                    ctx.set_line_width(10.);
                    fill_style.use_ref(|style| ctx.set_fill_style(style));
                    // Wall
                    ctx.stroke_rect(75., 140., 150., 110.);
                    // Door
                    ctx.fill_rect(130., 190., 40., 60.);
                    // Roof
                    ctx.begin_path();
                    ctx.move_to(50., 140.);
                    ctx.line_to(150., 60.);
                    ctx.line_to(250., 140.);
                    ctx.close_path();
                    ctx.stroke();
                });
            ],
            button![
                button::on_press(toggle_color),
                "Change color",
            ]
        ]
    }
}
/// Wasm entry point: hands control to the zoon runtime via `start!`.
#[wasm_bindgen(start)]
pub fn start() {
    start!()
}
|
use super::super::{components, input, resources};
use specs::{Read, ReadStorage, System, WriteStorage};
/// System that turns the pending input action into `Moved` components on all
/// player-controlled entities.
pub struct PlayerInput;
impl<'a> System<'a> for PlayerInput {
    type SystemData = (
        ReadStorage<'a, components::Player>,
        ReadStorage<'a, components::Position>,
        WriteStorage<'a, components::Moved>,
        Read<'a, resources::PendingAction>,
        specs::Entities<'a>,
        Read<'a, specs::LazyUpdate>,
    );
    fn run(
        &mut self,
        (player, position, mut moved, pending_action, entities, updater): Self::SystemData,
    ) {
        use specs::Join;
        // Map the pending action onto a position transform once, instead of
        // repeating the identical join/update loop in four match arms.
        let step: Option<fn(&components::Position) -> components::Position> =
            match &pending_action.0 {
                Some(input::Action::Up) => Some(|p| p.up()),
                Some(input::Action::Down) => Some(|p| p.down()),
                Some(input::Action::Right) => Some(|p| p.right()),
                Some(input::Action::Left) => Some(|p| p.left()),
                // Non-movement actions and "no action" are both no-ops here.
                Some(_) | None => None,
            };
        if let Some(step) = step {
            for (entity, pos, _, mv) in
                (&entities, &position, &player, (&mut moved).maybe()).join()
            {
                if let Some(mv) = mv {
                    // Already moving this frame: just retarget; `from` keeps
                    // its original value, as before.
                    mv.to = step(pos);
                } else {
                    // No Moved component yet: insert one lazily (applied at
                    // the next world.maintain()).
                    updater.insert(
                        entity,
                        components::Moved {
                            from: pos.clone(),
                            to: step(pos),
                        },
                    );
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use specs::{Builder, RunNow, World, WorldExt};
    /// Drive `PlayerInput` once with `pending_action` set, then assert that a
    /// player entity starting at `start` gains a `Moved` component targeting
    /// `expected` — or gains none at all when `start == expected`. Also
    /// asserts a non-player entity is never moved.
    fn test_movement(
        pending_action: Option<input::Action>,
        start: components::Position,
        expected: components::Position,
    ) {
        let mut world = World::new();
        world.register::<components::Player>();
        world.register::<components::Position>();
        world.register::<components::Moved>();
        world.insert(resources::PendingAction(pending_action));
        let ent_player = world
            .create_entity()
            .with(start.clone())
            .with(components::Player)
            .build();
        // Control entity: same position but no Player component.
        let ent_npc = world.create_entity().with(start.clone()).build();
        let mut player_input = PlayerInput;
        player_input.run_now(&world);
        // Flush LazyUpdate insertions so Moved components become visible.
        world.maintain();
        let read_moved = world.read_storage::<components::Moved>();
        let player_move = read_moved.get(ent_player);
        if start != expected {
            match player_move {
                None => panic!("Player Moved component not found at all"),
                Some(mv) => {
                    assert_eq!(mv.from, start);
                    assert_eq!(mv.to, expected);
                }
            };
        } else {
            match player_move {
                None => (),
                Some(_) => {
                    panic!(
                        "Did not expect to see a Moved component be added when expected == start"
                    );
                }
            }
        }
        // Make sure we don't move things without the Player component
        let npc_move = read_moved.get(ent_npc);
        match npc_move {
            None => (),
            Some(_) => {
                panic!("Should not have found a Moved component on NPC");
            }
        };
    }
    #[test]
    fn doesnt_move_when_no_actions_pending() {
        test_movement(
            None,
            components::Position::new(5, -3),
            components::Position::new(5, -3),
        );
    }
    #[test]
    fn moves_up_when_pressed() {
        // Negative Y is up
        test_movement(
            Some(input::Action::Up),
            components::Position::new(5, -3),
            components::Position::new(5, -4),
        );
    }
    #[test]
    fn moves_down_when_pressed() {
        // Positive Y is down
        test_movement(
            Some(input::Action::Down),
            components::Position::new(5, -3),
            components::Position::new(5, -2),
        );
    }
    #[test]
    fn moves_right_when_pressed() {
        test_movement(
            Some(input::Action::Right),
            components::Position::new(5, -3),
            components::Position::new(6, -3),
        );
    }
    #[test]
    fn moves_left_when_pressed() {
        test_movement(
            Some(input::Action::Left),
            components::Position::new(5, -3),
            components::Position::new(4, -3),
        );
    }
}
|
use super::*;
/// Object attribute 2 word — presumably GBA OAM (confirm): packs tile index,
/// draw priority and palette-bank selection into one u16.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct ObjAttr2(u16);
impl ObjAttr2 {
    const_new!();
    // Bit layout: 0-9 tile index, 10-11 priority, 12-15 palette bank.
    bitfield_int!(u16; 0..=9: u16, tile_index, with_tile_index, set_tile_index);
    bitfield_int!(u16; 10..=11: u16, priority, with_priority, set_priority);
    bitfield_int!(u16; 12..=15: u16, palbank_index, with_palbank_index, set_palbank_index);
}
|
use crate::days::day16::{parse_input, TicketInput};
use crate::days::day16::default_input;
use std::collections::HashMap;
/// Entry point for day 16 part 2: solve against the default puzzle input.
pub fn run() {
    println!("{}", tickets_str(default_input()).unwrap());
}
/// Parse the raw puzzle text and solve part 2.
pub fn tickets_str(input: &str) -> Result<i64, ()> {
    tickets(parse_input(input))
}
pub fn tickets(input: TicketInput) -> Result<i64, ()> {
let valid_tickets : Vec<_> = input.other_tickets.iter().filter(|ticket| {
ticket.iter().all(|n| {
input.rules.values().any(|rule| rule.iter().any(|r| (*n <= r.upper) & (*n >= r.lower)))
})
}).collect();
let mut options = HashMap::new();
for (rule, ranges) in &input.rules {
let mut valid_options = Vec::new();
for i in 0..valid_tickets[0].len() {
if valid_tickets.iter().all(|ticket| ranges.iter().any(|r| (ticket[i] <= r.upper) & (ticket[i] >= r.lower))) {
valid_options.push(i);
}
}
options.insert(rule, valid_options);
}
let mut mappings = HashMap::new();
while mappings.len() < input.rules.len() {
let mut match_rule = None;
let mut match_value = None;
for rule in options.keys() {
let values = options.get(rule).unwrap();
if values.len() == 1 {
match_rule = Some(rule.clone());
match_value = Some(values[0].clone());
break;
}
}
options.remove(match_rule.unwrap());
options = options.iter()
.map(|(k, v)| {
(*k, v.iter().cloned().filter(|n| *n != match_value.unwrap()).collect())
})
.collect();
mappings.insert(match_rule.unwrap(), match_value.unwrap());
}
Ok(mappings.keys()
.filter(|k| (***k).starts_with("departure"))
.map(|k| *mappings.get(k).unwrap())
.map(|i| input.ticket[i])
.product())
}
// Bug fix: the attribute was `#[cfg(tests)]` (note the `s`), which is never
// set by cargo, so this module was silently skipped at compile time and the
// test never ran or even type-checked.
#[cfg(test)]
pub mod tests {
    use super::*;
    #[test]
    pub fn part2_answer() {
        // `tickets_str` returns a Result, so compare against Ok(..); the old
        // bare-integer comparison would not have compiled once enabled.
        assert_eq!(tickets_str(default_input()), Ok(51240700105297))
    }
}
use std::collections::HashSet;
use proc_macro2::{TokenStream as Tokens, Span};
use syn::{Data, DeriveInput, Fields, DataEnum, Ident};
use quote::quote;
/// Derive entry point: generate an `impl logpack::Encoder` with both the
/// `logpack_encode` body and the matching `logpack_sizer` body.
pub fn derive(input: &DeriveInput) -> Tokens {
    let name = &input.ident;
    // Require `logpack::Encoder` on every generic parameter of the type.
    let generics = super::add_trait_bounds(
        input.generics.clone(),
        &HashSet::new(),
        &[quote!{ logpack::Encoder }],
    );
    let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
    // Generated twice: once for the encoder (sized = false) ...
    let encoder_fields = match &input.data {
        Data::Enum(data) => encoder_for_enum(name, data, false),
        Data::Struct(variant_data) => encoder_for_struct(&variant_data.fields, false),
        Data::Union{..} => { panic!() }
    };
    // ... and once for the sizer (sized = true). Unions are unsupported.
    let sizer_fields = match &input.data {
        Data::Enum(data) => encoder_for_enum(name, &data, true),
        Data::Struct(variant_data) => encoder_for_struct(&variant_data.fields, true),
        Data::Union{..} => { panic!() }
    };
    let result = quote! {
        impl #impl_generics logpack::Encoder for #name #ty_generics #where_clause {
            fn logpack_encode(&self, _buf: &mut logpack::buffers::BufEncoder) -> Result<(), (usize, usize)> {
                #encoder_fields;
                Ok(())
            }
            fn logpack_sizer(&self) -> usize {
                #sizer_fields
            }
        }
    };
    result
}
/// Dispatch struct-body generation by field kind (named / tuple / unit).
fn encoder_for_struct(fields: &Fields, sized: bool) -> Tokens {
    match fields {
        Fields::Named(named) => {
            let collected: Vec<_> = named.named.iter().collect();
            encoder_for_struct_kind(Some(collected.as_slice()), true, sized)
        }
        Fields::Unnamed(unnamed) => {
            let collected: Vec<_> = unnamed.unnamed.iter().collect();
            encoder_for_struct_kind(Some(collected.as_slice()), false, sized)
        }
        Fields::Unit => encoder_for_struct_kind(None, false, sized),
    }
}
/// Generate one match arm for an enum variant with fields: bind each field
/// by reference, then either sum sizes (`sizer`) or encode them in order.
fn encoder_for_enum_struct<'a>(name: &Ident, ident: &Ident,
                               fields: Vec<FieldExt<'a>>, prefix: Tokens,
                               named: bool, sizer: bool, header_size: usize) -> Tokens {
    // `ref f0, ref f1, ...` (or the named equivalents) for the pattern.
    let one_ref = fields.iter().map(|v| {
        let ident = &v.get_match_ident();
        quote! { ref #ident }
    });
    // Tuple variants destructure with parens, struct variants with braces.
    let fields_match = match named {
        false => quote!(( #(#one_ref),* )),
        true => quote!({ #(#one_ref),* }),
    };
    let body = if sizer {
        // Size = discriminant header + sum of each field's size.
        let body_impls = fields.iter().map(|v| {
            let ident = &v.get_match_ident();
            quote! { size += #ident.logpack_sizer(); }
        });
        quote!(let mut size: usize = #header_size; #(#body_impls);*; size )
    } else {
        // Encode each field in declaration order; `prefix` wrote the
        // discriminant already.
        let body_impls = fields.iter().map(|v| {
            let ident = &v.get_match_ident();
            quote! { #ident.logpack_encode(_buf)? }
        });
        quote!(#(#body_impls);*; Ok(()) )
    };
    quote! {
        &#name::#ident #fields_match => {
            #prefix
            #body
        }
    }
}
/// Generate the encoder (or sizer, when `sizer` is true) body for an enum:
/// a discriminant index sized to the variant count, then the variant fields.
fn encoder_for_enum(name: &Ident, data_enum: &DataEnum, sizer: bool) -> Tokens {
    let variants = &data_enum.variants;
    // `is_empty()` replaces the unidiomatic `len() == 0`.
    if variants.is_empty() {
        // Uninhabited enum: no value can exist, so the encoder body is empty
        // and the size is 0.
        if sizer {
            quote!(0)
        } else {
            quote!()
        }
    } else {
        let mut idx : u32 = 0;
        // Encode the discriminant as the smallest unsigned int that can hold
        // every variant index; `header_size` is its byte width.
        let (idx_type, header_size) = if variants.len() < 0x100 {
            ("u8", 1)
        } else if variants.len() < 0x10000 {
            ("u16", 2)
        } else {
            ("u32", 4)
        };
        let idx_type = Ident::new(idx_type, Span::call_site());
        let impls = variants.iter().map(|v| {
            let ident = &v.ident;
            // The encoder writes the discriminant first; the sizer accounts
            // for it via `header_size` instead.
            let prefix = if sizer {
                quote! {}
            } else {
                quote! {
                    let idx : #idx_type = #idx as #idx_type;
                    idx.logpack_encode(_buf)?;
                }
            };
            idx += 1;
            match v.fields {
                Fields::Named(ref fields) => {
                    let fields: Vec<_> = fields.named.iter().enumerate().map(|(i, f)|
                         FieldExt::new(f, i, true)).collect();
                    encoder_for_enum_struct(name, ident, fields, prefix, true,
                                            sizer, header_size)
                },
                Fields::Unnamed(ref fields) => {
                    let fields: Vec<_> = fields.unnamed.iter().enumerate().map(|(i, f)|
                         FieldExt::new(f, i, false)).collect();
                    encoder_for_enum_struct(name, ident, fields, prefix, false,
                                            sizer, header_size)
                },
                Fields::Unit => {
                    // Field-less variant: just the discriminant.
                    if sizer {
                        quote! { &#name::#ident => { #header_size } }
                    } else {
                        quote! {
                            &#name::#ident => {
                                #prefix
                                Ok(())
                            }
                        }
                    }
                },
            }
        });
        if sizer {
            quote!(
                match self {
                    #(#impls),*
                }
            )
        } else {
            quote!(
                match self {
                    #(#impls),*
                }?
            )
        }
    }
}
/// Generate the encoder/sizer body for a struct's fields; `None` marks a
/// unit struct (size 0, nothing to encode).
fn encoder_for_struct_kind(fields: Option<&[&syn::Field]>, named: bool, sizer: bool) -> Tokens {
    let unit = fields.is_none();
    let fields: Vec<_> = fields.unwrap_or(&[]).iter()
        .enumerate().map(|(i, f)| FieldExt::new(f, i, named)).collect();
    if unit {
        if sizer {
            quote![ 0 ]
        } else {
            quote![ ]
        }
    } else {
        // `self.name` / `self.0` access expressions per field.
        let fields = fields.iter().map(|f| {
            let field_expr = &f.access_expr();
            if sizer {
                quote!(size += #field_expr.logpack_sizer();)
            } else {
                quote!(#field_expr.logpack_encode(_buf)?)
            }
        });
        if sizer {
            quote!{
                let mut size : usize = 0;
                #(#fields);*;
                size
            }
        } else {
            quote!{ #(#fields);* }
        }
    }
}
/// A struct/enum field plus its positional index and whether it is named —
/// enough to build both access expressions and match-binding identifiers.
struct FieldExt<'a> {
    field: &'a syn::Field,
    idx: usize,
    named: bool,
}
impl<'a> FieldExt<'a> {
    fn new(field: &'a syn::Field, idx: usize, named: bool) -> FieldExt<'a> {
        FieldExt { field, idx, named }
    }
    /// Expression reading this field off `self`: `self.name` or `self.0`.
    fn access_expr(&self) -> Tokens {
        if self.named {
            let ident = &self.field.ident;
            quote! { self.#ident }
        } else {
            // `syn::Index` ensures the tuple index is emitted without a
            // type suffix.
            let idx = syn::Index::from(self.idx);
            quote! { self.#idx }
        }
    }
    /// Identifier to bind this field to in a match pattern: its own name,
    /// or a synthetic `f0`, `f1`, ... for tuple fields.
    fn get_match_ident(&self) -> Ident {
        if self.named {
            self.field.ident.clone().unwrap()
        } else {
            Ident::new(&format!("f{}", self.idx), Span::call_site())
        }
    }
}
|
use std::fmt;
use auto_impl::auto_impl;
/// Regression test trait: `auto_impl(&)` must carry the `where Self: ...`
/// clauses through to the generated `impl Trait for &T`.
#[auto_impl(&)]
trait Trait {
    fn foo(&self)
    where Self: Clone;
    fn bar(&self)
    where Self: Default + fmt::Display;
}
/// Concrete type satisfying every bound the trait methods require.
#[derive(Clone, Default)]
struct Foo {}
impl Trait for Foo {
    fn foo(&self)
    where Self: Clone,
    {}
    fn bar(&self)
    where Self: Default + fmt::Display,
    {}
}
// `Display` is needed only to satisfy `Trait::bar`'s where-clause; the body
// is never invoked in this compile test.
impl fmt::Display for Foo {
    fn fmt(&self, _: &mut fmt::Formatter) -> fmt::Result {
        unimplemented!()
    }
}
/// Compile-time assertion that `T: Trait`.
fn assert_impl<T: Trait>() {}
fn main() {
    // Both the base type and its reference form must implement the trait.
    assert_impl::<Foo>();
    assert_impl::<&Foo>();
}
|
// svd2rust-style generated accessors for the ADC `ISR` register.
// ADRDY: "ADC ready" status flag — read as an enum, write-one-to-clear.
#[doc = "Register `ISR` reader"]
pub type R = crate::R<ISR_SPEC>;
#[doc = "Register `ISR` writer"]
pub type W = crate::W<ISR_SPEC>;
#[doc = "Field `ADRDY` reader - ADC ready flag"]
pub type ADRDY_R = crate::BitReader<ADRDYR_A>;
#[doc = "ADC ready flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ADRDYR_A {
    #[doc = "0: ADC is not ready to start conversion"]
    NotReady = 0,
    #[doc = "1: ADC is ready to start conversion"]
    Ready = 1,
}
impl From<ADRDYR_A> for bool {
    #[inline(always)]
    fn from(variant: ADRDYR_A) -> Self {
        variant as u8 != 0
    }
}
impl ADRDY_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ADRDYR_A {
        match self.bits {
            false => ADRDYR_A::NotReady,
            true => ADRDYR_A::Ready,
        }
    }
    #[doc = "ADC is not ready to start conversion"]
    #[inline(always)]
    pub fn is_not_ready(&self) -> bool {
        *self == ADRDYR_A::NotReady
    }
    #[doc = "ADC is ready to start conversion"]
    #[inline(always)]
    pub fn is_ready(&self) -> bool {
        *self == ADRDYR_A::Ready
    }
}
// Write-side enum has only `Clear` (1): writing 1 clears the flag.
#[doc = "ADC ready flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ADRDYW_AW {
    #[doc = "1: Clear ADC is ready to start conversion flag"]
    Clear = 1,
}
impl From<ADRDYW_AW> for bool {
    #[inline(always)]
    fn from(variant: ADRDYW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `ADRDY` writer - ADC ready flag"]
pub type ADRDY_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ADRDYW_AW>;
impl<'a, REG, const O: u8> ADRDY_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear ADC is ready to start conversion flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(ADRDYW_AW::Clear)
    }
}
// EOSMP: regular-group "end of sampling" flag — same reader-enum /
// write-one-to-clear pattern as ADRDY above.
#[doc = "Field `EOSMP` reader - ADC group regular end of sampling flag"]
pub type EOSMP_R = crate::BitReader<EOSMPR_A>;
#[doc = "ADC group regular end of sampling flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOSMPR_A {
    #[doc = "0: End of sampling phase no yet reached"]
    NotEnded = 0,
    #[doc = "1: End of sampling phase reached"]
    Ended = 1,
}
impl From<EOSMPR_A> for bool {
    #[inline(always)]
    fn from(variant: EOSMPR_A) -> Self {
        variant as u8 != 0
    }
}
impl EOSMP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EOSMPR_A {
        match self.bits {
            false => EOSMPR_A::NotEnded,
            true => EOSMPR_A::Ended,
        }
    }
    #[doc = "End of sampling phase no yet reached"]
    #[inline(always)]
    pub fn is_not_ended(&self) -> bool {
        *self == EOSMPR_A::NotEnded
    }
    #[doc = "End of sampling phase reached"]
    #[inline(always)]
    pub fn is_ended(&self) -> bool {
        *self == EOSMPR_A::Ended
    }
}
#[doc = "ADC group regular end of sampling flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOSMPW_AW {
    #[doc = "1: Clear end of sampling phase reached flag"]
    Clear = 1,
}
impl From<EOSMPW_AW> for bool {
    #[inline(always)]
    fn from(variant: EOSMPW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EOSMP` writer - ADC group regular end of sampling flag"]
pub type EOSMP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EOSMPW_AW>;
impl<'a, REG, const O: u8> EOSMP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear end of sampling phase reached flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(EOSMPW_AW::Clear)
    }
}
// EOC — "end of regular conversion" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear.
#[doc = "Field `EOC` reader - ADC group regular end of unitary conversion flag"]
pub type EOC_R = crate::BitReader<EOCR_A>;
#[doc = "ADC group regular end of unitary conversion flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOCR_A {
    #[doc = "0: Regular conversion is not complete"]
    NotComplete = 0,
    #[doc = "1: Regular conversion complete"]
    Complete = 1,
}
impl From<EOCR_A> for bool {
    #[inline(always)]
    fn from(variant: EOCR_A) -> Self {
        variant as u8 != 0
    }
}
impl EOC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EOCR_A {
        match self.bits {
            false => EOCR_A::NotComplete,
            true => EOCR_A::Complete,
        }
    }
    #[doc = "Regular conversion is not complete"]
    #[inline(always)]
    pub fn is_not_complete(&self) -> bool {
        *self == EOCR_A::NotComplete
    }
    #[doc = "Regular conversion complete"]
    #[inline(always)]
    pub fn is_complete(&self) -> bool {
        *self == EOCR_A::Complete
    }
}
#[doc = "ADC group regular end of unitary conversion flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOCW_AW {
    #[doc = "1: Clear regular conversion complete flag"]
    Clear = 1,
}
impl From<EOCW_AW> for bool {
    #[inline(always)]
    fn from(variant: EOCW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EOC` writer - ADC group regular end of unitary conversion flag"]
pub type EOC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EOCW_AW>;
impl<'a, REG, const O: u8> EOC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear regular conversion complete flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(EOCW_AW::Clear)
    }
}
// EOS — "end of regular sequence" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear.
#[doc = "Field `EOS` reader - ADC group regular end of sequence conversions flag"]
pub type EOS_R = crate::BitReader<EOSR_A>;
#[doc = "ADC group regular end of sequence conversions flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOSR_A {
    #[doc = "0: Regular sequence is not complete"]
    NotComplete = 0,
    #[doc = "1: Regular sequence complete"]
    Complete = 1,
}
impl From<EOSR_A> for bool {
    #[inline(always)]
    fn from(variant: EOSR_A) -> Self {
        variant as u8 != 0
    }
}
impl EOS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EOSR_A {
        match self.bits {
            false => EOSR_A::NotComplete,
            true => EOSR_A::Complete,
        }
    }
    #[doc = "Regular sequence is not complete"]
    #[inline(always)]
    pub fn is_not_complete(&self) -> bool {
        *self == EOSR_A::NotComplete
    }
    #[doc = "Regular sequence complete"]
    #[inline(always)]
    pub fn is_complete(&self) -> bool {
        *self == EOSR_A::Complete
    }
}
#[doc = "ADC group regular end of sequence conversions flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum EOSW_AW {
    #[doc = "1: Clear regular sequence complete flag"]
    Clear = 1,
}
impl From<EOSW_AW> for bool {
    #[inline(always)]
    fn from(variant: EOSW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EOS` writer - ADC group regular end of sequence conversions flag"]
pub type EOS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, EOSW_AW>;
impl<'a, REG, const O: u8> EOS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear regular sequence complete flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(EOSW_AW::Clear)
    }
}
// OVR — "regular overrun" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear.
#[doc = "Field `OVR` reader - ADC group regular overrun flag"]
pub type OVR_R = crate::BitReader<OVRR_A>;
#[doc = "ADC group regular overrun flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OVRR_A {
    #[doc = "0: No overrun occurred"]
    NoOverrun = 0,
    #[doc = "1: Overrun occurred"]
    Overrun = 1,
}
impl From<OVRR_A> for bool {
    #[inline(always)]
    fn from(variant: OVRR_A) -> Self {
        variant as u8 != 0
    }
}
impl OVR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OVRR_A {
        match self.bits {
            false => OVRR_A::NoOverrun,
            true => OVRR_A::Overrun,
        }
    }
    #[doc = "No overrun occurred"]
    #[inline(always)]
    pub fn is_no_overrun(&self) -> bool {
        *self == OVRR_A::NoOverrun
    }
    #[doc = "Overrun occurred"]
    #[inline(always)]
    pub fn is_overrun(&self) -> bool {
        *self == OVRR_A::Overrun
    }
}
#[doc = "ADC group regular overrun flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OVRW_AW {
    #[doc = "1: Clear overrun occurred flag"]
    Clear = 1,
}
impl From<OVRW_AW> for bool {
    #[inline(always)]
    fn from(variant: OVRW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `OVR` writer - ADC group regular overrun flag"]
pub type OVR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, OVRW_AW>;
impl<'a, REG, const O: u8> OVR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear overrun occurred flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(OVRW_AW::Clear)
    }
}
// JEOC — "end of injected conversion" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear.
#[doc = "Field `JEOC` reader - ADC group injected end of unitary conversion flag"]
pub type JEOC_R = crate::BitReader<JEOCR_A>;
#[doc = "ADC group injected end of unitary conversion flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JEOCR_A {
    #[doc = "0: Injected conversion is not complete"]
    NotComplete = 0,
    #[doc = "1: Injected conversion complete"]
    Complete = 1,
}
impl From<JEOCR_A> for bool {
    #[inline(always)]
    fn from(variant: JEOCR_A) -> Self {
        variant as u8 != 0
    }
}
impl JEOC_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JEOCR_A {
        match self.bits {
            false => JEOCR_A::NotComplete,
            true => JEOCR_A::Complete,
        }
    }
    #[doc = "Injected conversion is not complete"]
    #[inline(always)]
    pub fn is_not_complete(&self) -> bool {
        *self == JEOCR_A::NotComplete
    }
    #[doc = "Injected conversion complete"]
    #[inline(always)]
    pub fn is_complete(&self) -> bool {
        *self == JEOCR_A::Complete
    }
}
#[doc = "ADC group injected end of unitary conversion flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JEOCW_AW {
    #[doc = "1: Clear injected conversion complete flag"]
    Clear = 1,
}
impl From<JEOCW_AW> for bool {
    #[inline(always)]
    fn from(variant: JEOCW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `JEOC` writer - ADC group injected end of unitary conversion flag"]
pub type JEOC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, JEOCW_AW>;
impl<'a, REG, const O: u8> JEOC_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear injected conversion complete flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(JEOCW_AW::Clear)
    }
}
// JEOS — "end of injected sequence" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear.
#[doc = "Field `JEOS` reader - ADC group injected end of sequence conversions flag"]
pub type JEOS_R = crate::BitReader<JEOSR_A>;
#[doc = "ADC group injected end of sequence conversions flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JEOSR_A {
    #[doc = "0: Injected sequence is not complete"]
    NotComplete = 0,
    #[doc = "1: Injected sequence complete"]
    Complete = 1,
}
impl From<JEOSR_A> for bool {
    #[inline(always)]
    fn from(variant: JEOSR_A) -> Self {
        variant as u8 != 0
    }
}
impl JEOS_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JEOSR_A {
        match self.bits {
            false => JEOSR_A::NotComplete,
            true => JEOSR_A::Complete,
        }
    }
    #[doc = "Injected sequence is not complete"]
    #[inline(always)]
    pub fn is_not_complete(&self) -> bool {
        *self == JEOSR_A::NotComplete
    }
    #[doc = "Injected sequence complete"]
    #[inline(always)]
    pub fn is_complete(&self) -> bool {
        *self == JEOSR_A::Complete
    }
}
#[doc = "ADC group injected end of sequence conversions flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JEOSW_AW {
    #[doc = "1: Clear Injected sequence complete flag"]
    Clear = 1,
}
impl From<JEOSW_AW> for bool {
    #[inline(always)]
    fn from(variant: JEOSW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `JEOS` writer - ADC group injected end of sequence conversions flag"]
pub type JEOS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, JEOSW_AW>;
impl<'a, REG, const O: u8> JEOS_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear Injected sequence complete flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(JEOSW_AW::Clear)
    }
}
// AWD1 — "analog watchdog 1" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear. AWD2/AWD3 share the same reader/writer shapes
// and are re-exported as aliases below.
#[doc = "Field `AWD1` reader - ADC analog watchdog 1 flag"]
pub type AWD1_R = crate::BitReader<AWD1R_A>;
#[doc = "ADC analog watchdog 1 flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AWD1R_A {
    #[doc = "0: No analog watchdog event occurred"]
    NoEvent = 0,
    #[doc = "1: Analog watchdog event occurred"]
    Event = 1,
}
impl From<AWD1R_A> for bool {
    #[inline(always)]
    fn from(variant: AWD1R_A) -> Self {
        variant as u8 != 0
    }
}
impl AWD1_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> AWD1R_A {
        match self.bits {
            false => AWD1R_A::NoEvent,
            true => AWD1R_A::Event,
        }
    }
    #[doc = "No analog watchdog event occurred"]
    #[inline(always)]
    pub fn is_no_event(&self) -> bool {
        *self == AWD1R_A::NoEvent
    }
    #[doc = "Analog watchdog event occurred"]
    #[inline(always)]
    pub fn is_event(&self) -> bool {
        *self == AWD1R_A::Event
    }
}
#[doc = "ADC analog watchdog 1 flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AWD1W_AW {
    #[doc = "1: Clear analog watchdog event occurred flag"]
    Clear = 1,
}
impl From<AWD1W_AW> for bool {
    #[inline(always)]
    fn from(variant: AWD1W_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `AWD1` writer - ADC analog watchdog 1 flag"]
pub type AWD1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, AWD1W_AW>;
impl<'a, REG, const O: u8> AWD1_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear analog watchdog event occurred flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(AWD1W_AW::Clear)
    }
}
#[doc = "Field `AWD2` reader - ADC analog watchdog 2 flag"]
pub use AWD1_R as AWD2_R;
#[doc = "Field `AWD3` reader - ADC analog watchdog 3 flag"]
pub use AWD1_R as AWD3_R;
#[doc = "Field `AWD2` writer - ADC analog watchdog 2 flag"]
pub use AWD1_W as AWD2_W;
#[doc = "Field `AWD3` writer - ADC analog watchdog 3 flag"]
pub use AWD1_W as AWD3_W;
// JQOVF — "injected context queue overflow" status flag (svd2rust-generated).
// Write 1 (`Clear`) to clear.
#[doc = "Field `JQOVF` reader - ADC group injected contexts queue overflow flag"]
pub type JQOVF_R = crate::BitReader<JQOVFR_A>;
#[doc = "ADC group injected contexts queue overflow flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JQOVFR_A {
    #[doc = "0: No injected context queue overflow has occurred"]
    NoOverflow = 0,
    #[doc = "1: Injected context queue overflow has occurred"]
    Overflow = 1,
}
impl From<JQOVFR_A> for bool {
    #[inline(always)]
    fn from(variant: JQOVFR_A) -> Self {
        variant as u8 != 0
    }
}
impl JQOVF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> JQOVFR_A {
        match self.bits {
            false => JQOVFR_A::NoOverflow,
            true => JQOVFR_A::Overflow,
        }
    }
    #[doc = "No injected context queue overflow has occurred"]
    #[inline(always)]
    pub fn is_no_overflow(&self) -> bool {
        *self == JQOVFR_A::NoOverflow
    }
    #[doc = "Injected context queue overflow has occurred"]
    #[inline(always)]
    pub fn is_overflow(&self) -> bool {
        *self == JQOVFR_A::Overflow
    }
}
#[doc = "ADC group injected contexts queue overflow flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum JQOVFW_AW {
    #[doc = "1: Clear injected context queue overflow flag"]
    Clear = 1,
}
impl From<JQOVFW_AW> for bool {
    #[inline(always)]
    fn from(variant: JQOVFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `JQOVF` writer - ADC group injected contexts queue overflow flag"]
pub type JQOVF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, JQOVFW_AW>;
impl<'a, REG, const O: u8> JQOVF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear injected context queue overflow flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(JQOVFW_AW::Clear)
    }
}
// Read proxy for ISR: each accessor extracts a single status bit at the
// documented position from the cached register value (svd2rust-generated).
impl R {
    #[doc = "Bit 0 - ADC ready flag"]
    #[inline(always)]
    pub fn adrdy(&self) -> ADRDY_R {
        ADRDY_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - ADC group regular end of sampling flag"]
    #[inline(always)]
    pub fn eosmp(&self) -> EOSMP_R {
        EOSMP_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - ADC group regular end of unitary conversion flag"]
    #[inline(always)]
    pub fn eoc(&self) -> EOC_R {
        EOC_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - ADC group regular end of sequence conversions flag"]
    #[inline(always)]
    pub fn eos(&self) -> EOS_R {
        EOS_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - ADC group regular overrun flag"]
    #[inline(always)]
    pub fn ovr(&self) -> OVR_R {
        OVR_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - ADC group injected end of unitary conversion flag"]
    #[inline(always)]
    pub fn jeoc(&self) -> JEOC_R {
        JEOC_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - ADC group injected end of sequence conversions flag"]
    #[inline(always)]
    pub fn jeos(&self) -> JEOS_R {
        JEOS_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - ADC analog watchdog 1 flag"]
    #[inline(always)]
    pub fn awd1(&self) -> AWD1_R {
        AWD1_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - ADC analog watchdog 2 flag"]
    #[inline(always)]
    pub fn awd2(&self) -> AWD2_R {
        AWD2_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - ADC analog watchdog 3 flag"]
    #[inline(always)]
    pub fn awd3(&self) -> AWD3_R {
        AWD3_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - ADC group injected contexts queue overflow flag"]
    #[inline(always)]
    pub fn jqovf(&self) -> JQOVF_R {
        JQOVF_R::new(((self.bits >> 10) & 1) != 0)
    }
}
// Write proxy for ISR: each accessor returns a field writer positioned at the
// field's bit offset (the const generic argument). `bits` is unsafe because
// it bypasses the typed field writers (svd2rust-generated).
impl W {
    #[doc = "Bit 0 - ADC ready flag"]
    #[inline(always)]
    #[must_use]
    pub fn adrdy(&mut self) -> ADRDY_W<ISR_SPEC, 0> {
        ADRDY_W::new(self)
    }
    #[doc = "Bit 1 - ADC group regular end of sampling flag"]
    #[inline(always)]
    #[must_use]
    pub fn eosmp(&mut self) -> EOSMP_W<ISR_SPEC, 1> {
        EOSMP_W::new(self)
    }
    #[doc = "Bit 2 - ADC group regular end of unitary conversion flag"]
    #[inline(always)]
    #[must_use]
    pub fn eoc(&mut self) -> EOC_W<ISR_SPEC, 2> {
        EOC_W::new(self)
    }
    #[doc = "Bit 3 - ADC group regular end of sequence conversions flag"]
    #[inline(always)]
    #[must_use]
    pub fn eos(&mut self) -> EOS_W<ISR_SPEC, 3> {
        EOS_W::new(self)
    }
    #[doc = "Bit 4 - ADC group regular overrun flag"]
    #[inline(always)]
    #[must_use]
    pub fn ovr(&mut self) -> OVR_W<ISR_SPEC, 4> {
        OVR_W::new(self)
    }
    #[doc = "Bit 5 - ADC group injected end of unitary conversion flag"]
    #[inline(always)]
    #[must_use]
    pub fn jeoc(&mut self) -> JEOC_W<ISR_SPEC, 5> {
        JEOC_W::new(self)
    }
    #[doc = "Bit 6 - ADC group injected end of sequence conversions flag"]
    #[inline(always)]
    #[must_use]
    pub fn jeos(&mut self) -> JEOS_W<ISR_SPEC, 6> {
        JEOS_W::new(self)
    }
    #[doc = "Bit 7 - ADC analog watchdog 1 flag"]
    #[inline(always)]
    #[must_use]
    pub fn awd1(&mut self) -> AWD1_W<ISR_SPEC, 7> {
        AWD1_W::new(self)
    }
    #[doc = "Bit 8 - ADC analog watchdog 2 flag"]
    #[inline(always)]
    #[must_use]
    pub fn awd2(&mut self) -> AWD2_W<ISR_SPEC, 8> {
        AWD2_W::new(self)
    }
    #[doc = "Bit 9 - ADC analog watchdog 3 flag"]
    #[inline(always)]
    #[must_use]
    pub fn awd3(&mut self) -> AWD3_W<ISR_SPEC, 9> {
        AWD3_W::new(self)
    }
    #[doc = "Bit 10 - ADC group injected contexts queue overflow flag"]
    #[inline(always)]
    #[must_use]
    pub fn jqovf(&mut self) -> JQOVF_W<ISR_SPEC, 10> {
        JQOVF_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "ADC interrupt and status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`isr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`isr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ISR_SPEC;
impl crate::RegisterSpec for ISR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`isr::R`](R) reader structure"]
impl crate::Readable for ISR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`isr::W`](W) writer structure"]
impl crate::Writable for ISR_SPEC {
    // Both modify bitmaps are zero: no field needs a forced 0/1 write during
    // `modify` — presumably because the ISR flags are rc_w1 and are only
    // cleared via the explicit `clear()` writers; confirm against the SVD.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets ISR to value 0"]
impl crate::Resettable for ISR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::TryInto;
use std::sync::Arc;
use crypto::address::AddressImpl;
use crypto::dsa::DsaImpl;
use node_consensus_base::ConsensusInMessage;
use node_executor::module;
use primitives::{codec, Balance, Event, Proof, Receipt};
use utils_test::test_accounts;
mod base;
/// End-to-end PoA consensus test: a single authority (`account1`) submits
/// three balance transfers (values 1, 2, 3) to `account2` across three
/// generated blocks, then the test checks block proofs, per-block balances,
/// payload transactions and the final receipt.
#[tokio::test]
async fn test_poa_balance() {
    let _ = env_logger::try_init();
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa.clone(), address);
    let (account1, account2) = (&test_accounts[0], &test_accounts[1]);
    // account1 is both the sole PoA authority and the transfer sender.
    let authority_accounts = [account1];
    let (chain, txpool, consensus) = base::get_standalone_service(&authority_accounts, account1);
    // The genesis block (number 0) carries no PoA proof.
    let proof = chain
        .get_proof(&chain.get_block_hash(&0).unwrap().unwrap())
        .unwrap()
        .unwrap();
    assert_eq!(proof, Default::default());
    // NOTE(review): the witness tuple is presumably (secret_key, nonce,
    // until-block) — confirm against `build_transaction`'s signature.
    let tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 10)),
                chain
                    .build_call(
                        "balance".to_string(),
                        "transfer".to_string(),
                        module::balance::TransferParams {
                            recipient: account2.address.clone(),
                            value: 1,
                        },
                    )
                    .unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 1
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 1).await;
    // Block 1's proof must be account1's PoA signature over the block hash.
    let block_hash = chain.get_block_hash(&1).unwrap().unwrap();
    let proof = chain.get_proof(&block_hash).unwrap().unwrap();
    let expected_proof: Proof = {
        let proof =
            node_consensus_poa::proof::Proof::new(&block_hash, &account1.secret_key, dsa.clone())
                .unwrap();
        proof.try_into().unwrap()
    };
    assert_eq!(proof, expected_proof);
    // Second transfer (value 2), same nonce 0 but a later until-block (11).
    let tx2_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 11)),
                chain
                    .build_call(
                        "balance".to_string(),
                        "transfer".to_string(),
                        module::balance::TransferParams {
                            recipient: account2.address.clone(),
                            value: 2,
                        },
                    )
                    .unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 2
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 2).await;
    // Third transfer (value 3).
    let tx3_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 12)),
                chain
                    .build_call(
                        "balance".to_string(),
                        "transfer".to_string(),
                        module::balance::TransferParams {
                            recipient: account2.address.clone(),
                            value: 3,
                        },
                    )
                    .unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 3
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 3).await;
    // check block 1
    // Balances imply a genesis balance of 10 for account1: 10-1=9, 9-2=7, 7-3=4.
    let balance: Balance = chain
        .execute_call_with_block_number(
            &1,
            Some(&account1.address),
            "balance".to_string(),
            "get_balance".to_string(),
            node_executor_primitives::EmptyParams,
        )
        .unwrap()
        .unwrap();
    assert_eq!(balance, 9);
    let block1 = chain
        .get_block(&chain.get_block_hash(&1).unwrap().unwrap())
        .unwrap()
        .unwrap();
    assert_eq!(block1.body.payload_txs[0], tx1_hash);
    // check block 2
    let balance: Balance = chain
        .execute_call_with_block_number(
            &2,
            Some(&account1.address),
            "balance".to_string(),
            "get_balance".to_string(),
            node_executor_primitives::EmptyParams,
        )
        .unwrap()
        .unwrap();
    assert_eq!(balance, 7);
    let block2 = chain
        .get_block(&chain.get_block_hash(&2).unwrap().unwrap())
        .unwrap()
        .unwrap();
    assert_eq!(block2.body.payload_txs[0], tx2_hash);
    // check block 3
    let balance: Balance = chain
        .execute_call_with_block_number(
            &3,
            Some(&account1.address),
            "balance".to_string(),
            "get_balance".to_string(),
            node_executor_primitives::EmptyParams,
        )
        .unwrap()
        .unwrap();
    assert_eq!(balance, 4);
    let block3 = chain
        .get_block(&chain.get_block_hash(&3).unwrap().unwrap())
        .unwrap()
        .unwrap();
    assert_eq!(block3.body.payload_txs[0], tx3_hash);
    // The receipt for tx3 must record the Transferred event and an Ok result.
    let tx3_receipt = chain.get_receipt(&tx3_hash).unwrap().unwrap();
    assert_eq!(
        tx3_receipt,
        Receipt {
            block_number: 3,
            events: vec![Event::from_data(
                "Transferred".to_string(),
                module::balance::Transferred {
                    sender: account1.address.clone(),
                    recipient: account2.address.clone(),
                    value: 3,
                },
            )
            .unwrap()],
            result: Ok(codec::encode(&()).unwrap()),
        }
    );
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![warn(missing_debug_implementations, missing_docs)]
//! Vertex and its Adjacency List
use crate::model::GRAPH_SLACK_FACTOR;
use super::AdjacencyList;
/// The out neighbors of vertex_id
#[derive(Debug)]
pub struct VertexAndNeighbors {
    /// The id of the vertex
    pub vertex_id: u32,
    /// All out neighbors (id) of vertex_id
    // Kept private; mutate via `set_neighbors`/`add_to_neighbors` only.
    neighbors: AdjacencyList,
}
impl VertexAndNeighbors {
    /// Create VertexAndNeighbors with id and capacity
    pub fn for_range(id: u32, range: usize) -> Self {
        Self {
            vertex_id: id,
            neighbors: AdjacencyList::for_range(range),
        }
    }
    /// Create VertexAndNeighbors with id and neighbors
    pub fn new(vertex_id: u32, neighbors: AdjacencyList) -> Self {
        Self {
            vertex_id,
            neighbors,
        }
    }
    /// Get size of neighbors
    #[inline(always)]
    pub fn size(&self) -> usize {
        self.neighbors.len()
    }
    /// Update the neighbors vector (post a pruning exercise)
    #[inline(always)]
    pub fn set_neighbors(&mut self, new_neighbors: AdjacencyList) {
        // Replace the graph entry with the pruned neighbors
        self.neighbors = new_neighbors;
    }
    /// Get the neighbors
    #[inline(always)]
    pub fn get_neighbors(&self) -> &AdjacencyList {
        &self.neighbors
    }
    /// Adds a node to the list of neighbors for the given node.
    ///
    /// # Arguments
    ///
    /// * `node_id` - The ID of the node to add.
    /// * `range` - The range of the graph.
    ///
    /// # Return
    ///
    /// Returns `None` if the node is already in the list of neighbors, or a `Vec` containing the updated list of neighbors if the list of neighbors is full.
    pub fn add_to_neighbors(&mut self, node_id: u32, range: u32) -> Option<Vec<u32>> {
        // Check if n is already in the graph entry
        if self.neighbors.contains(&node_id) {
            return None;
        }
        let neighbor_len = self.neighbors.len();
        // If not, check if the graph entry has enough space
        // (capacity budget is `range` scaled by the slack factor).
        if neighbor_len < (GRAPH_SLACK_FACTOR * range as f64) as usize {
            // If yes, add n to the graph entry
            self.neighbors.push(node_id);
            return None;
        }
        // The entry is full: return a candidate list (existing neighbors plus
        // `node_id`) for the caller to prune, leaving `self.neighbors` intact.
        let mut copy_of_neighbors = Vec::with_capacity(neighbor_len + 1);
        unsafe {
            // SAFETY: `copy_of_neighbors` was allocated with capacity
            // `neighbor_len + 1`, so writing `neighbor_len` elements copied
            // from `self.neighbors` plus one trailing `node_id` stays within
            // the allocation. The source and destination cannot overlap
            // because the destination is a fresh allocation, and `set_len`
            // is only called after all `neighbor_len + 1` elements have been
            // initialized.
            let dst = copy_of_neighbors.as_mut_ptr();
            std::ptr::copy_nonoverlapping(self.neighbors.as_ptr(), dst, neighbor_len);
            dst.add(neighbor_len).write(node_id);
            copy_of_neighbors.set_len(neighbor_len + 1);
        }
        Some(copy_of_neighbors)
    }
}
#[cfg(test)]
mod vertex_and_neighbors_tests {
    use crate::model::GRAPH_SLACK_FACTOR;
    use super::*;
    /// Capacity is `range` scaled by the slack factor.
    // NOTE(review): the expectation is computed in f32 while production code
    // uses f64 — confirm the rounding matches for larger ranges.
    #[test]
    fn test_set_with_capacity() {
        let neighbors = VertexAndNeighbors::for_range(20, 10);
        assert_eq!(neighbors.vertex_id, 20);
        assert_eq!(
            neighbors.neighbors.capacity(),
            (10_f32 * GRAPH_SLACK_FACTOR as f32).ceil() as usize
        );
    }
    /// `size()` reflects the number of pushed neighbors.
    #[test]
    fn test_size() {
        let mut neighbors = VertexAndNeighbors::for_range(20, 10);
        for i in 0..5 {
            neighbors.neighbors.push(i);
        }
        assert_eq!(neighbors.size(), 5);
    }
    /// `set_neighbors` replaces the whole adjacency list.
    #[test]
    fn test_set_neighbors() {
        let mut neighbors = VertexAndNeighbors::for_range(20, 10);
        let new_vec = AdjacencyList::from(vec![1, 2, 3, 4, 5]);
        // Fixed: `new_vec` is already an AdjacencyList, so the previous
        // `AdjacencyList::from(new_vec.clone())` was a redundant identity
        // conversion.
        neighbors.set_neighbors(new_vec.clone());
        assert_eq!(neighbors.neighbors, new_vec);
    }
    /// `get_neighbors` returns a reference to the stored list, not a copy.
    #[test]
    fn test_get_neighbors() {
        let mut neighbors = VertexAndNeighbors::for_range(20, 10);
        neighbors.set_neighbors(AdjacencyList::from(vec![1, 2, 3, 4, 5]));
        let neighbor_ref = neighbors.get_neighbors();
        assert!(std::ptr::eq(&neighbors.neighbors, neighbor_ref))
    }
    /// Covers: fresh insert, duplicate insert (no-op), full entry (returns
    /// the candidate list without mutating), and insert within budget.
    #[test]
    fn test_add_to_neighbors() {
        let mut neighbors = VertexAndNeighbors::for_range(20, 10);
        assert_eq!(neighbors.add_to_neighbors(1, 1), None);
        assert_eq!(neighbors.neighbors, AdjacencyList::from(vec![1]));
        assert_eq!(neighbors.add_to_neighbors(1, 1), None);
        assert_eq!(neighbors.neighbors, AdjacencyList::from(vec![1]));
        let ret = neighbors.add_to_neighbors(2, 1);
        assert!(ret.is_some());
        assert_eq!(ret.unwrap(), vec![1, 2]);
        assert_eq!(neighbors.neighbors, AdjacencyList::from(vec![1]));
        assert_eq!(neighbors.add_to_neighbors(2, 2), None);
        assert_eq!(neighbors.neighbors, AdjacencyList::from(vec![1, 2]));
    }
}
|
//! Types for the `open.189.cn` API responses.
/// An access token for the `open.189.cn` API.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct AccessToken {
    /// Nonce for distinguishing between access token requests.
    pub state: String,
    /// The access token returned.
    pub token: String,
    /// TTL of the access token, in seconds.
    // NOTE(review): presumably relative to the time of issuance — confirm
    // against the open.189.cn API documentation.
    pub expires_in: u64,
}
/// A summary of a successfully queued SMS verification code.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct SentSmsCode {
    /// The time the SMS is queued for sending, as recorded by the API.
    // NOTE(review): unit/epoch of this timestamp is not visible here —
    // confirm against the API response format.
    pub send_time: u64,
    /// API-generated unique identifier for the SMS.
    pub sms_id: String,
}
|
//! This module contains a configuration of a Border to set its color via [`BorderColor`].
use crate::{
grid::{
color::AnsiColor,
config::{Border, ColoredConfig, Entity},
records::{ExactRecords, Records},
},
settings::{color::Color, CellOption, TableOption},
};
/// BorderColor represents a colored border of a Cell.
///
/// ```rust,no_run
/// # use tabled::{settings::{style::BorderColor, Style, Color, object::Rows, Modify}, Table};
/// #
/// # let data: Vec<&'static str> = Vec::new();
/// #
/// let table = Table::new(&data)
///     .with(Style::ascii())
///     .with(Modify::new(Rows::single(0)).with(BorderColor::default().top(Color::FG_RED)));
/// ```
#[derive(Debug, Clone, Default, Eq, PartialEq)]
pub struct BorderColor(Border<AnsiColor<'static>>);
impl BorderColor {
    /// Constructs cell border colors with every side and corner set.
    #[allow(clippy::too_many_arguments)]
    pub fn full(
        top: Color,
        bottom: Color,
        left: Color,
        right: Color,
        top_left: Color,
        top_right: Color,
        bottom_left: Color,
        bottom_right: Color,
    ) -> Self {
        Self(Border::full(
            top.into(),
            bottom.into(),
            left.into(),
            right.into(),
            top_left.into(),
            top_right.into(),
            bottom_left.into(),
            bottom_right.into(),
        ))
    }
    /// Constructs cell border colors with every side and corner set to the
    /// same color. It behaves like [`BorderColor::full`] with one color
    /// repeated for each side.
    pub fn filled(c: Color) -> Self {
        // Convert once, then clone for each of the eight positions; the last
        // position takes ownership so exactly eight values exist.
        let c: AnsiColor<'_> = c.into();
        Self(Border {
            top: Some(c.clone()),
            bottom: Some(c.clone()),
            left: Some(c.clone()),
            right: Some(c.clone()),
            left_bottom_corner: Some(c.clone()),
            left_top_corner: Some(c.clone()),
            right_bottom_corner: Some(c.clone()),
            right_top_corner: Some(c),
        })
    }
    /// Set the top border color.
    pub fn top(mut self, c: Color) -> Self {
        self.0.top = Some(c.into());
        self
    }
    /// Set the bottom border color.
    pub fn bottom(mut self, c: Color) -> Self {
        self.0.bottom = Some(c.into());
        self
    }
    /// Set the left border color.
    pub fn left(mut self, c: Color) -> Self {
        self.0.left = Some(c.into());
        self
    }
    /// Set the right border color.
    pub fn right(mut self, c: Color) -> Self {
        self.0.right = Some(c.into());
        self
    }
    /// Set the top-left corner color.
    pub fn corner_top_left(mut self, c: Color) -> Self {
        self.0.left_top_corner = Some(c.into());
        self
    }
    /// Set the top-right corner color.
    pub fn corner_top_right(mut self, c: Color) -> Self {
        self.0.right_top_corner = Some(c.into());
        self
    }
    /// Set the bottom-left corner color.
    pub fn corner_bottom_left(mut self, c: Color) -> Self {
        self.0.left_bottom_corner = Some(c.into());
        self
    }
    /// Set the bottom-right corner color.
    pub fn corner_bottom_right(mut self, c: Color) -> Self {
        self.0.right_bottom_corner = Some(c.into());
        self
    }
}
impl<R> CellOption<R, ColoredConfig> for BorderColor
where
    R: Records + ExactRecords,
{
    fn change(self, records: &mut R, cfg: &mut ColoredConfig, entity: Entity) {
        // Resolve the entity into concrete cell positions within the table's
        // shape and recolor the border of each resolved cell.
        let (rows, cols) = (records.count_rows(), records.count_columns());
        let color = self.0;
        entity
            .iter(rows, cols)
            .for_each(|pos| cfg.set_border_color(pos, color.clone()));
    }
}
impl<R, D> TableOption<R, D, ColoredConfig> for BorderColor
where
    R: Records + ExactRecords,
{
    fn change(self, records: &mut R, cfg: &mut ColoredConfig, _: &mut D) {
        // Apply the same colored border to every cell of the table.
        let (rows, cols) = (records.count_rows(), records.count_columns());
        let color = self.0;
        (0..rows)
            .flat_map(|row| (0..cols).map(move |col| (row, col)))
            .for_each(|pos| cfg.set_border_color(pos, color.clone()));
    }
}
impl From<BorderColor> for Border<AnsiColor<'static>> {
fn from(val: BorderColor) -> Self {
val.0
}
}
|
use duct::cmd;
/// Download the latest brunch-toolkit script and install it to
/// `/usr/local/bin`, removing the temporary download afterwards.
///
/// # Panics
///
/// Panics if any of the external commands (`curl`, `install`, `rm`) fails.
pub fn update_toolkit() {
    // Fixed: the original passed `-l`, which is curl's FTP `--list-only`
    // flag; `-L` (`--location`) is needed to follow redirects on an HTTPS
    // download from raw.githubusercontent.com.
    let downloaded = cmd!(
        "curl",
        "-L",
        "https://raw.githubusercontent.com/WesBosch/brunch-toolkit/main/brunch-toolkit",
        "-o",
        "/tmp/brunch-toolkit"
    )
    .read()
    .expect("failed to download brunch-toolkit");
    println!("{}", downloaded);
    let installed = cmd!("install", "-Dt", "/usr/local/bin", "-m", "755", "/tmp/brunch-toolkit")
        .read()
        .expect("failed to install brunch-toolkit");
    println!("{}", installed);
    // Distinct binding (the original shadowed `output2` twice).
    let cleaned = cmd!("rm", "/tmp/brunch-toolkit")
        .read()
        .expect("failed to remove temporary download");
    println!("{}", cleaned);
}
/// Run the installed brunch-toolkit in PWA auto-update mode and print its
/// output. Panics if the command fails.
pub fn update_framework() {
    println!(
        "{}",
        cmd!("/usr/local/bin/brunch-toolkit", "-pwa-au").read().unwrap()
    );
}
pub fn update_daemon() {
let output1 = cmd!("curl", "-Ls", "https://brunch.tools/install.sh").pipe(cmd!("sudo","bash")).read().unwrap();
println!("{}", output1);
} |
mod token;
pub use self::token::{Token, TokenType};
use std::str;
use std::option::Option;
/// A single-pass lexer/interpreter over an arithmetic expression string.
pub struct Interpreter<'a> {
    // Cursor offset, used for error reporting only.
    pos: usize,
    // Owned copy of the input text.
    body: String,
    // Character currently under the cursor; `None` once input is exhausted.
    current_char: Option<char>,
    // The most recently lexed token.
    current_token: Token,
    // Character iterator over the borrowed input.
    chars: str::Chars<'a>,
}
impl<'a> Interpreter<'a> {
pub fn new(body: &'a String) -> Interpreter<'a> {
Interpreter {
pos: 0,
body: body.clone(),
current_char: None,
current_token: Token::eof(),
chars: body.chars(),
}
}
fn current_char(&self) -> char {
self.current_char.unwrap()
}
fn error(&self) -> ! {
panic!("Error parsing input: character {} @ {}",
self.current_token,
self.pos);
}
fn advance(&mut self) {
self.pos += 1;
self.current_char = self.chars.next();
}
fn skip_whitespace(&mut self) {
while self.current_char.is_some() && is_whitespace(self.current_char()) {
self.advance();
}
}
fn integer(&mut self) -> String {
let mut i = "".to_string();
while is_digit(self.current_char()) {
i.push(self.current_char());
self.advance();
}
i
}
fn get_next_token(&mut self) -> Token {
while self.current_char.is_some() {
if is_whitespace(self.current_char()) {
self.skip_whitespace();
continue;
}
if is_digit(self.current_char()) {
let t = Token {
kind: TokenType::Integer,
value: self.integer(),
};
self.advance();
return t;
}
let t = Token::op(self.current_char());
self.advance();
return t;
}
Token::eof()
}
fn eat(&mut self, t: TokenType) {
if self.current_token.kind == t {
self.current_token = self.get_next_token();
return;
}
self.error();
}
fn term(&self) -> i32 {}
fn expr(&mut self) -> Result<i32, String> {
self.current_token = self.get_next_token();
while self.current_token.kind != TokenType::EOF {
let left = self.current_token.clone();
self.eat_token(TokenType::Integer);
let op = self.current_token.clone();
match op.kind {
TokenType::Add => self.eat_token(TokenType::Add),
TokenType::Subtract => self.eat_token(TokenType::Subtract),
TokenType::Multiply => self.eat_token(TokenType::Multiply),
TokenType::Divide => self.eat_token(TokenType::Divide),
_ => return Err(format!("Unrecognized operator: {}", op.value)),
};
let right = self.current_token.clone();
self.eat_token(TokenType::Integer);
match op.kind {
TokenType::Add => {
Ok(left.value.parse::<i32>().unwrap() + right.value.parse::<i32>().unwrap())
}
TokenType::Subtract => {
Ok(left.value.parse::<i32>().unwrap() - right.value.parse::<i32>().unwrap())
}
TokenType::Multiply => {
Ok(left.value.parse::<i32>().unwrap() * right.value.parse::<i32>().unwrap())
}
TokenType::Divide => {
Ok(left.value.parse::<i32>().unwrap() / right.value.parse::<i32>().unwrap())
}
_ => Err("Op not correct type!".to_string()),
}
}
}
}
/// True for the whitespace characters this interpreter skips:
/// space, newline, and tab (deliberately not `\r`).
pub fn is_whitespace(c: char) -> bool {
    matches!(c, ' ' | '\n' | '\t')
}
/// True for the ASCII digits '0'..='9'.
pub fn is_digit(c: char) -> bool {
    c.is_ascii_digit()
}
/// Convenience entry point: builds an interpreter over `body` and
/// evaluates it to a single integer result.
pub fn run(body: String) -> Result<i32, String> {
    Interpreter::new(&body).expr()
}
|
#![no_main]
#![no_std]
extern crate cortex_m_rt;
// Panic handler for the on-target test binary: disable interrupts so the
// log line cannot be interleaved, report the panic through minitest, then
// signal failure to the host.
#[cfg(target_env = "")] // appease clippy
#[panic_handler]
fn panic(info: &core::panic::PanicInfo) -> ! {
cortex_m::interrupt::disable();
minitest::log!("{}", info);
minitest::fail()
}
// On-target test suite driven by the `minitest` harness.
#[minitest::tests]
mod tests {
use minitest::log;
// Runs once before the tests and hands the Cortex-M peripherals to any
// test that declares a `&mut cortex_m::Peripherals` parameter.
#[init]
fn init() -> cortex_m::Peripherals {
log!("Hello world!");
cortex_m::Peripherals::take().unwrap()
}
// `Peripherals::take` is a singleton: after `init` took them, a second
// take must yield `None`.
#[test]
fn double_take() {
assert!(cortex_m::Peripherals::take().is_none());
}
#[test]
#[cfg(not(feature = "semihosting"))] // QEMU does not model the cycle counter
fn cycle_count(p: &mut cortex_m::Peripherals) {
#[cfg(not(armv6m))]
{
use cortex_m::peripheral::DWT;
// DWT cycle counter: load a known value with the counter stopped,
// read it back, then verify it advances once re-enabled.
assert!(p.DWT.has_cycle_counter());
p.DCB.enable_trace();
p.DWT.disable_cycle_counter();
const TEST_COUNT: u32 = 0x5555_AAAA;
p.DWT.set_cycle_count(TEST_COUNT);
assert_eq!(DWT::cycle_count(), TEST_COUNT);
p.DWT.enable_cycle_counter();
assert!(DWT::cycle_count() > TEST_COUNT);
}
#[cfg(armv6m)]
{
// ARMv6-M has no DWT cycle counter at all.
assert!(!p.DWT.has_cycle_counter());
}
}
}
|
use super::operators::Operator;
use super::types::Type;
use super::Location;
/*
program -> block*
block -> func
func -> type-id `(` param-seq `)` comp-stmt
param-seq -> type-id (`,` type-id)* | epsilon
comp-stmt -> `{` stmt* `}`
type-id -> type identifier
type -> primitive
stmt -> assign | declare | dec-ass | return | if | while
assign -> identifier `=` expr7 `;`
declare -> type-id `;`
dec-ass -> type-id `=` expr7 `;`
return -> `return` expr7 `;`
if -> `if` `(` expr7 `)` (stmt | comp-stmt) (`else` (stmt | comp-stmt))?
while -> `while` `(` expr7 `)` (stmt | comp-stmt)
expr7 -> expr4 expr7' // equivalence
expr7' -> (`==`|`!=`) expr4 expr7' | epsilon
expr4 -> expr3 expr4' // addition
expr4' -> (`+`|`-`) expr3 expr4' | epsilon
expr3 -> expr2 expr3' // multiplication
expr3' -> (`*`|`/`) expr2 expr3' | epsilon
expr2 -> (`+`|`-`)? factor
factor -> `(` expr7 `)` | integer | identifier | call-func
call-func -> identifier `(` arg-seq `)`
arg-seq -> expr7 (`,` expr7)* | epsilon
*/
/// An identifier (variable, parameter, or function name) together with
/// its resolved type and source location.
#[derive(Debug, Clone)]
pub struct ASTIdentifier {
name: String,
id_type: Type,
location: Location,
}
impl ASTIdentifier {
pub fn new(name: String, id_type: Type, loc: Location) -> Self {
Self {
name,
id_type,
location: loc,
}
}
// Returns a clone of the identifier's name.
pub fn get_name(&self) -> String {
self.name.clone()
}
// Returns a clone of the identifier's static type.
pub fn get_type(&self) -> Type {
self.id_type.clone()
}
}
/// The expression node shapes of the AST.
#[derive(Debug, Clone)]
pub enum ASTExprKind {
Binary(Box<ASTExpr>, Box<ASTExpr>, Operator), // lhs, rhs, operator
Unary(Box<ASTExpr>, Operator), // operand, prefix operator
Identifier(ASTIdentifier),
Integer(u32),
FuncCall(ASTIdentifier, Vec<ASTExpr>), // callee, arguments
}
/// An expression node: its kind plus the source location it came from.
#[derive(Debug, Clone)]
pub struct ASTExpr {
pub kind: ASTExprKind,
location: Location,
}
impl ASTExpr {
pub fn new(kind: ASTExprKind, loc: Location) -> Self {
Self {
kind,
location: loc,
}
}
}
/// The statement node shapes of the AST.
#[derive(Debug, Clone)]
pub enum ASTStmtKind {
Assign(ASTIdentifier, ASTExpr), // target, value
Declare(ASTIdentifier), // declaration without initializer
DeclareAssign(ASTIdentifier, ASTExpr), // declaration with initializer
Return(ASTExpr),
If(ASTExpr, Vec<ASTStmt>, Vec<ASTStmt>), // cond, true, false
While(ASTExpr, Vec<ASTStmt>), // cond, stmts
}
/// A statement node: kind, the scope depth it appears at, and location.
#[derive(Debug, Clone)]
pub struct ASTStmt {
pub kind: ASTStmtKind,
scope: i32,
location: Location,
}
impl ASTStmt {
pub fn new(kind: ASTStmtKind, scope: i32, loc: Location) -> Self {
Self {
kind,
scope,
location: loc,
}
}
}
/// Top-level block shapes; currently only function definitions.
#[derive(Debug, Clone)]
pub enum ASTBlockKind {
Func(ASTIdentifier, Vec<ASTIdentifier>, Vec<ASTStmt>), // name, params, body
}
/// A top-level block node: kind, scope depth, and source location.
#[derive(Debug, Clone)]
pub struct ASTBlock {
pub kind: ASTBlockKind,
scope: i32,
location: Location,
}
impl ASTBlock {
pub fn new(kind: ASTBlockKind, scope: i32, loc: Location) -> Self {
Self {
kind,
scope,
location: loc,
}
}
}
/// The root of the syntax tree: an ordered list of top-level blocks.
#[derive(Debug, Clone)]
pub struct AST {
program: Vec<ASTBlock>,
scope: i32,
location: Location,
}
impl AST {
pub fn new(program: Vec<ASTBlock>, scope: i32, location: Location) -> Self {
Self {
program,
scope,
location,
}
}
// Returns a clone of all top-level blocks.
// NOTE(review): this deep-clones the entire tree on every call; callers
// that only iterate might prefer a borrowing accessor — confirm usage
// before changing the signature.
pub fn program(&self) -> Vec<ASTBlock> {
self.program.clone()
}
}
/// Access to a node's kind enum (returned by clone).
pub trait HasSyntaxKind {
type Kind;
fn get_kind(&self) -> Self::Kind;
}
/// Expression-like nodes expose their source location.
pub trait AsSyntaxExpression {
fn get_loc(&self) -> Location;
}
/// Statement-like nodes expose their scope depth and source location.
pub trait AsSyntaxStatement {
fn get_scope(&self) -> i32;
fn get_loc(&self) -> Location;
}
// Accessor implementations of the syntax traits for each AST node type.
// All of them simply return stored fields; `location` is returned by
// value from `&self`, which implies `Location: Copy`.
impl AsSyntaxExpression for ASTIdentifier {
fn get_loc(&self) -> Location {
self.location
}
}
impl AsSyntaxExpression for ASTExpr {
fn get_loc(&self) -> Location {
self.location
}
}
impl HasSyntaxKind for ASTExpr {
type Kind = ASTExprKind;
fn get_kind(&self) -> Self::Kind {
self.kind.clone()
}
}
impl AsSyntaxStatement for ASTStmt {
fn get_scope(&self) -> i32 {
self.scope
}
fn get_loc(&self) -> Location {
self.location
}
}
impl HasSyntaxKind for ASTStmt {
type Kind = ASTStmtKind;
fn get_kind(&self) -> Self::Kind {
self.kind.clone()
}
}
impl AsSyntaxStatement for ASTBlock {
fn get_scope(&self) -> i32 {
self.scope
}
fn get_loc(&self) -> Location {
self.location
}
}
impl HasSyntaxKind for ASTBlock {
type Kind = ASTBlockKind;
fn get_kind(&self) -> Self::Kind {
self.kind.clone()
}
}
impl AsSyntaxStatement for AST {
fn get_scope(&self) -> i32 {
self.scope
}
fn get_loc(&self) -> Location {
self.location
}
}
impl std::fmt::Display for ASTIdentifier {
// Displays only the name; type and location are omitted.
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.name)
}
}
impl std::fmt::Display for ASTExpr {
// Renders expressions in postfix (RPN) order: "lhs rhs op".
// A unary expression prints as "0 operand op", i.e. as if rewritten to
// `0 <op> operand`.
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
use ASTExprKind::*;
match &self.kind {
Binary(l, r, ope) => write!(f, "{} {} {}", *l, *r, ope),
Unary(factor, ope) => write!(f, "0 {} {}", *factor, ope),
Identifier(name) => write!(f, "{}", name),
Integer(n) => write!(f, "{}", n),
FuncCall(name, args) => {
// Arguments are comma-separated inside parentheses.
let arg_string = args
.iter()
.map(|arg| format!("{}", arg))
.collect::<Vec<String>>()
.join(", ");
write!(f, "{}({})", name, arg_string)
}
}
}
}
impl std::fmt::Display for ASTStmt {
    /// Renders a statement as text. Assignments print in postfix form
    /// ("id expr ="), matching the postfix rendering used by `ASTExpr`'s
    /// `Display` impl; `if`/`while` bodies print one indented line per
    /// inner statement.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        use ASTStmtKind::*;
        match &self.kind {
            Assign(id, expr) | DeclareAssign(id, expr) => {
                write!(f, "{} {} =", id, expr)
            }
            Declare(id) => write!(f, "{}", id),
            Return(expr) => write!(f, "return {}", expr),
            If(cond, t_stmts, f_stmts) => {
                let mut lines = vec![format!("if({}){{", cond)];
                for line in t_stmts {
                    lines.push(format!(" {}\n", line));
                }
                // `format!("}}")` and `format!("else{{")` were
                // clippy::useless_format; plain `.to_string()` produces
                // byte-identical output without the formatting machinery.
                lines.push("}".to_string());
                if !f_stmts.is_empty() {
                    lines.push("else{".to_string());
                    for line in f_stmts {
                        lines.push(format!(" {}\n", line));
                    }
                    lines.push("}".to_string());
                }
                write!(f, "{}", lines.join(""))
            }
            While(cond, stmts) => {
                let mut lines = vec![format!("while({}){{", cond)];
                for line in stmts {
                    lines.push(format!(" {}\n", line));
                }
                lines.push("}".to_string());
                write!(f, "{}", lines.join(""))
            }
        }
    }
}
impl std::fmt::Display for ASTBlock {
// Renders a function as "name(p1, p2):" followed by one indented line
// per body statement.
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.kind {
ASTBlockKind::Func(id, params, stmts) => {
let param_string = params
.iter()
.map(|arg| format!("{}", arg))
.collect::<Vec<String>>()
.join(", ");
let mut lines = vec![format!("{}({}):\n", id, param_string)];
for line in stmts {
lines.push(format!(" {}\n", line));
}
let func = lines.join("");
write!(f, "{}", func)
}
}
}
}
impl std::fmt::Display for AST {
    /// Renders the whole program by writing each block in order, with no
    /// separator between blocks.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        for block in &self.program {
            write!(f, "{}", block)?;
        }
        Ok(())
    }
}
/// Pretty-prints the whole AST to stdout, one indented line per node.
pub fn visualize_ast(ast: AST) {
    println!("Program:");
    ast.program
        .into_iter()
        .for_each(|block| visualize_ast_block(block, 1));
}
/// Prints one identifier node at indentation depth `i`.
fn visualize_ast_identifier(id: ASTIdentifier, i: usize) {
    println!("{}Identifier {}", " ".repeat(i), id)
}
// Recursively prints an expression node; `i` is the indentation depth,
// children are printed one level deeper.
fn visualize_ast_expr(expr: ASTExpr, i: usize) {
print!("{}", " ".repeat(i));
use ASTExprKind::*;
match expr.kind {
Binary(l, r, ope) => {
println!("Binary {}:", ope);
visualize_ast_expr(*l, i + 1);
visualize_ast_expr(*r, i + 1);
}
Unary(factor, ope) => {
println!("Unary {}:", ope);
visualize_ast_expr(*factor, i + 1);
}
Identifier(id) => println!("Identifier {}", id),
Integer(n) => println!("Integer {}", n),
FuncCall(id, args) => {
println!("FunctionCalled {}:", id);
for arg in args {
visualize_ast_expr(arg, i + 1);
}
}
}
}
// Recursively prints a statement node with its scope depth; nested
// statements and sub-expressions are printed one indentation level deeper.
fn visualize_ast_stmt(stmt: ASTStmt, i: usize) {
print!("{}", " ".repeat(i));
use ASTStmtKind::*;
match stmt.kind {
Assign(id, expr) => {
println!("Assign <scope: {}>:", stmt.scope);
visualize_ast_identifier(id, i + 1);
visualize_ast_expr(expr, i + 1);
}
Declare(id) => {
println!("Declare <scope: {}>:", stmt.scope);
visualize_ast_identifier(id, i + 1);
}
DeclareAssign(id, expr) => {
println!("DeclareAssign <scope: {}>:", stmt.scope);
visualize_ast_identifier(id, i + 1);
visualize_ast_expr(expr, i + 1);
}
Return(expr) => {
println!("Return <scope: {}>:", stmt.scope);
visualize_ast_expr(expr, i + 1);
}
If(cond, t_stmts, f_stmts) => {
println!("If <scope: {}> :", stmt.scope);
visualize_ast_expr(cond, i + 1);
println!("{}If-Statement:", " ".repeat(i));
for stmt in t_stmts {
visualize_ast_stmt(stmt, i + 1);
}
// The Else header is only printed when there are else-statements.
if !f_stmts.is_empty() {
println!("{}Else <scope: {}>:", " ".repeat(i), stmt.scope);
for stmt in f_stmts {
visualize_ast_stmt(stmt, i + 1);
}
}
}
While(cond, stmts) => {
println!("While <scope: {}> :", stmt.scope);
visualize_ast_expr(cond, i + 1);
println!("{}While-Statement:", " ".repeat(i));
for stmt in stmts {
visualize_ast_stmt(stmt, i + 1);
}
}
}
}
// Prints a top-level block (function): header with scope and name, then
// parameters, then the body statements one indentation level deeper.
fn visualize_ast_block(block: ASTBlock, i: usize) {
print!("{}", " ".repeat(i));
match block.kind {
ASTBlockKind::Func(id, params, stmts) => {
println!("Function <scope: {}> {}:", block.scope, id);
for p in params {
visualize_ast_identifier(p, i + 1);
}
println!("{}Function-Statement:", " ".repeat(i));
for stmt in stmts {
visualize_ast_stmt(stmt, i + 1);
}
}
}
}
|
use serde_json::json;
use yew::prelude::*;
use crate::components::loading_spinner::LoadingSpinner;
/// Props from the parent: flags marking an in-flight join/create request
/// so the view can disable buttons and show loading spinners.
#[derive(Clone, Debug, PartialEq, Properties)]
pub struct Props {
pub request_joining_lobby: bool,
pub request_creating_lobby: bool,
}
/// Screen shown while the user is not in a lobby: pick a display name
/// (Step 1), then join an existing lobby by code or create one (Step 2).
pub struct OutOfLobby {
link: ComponentLink<Self>,
props: Props,
// on_set_name: Callback<String>,
name: String, // confirmed name; empty until Step 1 is confirmed
lobby_code_input: String, // live contents of the lobby-code field
name_input: String, // live contents of the name field
}
impl OutOfLobby {
// Step 1 view: name input plus an OK button that copies `name_input`
// into the confirmed `name` (via Msg::NameInputConfirm).
fn view_name_input(&self) -> Html {
html! {
<div class="flex flex-col p-2 bg-white shadow-lg">
<h2>{"Step 1"}</h2>
<div class="flex flex-row">
<label
for="name-input"
class="self-center"
>
{"Name:"}
</label>
<input
id="name-input"
value=&self.name_input
oninput=self.link.callback(|e: InputData| Msg::NameInputChange(e.value))
placeholder="Pick a name your friends will see."
class="m-4 p-2 flex-grow focus:ring-2 focus:ring-blue-600 rounded-lg shadow-md"
/>
</div>
<div class="flex flex-row">
<button
class="w-48 mx-auto my-4 py-2 disabled:opacity-50 bg-blue-200 hover:bg-blue-300 rounded-lg shadow-md"
onclick=self.link.callback(move |_| { Msg::NameInputConfirm })
>
{ "OK" }
</button>
</div>
</div>
}
}
// Step 2 view: join-by-code or create-a-lobby. Buttons are disabled
// while either request is already in flight.
fn view_create_or_join(&self) -> Html {
// NOTE(review): these two messages are built but currently unused —
// the websocket send actions below are commented out and the buttons
// dispatch Msg::Ignore instead. Confirm intent before removing.
let create_lobby_message = json!({
"action": "hearts",
"type": "lobby_action_create",
"name": self.name_input.clone(),
});
let join_lobby_message = json!({
"action": "hearts",
"type": "lobby_action_join",
"lobby_code": self.lobby_code_input.clone(),
"name": self.name_input.clone(),
});
html! {
<div class="flex flex-col p-2 bg-white shadow-lg">
<h2>{"Step 2"}</h2>
<div class="flex flex-row">
<label
for="lobby_code-input"
class="self-center"
>
{"Lobby code:"}
</label>
<input
id="lobby-code-input"
value=&self.lobby_code_input
oninput=self.link.callback(|e: InputData| Msg::LobbyCodeInputChange(e.value))
placeholder="Get this from a friend"
class="m-4 p-2 flex-grow focus:ring-2 focus:ring-blue-600 rounded-lg shadow-md"
/>
</div>
<div class="flex flex-row">
<button
class="w-48 mx-auto my-4 py-2 disabled:opacity-50 bg-blue-200 hover:bg-blue-300 rounded-lg shadow-md"
disabled={self.props.request_creating_lobby || self.props.request_joining_lobby}
onclick=self.link.callback(move |_| {
Msg::Ignore
// Msg::WsAction(WsAction::SendLobbyJoin(join_lobby_message.clone()))
})
>
{ if self.props.request_joining_lobby { html!{ <LoadingSpinner /> } } else { html!{} } }
{ "Join lobby" }
</button>
</div>
<div>
<hr />
<p class="text-center">{ "Or" }</p>
</div>
<button
class="w-48 mx-auto my-4 py-2 disabled:opacity-50 bg-blue-200 hover:bg-blue-300 rounded-lg shadow-md"
disabled={self.props.request_creating_lobby || self.props.request_joining_lobby}
onclick=self.link.callback(move |_| {
Msg::Ignore
// Msg::WsAction(WsAction::SendLobbyCreate(create_lobby_message.clone()))
})
>
{ if self.props.request_creating_lobby { html!{ <LoadingSpinner />} } else { html!{} } }
{ "Create lobby" }
</button>
</div>
}
}
}
/// Component messages produced by the view callbacks.
enum Msg {
Ignore,
NameInputConfirm,
NameInputChange(String),
LobbyCodeInputChange(String),
}
impl Component for OutOfLobby {
type Message = Msg;
type Properties = Props;
fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
Self {
link,
props,
name: "".to_owned(),
lobby_code_input: "".to_owned(),
name_input: "".to_owned(),
}
}
// Applies a message to component state.
// NOTE(review): always returns true, so every message (including
// Msg::Ignore) triggers a re-render — presumably intentional; confirm.
fn update(&mut self, msg: Self::Message) -> ShouldRender {
match msg {
Msg::Ignore => {}
Msg::NameInputChange(new_value) => {
self.name_input = new_value;
}
Msg::LobbyCodeInputChange(new_value) => {
self.lobby_code_input = new_value;
}
Msg::NameInputConfirm => {
// Confirming copies the draft into the committed name, which
// switches the view from Step 1 to Step 2.
self.name = self.name_input.clone();
}
}
true
}
// NOTE(review): prop changes are dropped (`self.props` is never updated
// and false suppresses re-render), so the spinner/disabled state won't
// react to parent updates — verify this is intended.
fn change(&mut self, _props: Self::Properties) -> ShouldRender {
false
}
// Renders Step 1 until a name is confirmed, then Step 2.
fn view(&self) -> Html {
html! {
<>
<div>
<h1 class="text-xl text-center pb-4">{"Hearts app"}</h1>
<p>{"Play a game of the trick based card game hearts with your friends."}</p>
<p>{"This is a work in progress by Douglas Anderson."}</p>
</div>
<div>
<p class="text-center">{ "Not currently in a lobby." }</p>
</div>
{ if self.name.is_empty() { self.view_name_input() } else { self.view_create_or_join() } }
</>
}
}
}
|
use std::{io::{self, Write}};
use serde::Serialize;
use crate::{client::tui::Tui, common::{debug_message::DebugMessageType, encryption::NetworkedPublicKey, message_type::MsgType}};
use super::ConnectionManager;
impl ConnectionManager {
// Sends `msg` to the rendezvous server over TCP, symmetrically encrypted.
// Wire format: bincode-serialized ciphertext length, then the ciphertext
// of [MsgType discriminant byte ++ bincode(msg)].
// Panics if no UDP connection to the rendezvous address exists or no
// symmetric key has been negotiated for it yet.
// NOTE(review): near-duplicate of send_tcp_message_public_key — candidate
// for a shared framing helper.
pub fn send_tcp_message<T: ?Sized>(&mut self, t:MsgType, msg: &T) -> io::Result<()> where T: Serialize {
let t: u8 = num::ToPrimitive::to_u8(&t).unwrap();
let msg = &bincode::serialize(msg).unwrap()[..];
let conn = self.udp_connections.iter()
.find(|x| x.address == self.rendezvous_ip).unwrap();
let encrypted = &conn.symmetric_key.as_ref().unwrap().encrypt(&[&[t], msg].concat()[..])[..];
let msg_size = bincode::serialize(&encrypted.len()).unwrap();
let chained: &[u8] = &[&msg_size[..], encrypted].concat()[..];
self.rendezvous_socket.write_all(&chained[..])?;
Ok(())
}
// Same frame as send_tcp_message, but encrypted with the rendezvous
// server's public key — used before a symmetric key exists.
// Panics if the server's public key has not been received yet.
pub fn send_tcp_message_public_key<T: ?Sized>(&mut self, t:MsgType, msg: &T) -> io::Result<()> where T: Serialize {
let t: u8 = num::ToPrimitive::to_u8(&t).unwrap();
let msg = &bincode::serialize(msg).unwrap()[..];
let key = self.rendezvous_public_key.as_ref().unwrap();
let encrypted = &key.encrypt(&[&[t], msg].concat()[..]);
let msg_size = bincode::serialize(&encrypted.len()).unwrap();
let chained: &[u8] = &[&msg_size[..], encrypted].concat()[..];
self.rendezvous_socket.write_all(&chained[..])?;
Ok(())
}
/// Send a UDP packet which optionally can be reliable
// With `public_key` set, the packet goes to the matching peer connection
// (Err + debug message if none); with `None` it goes to the rendezvous
// server (panics if that connection is missing).
pub fn send_udp_message<T: ?Sized>(&mut self, public_key: Option<NetworkedPublicKey>, t: MsgType, msg: &T, reliable: bool, custom_id: Option<u32>) -> Result<(), &'static str> where T: Serialize {
let rendezvous_ip = self.rendezvous_ip.clone();
let conn = match public_key {
Some(public_key) => {
match self.udp_connections.iter_mut()
.find(|c|
if c.associated_peer.is_some() {c.associated_peer.as_ref().unwrap() == &public_key} else {false}
) {
Some(conn) => conn,
None => {
Tui::debug_message(&format!("Cannot find udp connection with public key: ({})", public_key), DebugMessageType::Error, &self.ui_s);
return Err("Cannot find udp connection");
}
}
}
None => self.udp_connections.iter_mut().find(|c| c.address == rendezvous_ip).unwrap()
};
conn.send_udp_message(t, msg, reliable, custom_id);
Ok(())
}
}
extern crate euclid;
extern crate rustybuzz;
extern crate webrender;
extern crate webrender_api;
// use font_kit::font;
use std::str::FromStr;
use crate::fragment::{Point, Rect, Size, TextFragment};
use crate::glyph::{GlyphData, GlyphStore};
mod fragment;
mod glyph;
/// Shapes `text` with the system monospace font and greedily breaks the
/// resulting glyph run into line-sized `TextFragment`s no wider than
/// `width` (advance units), stacked vertically `line_height` apart.
///
/// Panics if the system font cannot be selected, loaded, or parsed.
pub fn layout_text(width: i32, text: String) -> Vec<TextFragment> {
    let point_size = 16.0;
    let line_height = 18;
    let font = font_kit::source::SystemSource::new()
        .select_by_postscript_name("monospace")
        .unwrap()
        .load()
        .unwrap();
    let data = font.copy_font_data().expect("Failed to load font data");
    let mut face = rustybuzz::Face::from_slice(&data, 0).unwrap();
    face.set_points_per_em(Some(point_size));
    let buzz_features = [rustybuzz::Feature::from_str("kern[0]").unwrap()];
    let mut buffer = rustybuzz::UnicodeBuffer::new();
    buffer.push_str(&text);
    let glyph_buffer = rustybuzz::shape(&face, &buzz_features, buffer);
    let mut fragments = Vec::<TextFragment>::new();
    let mut store = GlyphStore::new();
    let mut cur_block = 0;
    for (info, position) in glyph_buffer
        .glyph_infos()
        .iter()
        .zip(glyph_buffer.glyph_positions())
    {
        let glyph = GlyphData::new(info.codepoint, position.x_advance, position.x_offset, position.y_offset);
        // When this glyph would overflow the line, flush the current line
        // first (skip if the store is empty, e.g. a glyph wider than
        // `width` itself).
        if store.length() + position.x_advance > width && store.length() > 0 {
            let size = Size::new(store.length(), line_height);
            let origin = Point::new(0, cur_block);
            fragments.push(TextFragment::new(store, Rect::new(origin, size)));
            cur_block += line_height;
            store = GlyphStore::new();
        }
        // BUG FIX: the glyph that triggered a wrap was previously dropped;
        // it now always lands in the (possibly fresh) store.
        store.add_glyph(glyph);
    }
    // BUG FIX: flush the trailing partial line, which was previously lost.
    if store.length() > 0 {
        let size = Size::new(store.length(), line_height);
        let origin = Point::new(0, cur_block);
        fragments.push(TextFragment::new(store, Rect::new(origin, size)));
    }
    fragments
}
#[cfg(test)]
mod tests {
    /// Placeholder sanity check that the test harness runs at all.
    #[test]
    fn it_works() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
|
use crate::single_disk_farm::piece_reader::PieceReader;
use crate::utils::archival_storage_pieces::ArchivalStoragePieces;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::future::Future;
use subspace_core_primitives::{Piece, PieceIndex, PieceOffset, SectorIndex};
use subspace_farmer_components::plotting::PlottedSector;
use tracing::{trace, warn};
/// Locates one plotted copy of a piece: which disk farm, which sector in
/// that farm, and the offset inside the sector.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct PieceDetails {
disk_farm_index: u8,
sector_index: SectorIndex,
piece_offset: PieceOffset,
}
/// Wrapper data structure for pieces plotted under multiple plots and corresponding piece readers,
/// it also maintains filter in given [`ArchivalStoragePieces`].
#[derive(Debug)]
pub struct ReadersAndPieces {
// One reader per disk farm, indexed by `PieceDetails::disk_farm_index`.
readers: Vec<PieceReader>,
// Every plotted location of each piece; empty lists are never stored.
pieces: HashMap<PieceIndex, Vec<PieceDetails>>,
// Filter kept in sync as pieces are added/deleted.
archival_storage_pieces: ArchivalStoragePieces,
}
impl ReadersAndPieces {
    /// Creates a new instance from per-farm piece readers and the shared
    /// archival storage filter.
    pub fn new(readers: Vec<PieceReader>, archival_storage_pieces: ArchivalStoragePieces) -> Self {
        Self {
            readers,
            pieces: HashMap::new(),
            archival_storage_pieces,
        }
    }
    /// Check if piece is known and can be retrieved
    pub fn contains_piece(&self, piece_index: &PieceIndex) -> bool {
        self.pieces.contains_key(piece_index)
    }
    /// Read piece from one of the associated readers.
    ///
    /// If piece doesn't exist `None` is returned, if by the time future is polled piece is no
    /// longer in the plot, future will resolve with `None`.
    pub fn read_piece(
        &self,
        piece_index: &PieceIndex,
    ) -> Option<impl Future<Output = Option<Piece>> + 'static> {
        let piece_details = match self.pieces.get(piece_index) {
            Some(piece_details) => piece_details
                .first()
                .copied()
                .expect("Empty lists are not stored in the map; qed"),
            None => {
                trace!(
                    ?piece_index,
                    "Piece is not stored in any of the local plots"
                );
                return None;
            }
        };
        // Clone the reader so the returned future doesn't borrow `self`.
        let mut reader = match self.readers.get(usize::from(piece_details.disk_farm_index)) {
            Some(reader) => reader.clone(),
            None => {
                warn!(?piece_index, ?piece_details, "Plot offset is invalid");
                return None;
            }
        };
        Some(async move {
            reader
                .read_piece(piece_details.sector_index, piece_details.piece_offset)
                .await
        })
    }
    /// Records every piece of a freshly plotted sector and, for pieces not
    /// seen before, updates the archival storage filter in one batch.
    pub fn add_sector(&mut self, disk_farm_index: u8, plotted_sector: &PlottedSector) {
        let mut new_piece_indices = Vec::new();
        for (piece_offset, &piece_index) in
            (PieceOffset::ZERO..).zip(plotted_sector.piece_indexes.iter())
        {
            let piece_details = PieceDetails {
                disk_farm_index,
                sector_index: plotted_sector.sector_index,
                piece_offset,
            };
            match self.pieces.entry(piece_index) {
                Entry::Occupied(mut entry) => {
                    entry.get_mut().push(piece_details);
                }
                Entry::Vacant(entry) => {
                    entry.insert(vec![piece_details]);
                    new_piece_indices.push(piece_index);
                }
            }
        }
        if !new_piece_indices.is_empty() {
            self.archival_storage_pieces.add_pieces(&new_piece_indices);
        }
    }
    /// Removes every piece of a deleted sector; pieces that no longer have
    /// any plotted copy are dropped from the map and removed from the
    /// archival storage filter in one batch.
    pub fn delete_sector(&mut self, disk_farm_index: u8, plotted_sector: &PlottedSector) {
        let mut deleted_piece_indices = Vec::new();
        for (piece_offset, &piece_index) in
            (PieceOffset::ZERO..).zip(plotted_sector.piece_indexes.iter())
        {
            let searching_piece_details = PieceDetails {
                disk_farm_index,
                sector_index: plotted_sector.sector_index,
                piece_offset,
            };
            if let Entry::Occupied(mut entry) = self.pieces.entry(piece_index) {
                let piece_details = entry.get_mut();
                // `position` replaces the original
                // `enumerate().find_map(.. then_some(index))`, which is
                // exactly what `Iterator::position` does.
                if let Some(index) = piece_details
                    .iter()
                    .position(|details| details == &searching_piece_details)
                {
                    piece_details.swap_remove(index);
                }
                // We do not store empty lists
                if piece_details.is_empty() {
                    entry.remove_entry();
                    deleted_piece_indices.push(piece_index);
                }
            }
        }
        if !deleted_piece_indices.is_empty() {
            self.archival_storage_pieces
                .delete_pieces(&deleted_piece_indices);
        }
    }
    /// Iterator over all piece indices with at least one plotted copy.
    pub fn piece_indices(&self) -> impl Iterator<Item = &PieceIndex> {
        self.pieces.keys()
    }
}
|
use super::super::alu::{internal_multiply_cycles, set_nz_flags, set_nz_flags64};
use super::super::{Cpu, Cycles, Memory};
use util::bits::Bits as _;
// Decodes the register fields of a 32-bit multiply instruction:
// Rm = bits 0..3 (multiplicand), Rs = bits 8..11 (multiplier),
// Rn = bits 12..15 (accumulator), Rd = bits 16..19 (destination).
#[inline]
fn get_mulinstr_regs(instr: u32) -> (u32, u32, u32, u32) {
let rm = instr.bits(0, 3);
let rs = instr.bits(8, 11);
let rn = instr.bits(12, 15);
let rd = instr.bits(16, 19);
(rm, rs, rn, rd)
}
// Decodes the register fields of a long-multiply instruction:
// Rm = bits 0..3, Rs = bits 8..11, RdLo = bits 12..15, RdHi = bits 16..19.
#[inline]
fn get_long_mulinstr_regs(instr: u32) -> (u32, u32, u32, u32) {
let rm = instr.bits(0, 3);
let rs = instr.bits(8, 11);
let rd_lo = instr.bits(12, 15);
let rd_hi = instr.bits(16, 19);
(rm, rs, rd_lo, rd_hi)
}
/// Multiply and accumulate registers
// MLA: Rd := Rm * Rs + Rn (wrapping); flags unaffected.
pub fn arm_mla(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rn, rd) = get_mulinstr_regs(instr);
let lhs = cpu.registers.read(rm);
let rhs = cpu.registers.read(rs);
let acc = cpu.registers.read(rn);
let res = lhs.wrapping_mul(rhs).wrapping_add(acc);
cpu.registers.write(rd, res);
// One extra internal cycle for the accumulate step, plus the
// multiplier-dependent internal multiply cycles.
let icycles = Cycles::ONE + internal_multiply_cycles(rhs, false);
memory.stall(icycles);
icycles
}
/// Multiply and accumulate registers, setting flags
// MLAS: same as MLA, but also updates the N and Z flags from the result.
pub fn arm_mlas(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rn, rd) = get_mulinstr_regs(instr);
let lhs = cpu.registers.read(rm);
let rhs = cpu.registers.read(rs);
let acc = cpu.registers.read(rn);
let res = lhs.wrapping_mul(rhs).wrapping_add(acc);
cpu.registers.write(rd, res);
set_nz_flags(cpu, res);
let icycles = Cycles::ONE + internal_multiply_cycles(rhs, false);
memory.stall(icycles);
icycles
}
/// Multiply registers
// MUL: Rd := Rm * Rs (wrapping, low 32 bits); Rn field ignored.
pub fn arm_mul(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, _rn, rd) = get_mulinstr_regs(instr);
let lhs = cpu.registers.read(rm);
let rhs = cpu.registers.read(rs);
let res = lhs.wrapping_mul(rhs);
cpu.registers.write(rd, res);
// Internal cycles depend on the magnitude of the multiplier.
let icycles = internal_multiply_cycles(rhs, false);
memory.stall(icycles);
icycles
}
/// Multiply registers, setting flags
// MULS: same as MUL, but also updates the N and Z flags from the result.
pub fn arm_muls(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, _rn, rd) = get_mulinstr_regs(instr);
let lhs = cpu.registers.read(rm);
let rhs = cpu.registers.read(rs);
let res = lhs.wrapping_mul(rhs);
cpu.registers.write(rd, res);
set_nz_flags(cpu, res);
let icycles = internal_multiply_cycles(rhs, false);
memory.stall(icycles);
icycles
}
/// Signed long multiply and accumulate
// SMLAL: RdHi:RdLo := (i64)Rm * (i64)Rs + RdHi:RdLo. Operands are
// sign-extended; the 64-bit accumulator is rebuilt from the two
// zero-extended register halves.
pub fn arm_smlal(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as i32 as i64; // sign-extended
let rhs = rsv as i32 as i64; // sign-extended
let alo = cpu.registers.read(rd_lo) as u32 as i64; // zero-extended
let ahi = cpu.registers.read(rd_hi) as u32 as i64; // zero-extended
let acc = (ahi << 32) | alo;
let res = lhs.wrapping_mul(rhs).wrapping_add(acc);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
// Extra internal cycle for the accumulate; `true` selects the signed
// variant of the multiplier cycle count.
let icycles = Cycles::ONE + internal_multiply_cycles(rsv, true);
memory.stall(icycles);
icycles
}
/// Signed long multiply and accumulate, setting flags
// SMLALS: same as SMLAL, plus N/Z flags computed from the 64-bit result.
pub fn arm_smlals(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as i32 as i64; // sign-extended
let rhs = rsv as i32 as i64; // sign-extended
let alo = cpu.registers.read(rd_lo) as u32 as i64; // zero-extended
let ahi = cpu.registers.read(rd_hi) as u32 as i64; // zero-extended
let acc = (ahi << 32) | alo;
let res = lhs.wrapping_mul(rhs).wrapping_add(acc);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
set_nz_flags64(cpu, res as u64);
let icycles = Cycles::ONE + internal_multiply_cycles(rsv, true);
memory.stall(icycles);
icycles
}
/// Signed long multiply (32x32 to 64)
// SMULL: RdHi:RdLo := (i64)Rm * (i64)Rs, operands sign-extended.
pub fn arm_smull(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as i32 as i64;
let rhs = rsv as i32 as i64;
let res = lhs.wrapping_mul(rhs);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
let icycles = internal_multiply_cycles(rsv, true);
memory.stall(icycles);
icycles
}
/// Signed long multiply, setting flags
// SMULLS: same as SMULL, plus N/Z flags computed from the 64-bit result.
pub fn arm_smulls(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as i32 as i64;
let rhs = rsv as i32 as i64;
let res = lhs.wrapping_mul(rhs);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
set_nz_flags64(cpu, res as u64);
let icycles = internal_multiply_cycles(rsv, true);
memory.stall(icycles);
icycles
}
/// Unsigned long multiply and accumulate
// UMLAL: RdHi:RdLo := (u64)Rm * (u64)Rs + RdHi:RdLo, all zero-extended.
pub fn arm_umlal(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as u64;
let rhs = rsv as u64;
let alo = cpu.registers.read(rd_lo) as u64;
let ahi = cpu.registers.read(rd_hi) as u64;
let acc = (ahi << 32) | alo;
let res = lhs.wrapping_mul(rhs).wrapping_add(acc);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
// Extra internal cycle for the accumulate step.
let icycles = Cycles::ONE + internal_multiply_cycles(rsv, false);
memory.stall(icycles);
icycles
}
/// Unsigned long multiply and accumulate, setting flags
// UMLALS: same as UMLAL, plus N/Z flags computed from the 64-bit result.
pub fn arm_umlals(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as u64;
let rhs = rsv as u64;
let alo = cpu.registers.read(rd_lo) as u64;
let ahi = cpu.registers.read(rd_hi) as u64;
let acc = (ahi << 32) | alo;
let res = lhs.wrapping_mul(rhs).wrapping_add(acc);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
set_nz_flags64(cpu, res);
let icycles = Cycles::ONE + internal_multiply_cycles(rsv, false);
memory.stall(icycles);
icycles
}
/// Unsigned long multiply (32x32 to 64)
// UMULL: RdHi:RdLo := (u64)Rm * (u64)Rs, operands zero-extended.
pub fn arm_umull(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as u64;
let rhs = rsv as u64;
let res = lhs.wrapping_mul(rhs);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
let icycles = internal_multiply_cycles(rsv, false);
memory.stall(icycles);
icycles
}
/// Unsigned long multiply, setting flags
// UMULLS: same as UMULL, plus N/Z flags computed from the 64-bit result.
pub fn arm_umulls(cpu: &mut Cpu, memory: &mut dyn Memory, instr: u32) -> Cycles {
let (rm, rs, rd_lo, rd_hi) = get_long_mulinstr_regs(instr);
let rsv = cpu.registers.read(rs);
let lhs = cpu.registers.read(rm) as u64;
let rhs = rsv as u64;
let res = lhs.wrapping_mul(rhs);
let res_lo = (res & 0xFFFFFFFF) as u32;
let res_hi = ((res >> 32) & 0xFFFFFFFF) as u32;
cpu.registers.write(rd_lo, res_lo);
cpu.registers.write(rd_hi, res_hi);
set_nz_flags64(cpu, res);
let icycles = internal_multiply_cycles(rsv, false);
memory.stall(icycles);
icycles
}
|
use std::rc::Rc;
use std::cell::RefCell;
use std::path::{Path, PathBuf};
use std::fs::*;
use std::io::{Read, Write, Error as IoError, ErrorKind};
use std::error::Error;
use runic::*;
use res::Resources;
use movement::*;
use app::State;
use lsp::LanguageServer;
use toml;
/// How indentation is inserted: a literal tab character, or a run of
/// spaces of the given width.
#[derive(Debug)]
pub enum TabStyle {
Tab,
Spaces(usize)
}
/// A text buffer: its lines, cursor, viewport state, and editor settings.
pub struct Buffer {
// Backing file path; None for an unsaved scratch buffer.
pub fs_loc: Option<PathBuf>,
pub lines: Vec<String>,
// buffer view
pub res: Rc<RefCell<Resources>>,
// Cached per-line text layouts; None = not laid out yet.
line_layouts: Vec<Option<TextLayout>>,
// First/last visible line indices.
viewport_start: usize,
viewport_end: usize,
pub cursor_line: usize,
pub cursor_col: usize,
pub show_cursor: bool,
pub tab_style: TabStyle,
pub tab_width: usize,
// Optional LSP connection for this buffer's language.
pub lang_server: Option<Rc<RefCell<LanguageServer>>>,
// Document version counter (e.g. for LSP didChange) — presumably
// incremented on edits; confirm in the editing code.
pub version: usize,
}
impl Buffer {
// Creates an empty, unsaved buffer containing a single blank line.
// Indent style/width are read from the config's `indent` table
// (`default-style` = "tabs" | "spaces", `width` = integer); anything
// missing or invalid silently falls back to tabs with width 4.
pub fn new(res: Rc<RefCell<Resources>>) -> Buffer {
let (default_indent_style, default_indent_width) = res.borrow().config.as_ref().and_then(|c| c.get("indent"))
.map(|c| (c.get("default-style"), c.get("width"))).and_then(|v| {
match (v.0.and_then(toml::Value::as_str), v.1.and_then(toml::Value::as_integer)) {
(Some("tabs"), Some(w)) => Some((TabStyle::Tab, w as usize)),
(Some("spaces"), Some(w)) => Some((TabStyle::Spaces(w as usize), w as usize)),
_ => None
}
}).unwrap_or((TabStyle::Tab,4));
Buffer {
fs_loc: None, lines: vec![String::from("")],
res, cursor_line: 0, cursor_col: 0, viewport_start: 0, viewport_end: 0,
line_layouts: vec![None], show_cursor: true, tab_style: default_indent_style, tab_width: default_indent_width,
lang_server: None, version: 0
}
}
/// Load a buffer from `fp`. Resolves relative paths against the current
/// directory, reads indent defaults from the config, auto-detects an existing
/// file's tab style from its first indented line, and registers the buffer
/// with a language server when one handles the file's extension.
/// A nonexistent file yields a single empty line (the file is created on save).
pub fn load(fp: &Path, app: &mut State) -> Result<Buffer, Box<Error>> {
    // resolve relative paths against the current working directory
    let mut path = if fp.is_relative() {
        let mut cd = ::std::env::current_dir()?;
        cd.push(fp);
        cd
    } else { PathBuf::from(fp) };
    let fp_exists = path.exists();
    // config defaults; unlike new(), a present-but-invalid [indent] section is
    // a hard ConfigError here, while a missing one falls back to tabs/4
    let (default_indent_style, default_indent_width) = app.res.borrow().config.as_ref().and_then(|c| c.get("indent"))
        .map(|c| (c.get("default-style"), c.get("width"))).map(|v| {
            match (v.0.and_then(toml::Value::as_str), v.1.and_then(toml::Value::as_integer)) {
                (Some("tabs"), Some(w)) => Ok((TabStyle::Tab, w as usize)),
                (Some("spaces"), Some(w)) => Ok((TabStyle::Spaces(w as usize), w as usize)),
                _ => Err(super::ConfigError::Invalid("default indent style"))
            }
        }).unwrap_or(Ok((TabStyle::Tab,4)))?;
    let (lns, lay, ts) = if fp_exists {
        let mut f = OpenOptions::new().read(true).write(true).open(&path)?;
        let mut s : String = String::new();
        f.read_to_string(&mut s)?;
        let lns: Vec<String> = s.lines().map(String::from).collect();
        let mut layouts = Vec::new();
        let mut ts: Option<TabStyle> = None;
        for i in 0..lns.len() { //replace with Vec::resize_default?
            // detect the tab style from the first line starting with whitespace:
            // a leading tab means tabs; a run of n leading spaces means Spaces(n)
            if ts.is_none() {
                let mut ch = lns[i].chars();
                ts = match ch.next() {
                    Some('\t') => Some(TabStyle::Tab),
                    Some(' ') => {
                        let mut n = 1;
                        while let Some(' ') = ch.next() { n += 1 }
                        Some(TabStyle::Spaces(n))
                    },
                    _ => None
                };
            }
            layouts.push(None);
        }
        (lns, layouts, ts.unwrap_or(default_indent_style))
    } else {
        // file doesn't exist yet: start with a single empty line
        (vec![String::from("")], vec![None], default_indent_style)
    };
    let buf = Buffer {
        fs_loc: Some(path),
        lines: lns, line_layouts: lay,
        viewport_start: 0, viewport_end: 0, cursor_line: 0, cursor_col: 0, show_cursor: true,
        res: app.res.clone(),
        tab_style: ts, tab_width: default_indent_width,
        lang_server: match fp.extension().and_then(|ext| ext.to_str()) {
            Some(ext) => app.language_server_for_file_type(ext)?,
            None => None
        },
        version: 0
    };
    // announce the document to the language server and request its symbols
    if let Some(ref ls) = buf.lang_server {
        let mut ls = ls.borrow_mut();
        ls.document_did_open(&buf);
        ls.send("textDocument/documentSymbol", object!{
            "textDocument" => object!{
                "uri" => String::from("file:///") + buf.fs_loc.as_ref().expect("buffer has location").to_str().unwrap(),
            }
        }).unwrap();
    }
    Ok(buf)
}
/// Clamp the requested (col, line) to a valid position in the buffer, snap
/// the column forward to a UTF-8 char boundary, scroll the viewport so the
/// cursor stays visible, and store the result.
pub fn place_cursor(&mut self, mut cursor_col: usize, mut cursor_line: usize) {
    let bl = &self.lines;
    if bl.len() == 0 { cursor_line = 0; }
    else {
        if cursor_line >= bl.len() { cursor_line = bl.len()-1; }
        let cln = &bl[cursor_line];
        if cursor_col > cln.len() { cursor_col = cln.len(); }
        // snap forward to the next char boundary so we never index
        // mid-codepoint; is_char_boundary(len) is true, so this terminates
        while !cln.is_char_boundary(cursor_col) { cursor_col += 1; }
    }
    // scroll up one line at a time until the cursor line is visible
    while cursor_line < self.viewport_start {
        self.viewport_start = self.viewport_start.saturating_sub(1);
    }
    // scroll down, keeping the viewport height constant
    while cursor_line >= self.viewport_end {
        let len = self.viewport_end - self.viewport_start;
        self.viewport_start = self.viewport_start.saturating_add(1);
        self.viewport_end = self.viewport_start+len;
    }
    self.cursor_col = cursor_col;
    self.cursor_line = cursor_line;
}
/// Move the cursor by a (dx, dy) delta, clamping at zero; final clamping to
/// the buffer contents is delegated to place_cursor.
pub fn move_cursor(&mut self, (dx, dy): (isize,isize)) {
    let col = (self.cursor_col as isize + dx).max(0) as usize;
    let line = (self.cursor_line as isize + dy).max(0) as usize;
    self.place_cursor(col, line);
}
/// Current cursor position as a `(col, line)` tuple (col is a byte index).
pub fn curr_loc(&self) -> (usize, usize) {
    (self.cursor_col, self.cursor_line)
}
// scan from cursor looking for character. possibly absurdly made and could be done better with
// a better buffer representation
pub fn scan_line<P: Fn(char)->bool>(&self, pred: P, forwards: bool) -> Option<usize> {
let line_chars = self.lines[self.cursor_line].char_indices();
(if forwards {
println!("fwd");
for (i, c) in line_chars {
if i < self.cursor_col { continue }
println!("{:?}", (i,c));
if pred(c) { return Some(i); }
}
None //line_chars.take(self.cursor_col).inspect(|&v| print!("{:?}", v)).find(|&(_, c)| pred(c)).map(|(i, _)| i)
} else {
println!("rev");
for (i, c) in line_chars.rev() {
if i > self.cursor_col { continue }
println!("{:?}", (i,c));
if pred(c) { return Some(i); }
}
None
//line_chars.skip(self.cursor_col).inspect(|&v| print!("{:?}", v)).find(|&(_, c)| pred(c)).map(|(i, _)| i)
})
/*let (left, right) = self.lines[self.cursor_line].split_at(self.cursor_col + if forwards {1} else {0});
//println!("({}, {})", left, right);
if forwards {
right.find(pred).map(|v| v as isize + 1)
} else {
left.rfind(pred).map(|v| -(left.len() as isize - v as isize))
}*/
}
/// Calculate the range of a movement from where the cursor is currently
/// located to the end of the movement, half-open (start <= x < end, like
/// `start..end`) over `(col, line)` tuples. `start` is usually the current
/// cursor location, except where the movement includes an entire line; `end`
/// is the absolute end of the movement.
pub fn movement_range(&mut self, mv: &Movement) -> ::std::ops::Range<(usize, usize)> {
    // step one unit in the given direction, clamping at the usize bounds
    fn wrapadd1(a: usize, b: bool) -> usize {
        if b { a.saturating_add(1) } else { a.saturating_sub(1) }
    }
    let cur = self.curr_loc();
    match *mv {
        Movement::Char(right) => (cur..(wrapadd1(cur.0, right), cur.1)),
        Movement::Line(up, m) => match m {
            Inclusion::Linewise => (cur..(cur.0, wrapadd1(cur.1, !up))),
            Inclusion::Inclusive => ((0, cur.1)..(0, wrapadd1(cur.1, !up))),
            Inclusion::Exclusive => panic!("Exclusive line movement")
        },
        Movement::CharScan { query, direction, inclusion: _, place_to_side } => {
            match self.scan_line(|q| q==query, direction) {
                Some(col) => { (cur..(if place_to_side { wrapadd1(col, !direction) } else { col }, cur.1)) },
                None => (cur..cur)
            }
        }
        Movement::Word(direction, _inclusion) => {
            // Word motion rules:
            //  * on whitespace      => move to the next non-whitespace char
            //  * on alphanumeric    => move until a non-alphanumeric char,
            //                          stepping one further if it's whitespace
            //  * otherwise (punct)  => move until an alphanumeric char
            let mut v = cur..cur;
            // NOTE(review): when scanning backwards from line 0 this loop can
            // revisit the same line and may not terminate — confirm edge case.
            'main: while v.end.1 < self.lines.len() {
                let mut chars: Box<Iterator<Item = (usize, char)>> = if direction {
                    Box::new(self.lines[v.end.1].char_indices().skip(v.end.0))
                } else {
                    Box::new(self.lines[v.end.1].char_indices().rev().skip(self.lines[v.end.1].len()-v.end.0))
                };
                match chars.next() {
                    Some((_, c)) => {
                        if char::is_alphanumeric(c) {
                            for (i, c) in chars {
                                if !char::is_alphanumeric(c) {
                                    v.end = (i + if !char::is_whitespace(c) { 0 } else { 1 }, v.end.1);
                                    break 'main;
                                }
                            }
                        } else if char::is_whitespace(c) {
                            for (i, c) in chars {
                                if !char::is_whitespace(c) { v.end = (i, v.end.1); break 'main; }
                            }
                        } else {
                            for (i, c) in chars {
                                if char::is_alphanumeric(c) { v.end = (i, v.end.1); break 'main; }
                            }
                        }
                    }
                    None => {
                    }
                }
                // no match on this line: continue on the next/previous line
                let y = wrapadd1(v.end.1, direction);
                v.end = (if direction { 0 } else { self.lines[y].len() }, y);
            }
            v
        },
        // saturating_sub avoids the underflow panic the original `len()-1`
        // hit on an empty line
        Movement::EndOfLine => (cur..(self.lines[self.cursor_line].len().saturating_sub(1), cur.1)),
        Movement::StartOfLine => (cur..(0,cur.1)),
        Movement::Rep(count, ref movement) => {
            // apply the inner movement `count` times, accumulating the widest
            // range reached; the cursor is restored afterwards
            let mut total_range = self.movement_range(movement);
            let cp = self.curr_loc();
            for _ in 1..count {
                self.place_cursor(total_range.end.0, total_range.end.1);
                let r = self.movement_range(movement);
                if r.start.1 < total_range.start.1 || r.start.0 < total_range.start.0 {
                    total_range.start = r.start;
                }
                if r.end.1 > total_range.end.1 || r.end.0 > total_range.end.0 {
                    total_range.end = r.end;
                }
            }
            self.place_cursor(cp.0, cp.1);
            total_range
        }
        _ => panic!("unknown movement!")
    }
}
/// Move the cursor to the end of the given movement's range.
pub fn make_movement(&mut self, mv: Movement) {
    let (col, line) = self.movement_range(&mv).end;
    self.place_cursor(col, line);
}
pub fn delete_movement(&mut self, mv: Movement) -> String {
let mut removed = String::new();
// movement_range(mv) calculates range for movement mv. Must delete all selected
// lines+chars. Easy ranges are say (6, n) -> (10, n) where it's all in one line. the
// harder ranges are those like (0, n) -> (0, n+3) where it deletes whole lines, and the
// hardest are probably ones like (6,7) -> (8,12) where it deletes whole lines and
// intraline characters
println!("trying to delete movement ({:?})", mv);
let incm = mv.inclusion_mode();
let ::std::ops::Range { mut start, mut end } = self.movement_range(&mv);
println!("\tfrom {:?} to {:?}", start, end);
self.line_layouts[start.1] = None;
for line in (start.1)..(end.1) {
println!("\tline {}: {}", line, self.lines[line]);
self.line_layouts[line] = None;
}
if incm == Inclusion::Inclusive { end.0 += 1; }
if start.1 == end.1 { // all in the same line
if start.0 > self.lines[start.1].len() || end.0 > self.lines[start.1].len() { return removed; }
removed.push_str(&self.lines[start.1]
.drain(if start.0 > end.0 { (end.0)..(start.0) } else { (start.0)..(end.0) })
.collect::<String>());
} else {
for i in (start.1)..(end.1) {
removed.push_str(&self.lines.remove(i));
removed.push_str("\n");
self.line_layouts.remove(i);
}
}
if self.lines.len() == 0 {
self.lines.push(String::new());
self.line_layouts.push(None);
}
println!("\t removed: \"{}\"", removed);
self.move_cursor((0,0)); //ensure that the cursor is in a valid position
removed
}
/// Copy (without deleting) the text covered by `mv` and return it.
/// Same-line ranges yield a span of characters; multi-line ranges yield
/// whole lines, newline-terminated.
pub fn yank_movement(&mut self, mv: Movement) -> String {
    let mut selected = String::new();
    let incm = mv.inclusion_mode();
    let ::std::ops::Range { start, mut end } = self.movement_range(&mv);
    if incm == Inclusion::Inclusive { end.0 += 1; }
    if start.1 == end.1 { // all in the same line
        // guard against out-of-range columns, mirroring delete_movement
        if start.0 > self.lines[start.1].len() || end.0 > self.lines[start.1].len() { return selected; }
        selected.push_str(&self.lines[start.1][if start.0 > end.0 { (end.0)..(start.0) } else { (start.0)..(end.0) }]);
    } else {
        for i in (start.1)..(end.1) {
            selected.push_str(&self.lines[i]);
            selected.push_str("\n");
        }
    }
    selected
}
/// Reset the buffer to a single empty line with the cursor at the origin.
pub fn clear(&mut self) {
    self.cursor_col = 0;
    self.cursor_line = 0;
    self.lines = vec![String::from("")];
    self.line_layouts = vec![None];
}
/// Drop the cached layout for `line` so paint() rebuilds it.
pub fn invalidate_line(&mut self, line: usize) {
    self.line_layouts[line] = None;
}
/// Insert `c` at the cursor, invalidate the line's layout, and advance the
/// cursor one column.
pub fn insert_char(&mut self, c: char) {
    let (col, line) = self.curr_loc();
    self.lines[line].insert(col, c);
    self.invalidate_line(line);
    self.move_cursor((1, 0));
}
/// Delete the character under the cursor; when the cursor sits at or past the
/// end of the line, the line's last character is popped instead
/// (NOTE(review): presumably backspace-at-EOL behaviour — confirm intent).
pub fn delete_char(&mut self) {
    let loc = self.curr_loc();
    if loc.0 >= self.lines[loc.1].len() {
        self.lines[loc.1].pop();
    }
    else {
        // assumes the cursor is on a char boundary (place_cursor guarantees
        // this); String::remove panics otherwise
        self.lines[loc.1].remove(loc.0);
    }
    self.invalidate_line(loc.1);
}
/// Count the indentation levels of `line`: each tab counts as one level, and
/// each full run of `spaces_in_indent` spaces counts as one level.
/// NOTE(review): the first space consumed by the outer loop is not counted by
/// the inner `spaces` counter, and the outer `Some(_) => {}` arm keeps
/// scanning past non-whitespace characters (so tabs later in the line also
/// count) — confirm both are intended.
fn compute_line_indent(&self, line: usize) -> usize {
    let mut i = 0;
    let mut ch = self.lines[line].chars();
    // how many consecutive spaces make up one indent level
    let spaces_in_indent = match self.tab_style {
        TabStyle::Spaces(n) => n,
        TabStyle::Tab => self.tab_width
    };
    'main: loop {
        match ch.next() {
            Some('\t') => { i += 1; }
            Some(' ') => {
                let mut spaces = 0;
                'space: loop {
                    match ch.next() {
                        Some(' ') => {
                            spaces += 1;
                            // a complete run of spaces is one more level
                            if spaces % spaces_in_indent == 0 {
                                spaces = 0;
                                i += 1;
                            }
                        },
                        Some('\t') => {
                            if spaces == 0 {
                                i += 1;
                            } else {
                                panic!("only heathens mix tabs and spaces");
                            }
                        },
                        Some(_) => { break 'space },
                        None => { break 'main }
                    }
                }
            },
            Some(_) => {},
            None => { break 'main }
        }
    }
    i
}
/// Prepend `i` indent levels to `ln`, returning the number of characters added.
fn indent_line(&self, ln: &mut String, i: usize) -> usize {
    if i == 0 { return 0; }
    // one tab per level, or `w` spaces per level
    let (unit, count) = match self.tab_style {
        TabStyle::Spaces(w) => (' ', w * i),
        TabStyle::Tab => ('\t', i)
    };
    let mut indent = String::new();
    for _ in 0..count { indent.push(unit); }
    indent.push_str(ln);
    *ln = indent;
    count
}
/// Split the current line at the cursor, moving the remainder onto a new line
/// below, auto-indented to the current line's indent level.
pub fn break_line(&mut self) {
    let loc = self.curr_loc();
    let mut new_line = if loc.0 >= self.lines[loc.1].len() {
        String::from("")
    } else {
        self.lines[loc.1].split_off(loc.0)
    };
    // breaking at end-of-line opens one extra indent level
    // (NOTE(review): presumably for starting a new block — confirm intent)
    let indent = self.compute_line_indent(loc.1) + if new_line.len() == 0 { 1 } else { 0 };
    let ln = self.indent_line(&mut new_line, indent);
    self.lines.insert(loc.1+1, new_line);
    self.invalidate_line(loc.1);
    // NOTE(review): the layout slot is inserted at loc.1 while the line goes
    // to loc.1+1; insert_line() uses loc+1 for both — verify this index.
    self.line_layouts.insert(loc.1, None);
    self.viewport_end += 1;
    // land just after the inserted indentation on the new line
    self.cursor_col = ln;
    self.move_cursor((0,1));
}
/// Insert a new line below the cursor line (containing `val`, or empty),
/// indented to match the current line, and move the cursor onto it.
pub fn insert_line(&mut self, val: Option<&str>) {
    let loc = self.cursor_line;
    let mut line = val.map(|s| String::from(s)).unwrap_or_default();
    let indent = self.compute_line_indent(loc);
    let indent_ln = self.indent_line(&mut line, indent);
    self.lines.insert(loc+1, line);
    self.line_layouts.insert(loc+1, None);
    self.viewport_end += 1;
    // land just after the indentation on the new line
    self.cursor_col = indent_ln; self.move_cursor((0,1));
}
/// Insert one indent unit at the cursor: a run of spaces or a single tab
/// character, depending on the buffer's tab style.
/// (With literal tabs the cursor moves one tab at a time, so tabs can't be
/// broken in the middle.)
pub fn insert_tab(&mut self) {
    match self.tab_style {
        TabStyle::Spaces(num) => (0..num).for_each(|_| self.insert_char(' ')),
        TabStyle::Tab => self.insert_char('\t'),
    }
}
/// Insert a possibly multi-line string at the cursor. A string ending in
/// '\n' is inserted as whole new line(s) below the cursor; otherwise the
/// first fragment is spliced into the current line and any remaining
/// fragments become new lines.
pub fn insert_string(&mut self, s: &String) {
    // nothing to do for the empty string (the old `s.as_bytes()[s.len()-1]`
    // check panicked here)
    if s.is_empty() { return; }
    if s.ends_with('\n') {
        for ln in s.lines() {
            self.insert_line(Some(ln));
        }
    } else {
        let mut lns = s.lines();
        // s is non-empty, so there is at least one fragment
        let fln = lns.next().unwrap();
        let loc = self.curr_loc();
        self.lines[loc.1].insert_str(loc.0, fln);
        self.invalidate_line(loc.1);
        self.move_cursor((fln.len() as isize, 0));
        for ln in lns {
            self.insert_line(Some(ln));
        }
    }
}
/// Write the buffer back to its backing file (truncating it), fsync, and
/// notify the language server of the save. Returns a NotFound error when the
/// buffer has no backing file.
pub fn sync_disk(&mut self) -> Result<(), IoError> {
    let lines = self.lines.iter();
    match self.fs_loc {
        Some(ref path) => {
            let mut f = OpenOptions::new().write(true).truncate(true).create(true).open(path.as_path())?;
            for ln in lines {
                write!(f, "{}\n", ln)?;
            }
            f.sync_all()?;
            // clone the Rc so `self` can still be borrowed by the call below
            if let Some(ref mut ls) = self.lang_server.clone() {
                ls.borrow_mut().document_did_save(self);
            }
            Ok(())
        },
        None => Err(IoError::new(ErrorKind::NotFound, "sync_disk with no file backing"))
    }
}
/// Draw the visible lines (and the cursor block) into `bnd`, lazily building
/// text layouts for lines that have none cached, and record where the
/// viewport ends for the scrolling logic in place_cursor.
pub fn paint(&mut self, rx: &mut RenderContext, bnd: Rect) {
    //draw text
    let mut p = Point::xy(bnd.x, bnd.y);
    let mut line = self.viewport_start;
    rx.set_color(Color::rgb(0.9, 0.9, 0.9));
    'lineloop: while line < self.line_layouts.len() {
        let mut replace = false;
        match self.line_layouts[line] {
            Some(ref l) => {
                let b = l.bounds();
                // stop once the next line would fall below the paint bounds
                if p.y + b.h > bnd.y+bnd.h { break 'lineloop; }
                rx.draw_text_layout(p, &l);
                //draw cursor
                if self.show_cursor && line == self.cursor_line {
                    let col = self.cursor_col;
                    // fall back to an 8x8 box when no char bounds are available
                    let mut cb = self.line_layouts[self.cursor_line].as_ref().map_or(Rect::xywh(0.0, 0.0, 8.0, 8.0), |v| v.char_bounds(col));
                    if cb.w == 0.0 { cb.w = 8.0; }
                    rx.set_color(Color::rgba(0.8, 0.6, 0.0, 0.9));
                    rx.fill_rect(cb.offset(p));
                    rx.set_color(Color::rgb(0.9, 0.9, 0.9));
                }
                p.y += b.h;
            }
            None => {
                replace = true; // a hacky way to get around the fact that the match borrows self.line_layouts,
                // so we can't assign to it until we escape the scope
            }
        }
        if replace {
            // build the missing layout; `line` is not advanced, so the same
            // line is drawn on the next loop iteration
            self.line_layouts[line] = rx.new_text_layout(&self.lines[line], &self.res.borrow().font,
                bnd.w, bnd.h).ok();
        } else {
            line += 1;
        }
    }
    self.viewport_end = line;
}
/// TODO: unimplemented stub — should map the window point `p` to a
/// (col, line) position and call place_cursor.
pub fn move_cursor_to_mouse(&mut self, p: Point) {
}
/// The whole buffer as one string, each line newline-terminated
/// (including the last).
pub fn full_text(&self) -> String {
    let mut out = String::new();
    for line in &self.lines {
        out.push_str(line);
        out.push('\n');
    }
    out
}
}
impl Drop for Buffer {
fn drop(&mut self) {
if let Some(ref mut ls) = self.lang_server.clone() {
ls.borrow_mut().document_did_close(self);
}
}
}
|
use clap::{crate_version, App, Arg, ArgMatches};
use dnsbl::{CheckResult, DNSBL};
use log::{debug, info, warn};
use serde_derive::Deserialize;
use std::collections::{HashMap, HashSet};
use std::io::Write;
use std::net::{IpAddr, Ipv4Addr, SocketAddr};
use trust_dns::client::SyncClient;
use trust_dns::udp::UdpClientConnection;
// Combined program input: which blocklists to query and which ips to check.
// Deserialized from the yaml input file and/or built from flags.
#[derive(Debug, Deserialize, Default)]
struct Input {
    // the dnsbls to query
    dnsbls: HashSet<DNSBL>,
    // the ips to check, bucketed by expected result
    ips: IPSet,
}
impl Input {
    /// An empty input set.
    fn new() -> Self {
        Default::default()
    }
    /// Absorb another Input: union the dnsbls and merge the ip buckets.
    fn merge(&mut self, rhs: Self) {
        self.dnsbls.extend(rhs.dnsbls);
        self.ips.merge(rhs.ips);
    }
}
// The ips to check, bucketed by what we expect the dnsbls to say about them.
#[derive(Debug, Deserialize, Default)]
struct IPSet {
    // ips expected NOT to be listed
    good: HashSet<std::net::IpAddr>,
    // ips expected to be listed
    bad: HashSet<std::net::IpAddr>,
    // ips whose quality is unknown
    unknown: HashSet<std::net::IpAddr>,
}
impl IPSet {
fn merge(&mut self, rhs: Self) {
for el in rhs.good {
self.good.insert(el);
}
for el in rhs.bad {
self.bad.insert(el);
}
for el in rhs.unknown {
self.unknown.insert(el);
}
}
fn len(&self) -> usize {
self.good.len() + self.bad.len() + self.unknown.len()
}
fn union(&self) -> Vec<&std::net::IpAddr> {
let mut res: Vec<_> = self.good.union(&self.bad).collect();
for ip in &self.unknown {
res.push(ip)
}
res
}
fn validate(&self) -> Result<(), String> {
// Validate that 'good', 'bad', and 'unknown' are fully disjoint.
// Anything else would be silly and is likely a user error.
let goodbad: Vec<_> = self.good.intersection(&self.bad).collect();
if goodbad.len() != 0 {
return Err(format!(
"'good' and 'bad' ips must be disjoint; shared '{}'",
goodbad
.iter()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ")
));
}
let goodunknown: Vec<_> = self.good.intersection(&self.unknown).collect();
if goodunknown.len() != 0 {
return Err(format!(
"'good' and 'check' ips must be disjoint; shared '{}'",
goodunknown
.iter()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ")
));
}
let badunknown: Vec<_> = self.bad.intersection(&self.unknown).collect();
if badunknown.len() != 0 {
return Err(format!(
"'bad' and 'check' ips must be disjoint; shared '{}'",
badunknown
.iter()
.map(|i| i.to_string())
.collect::<Vec<_>>()
.join(", ")
));
}
Ok(())
}
}
/// CLI entry point: gather dnsbls and ips from an optional yaml file plus
/// flags, query every dnsbl for every ip over DNS, and print summary stats.
fn main() -> Result<(), String> {
    let matches = App::new("dnsbl-check")
        .version(crate_version!())
        .arg(
            Arg::with_name("debug")
                .help("verbose output, default false")
                .short("d"),
        )
        .arg(
            Arg::with_name("file")
                .long("file")
                .short("f")
                .takes_value(true)
                .help("Input yaml file to use"),
        )
        .arg(
            Arg::with_name("dnsbl")
                .long("dnsbl")
                .multiple(true)
                .short("l")
                .takes_value(true)
                .help("a dnsbl to check"),
        )
        .arg(
            Arg::with_name("good-ip")
                .multiple(true)
                .long("good-ip")
                .short("g")
                .takes_value(true)
                .help("A known-good ip"),
        )
        .arg(
            Arg::with_name("bad-ip")
                .multiple(true)
                .long("bad-ip")
                .short("b")
                .takes_value(true)
                .help("A known-bad ip"),
        )
        .arg(
            Arg::with_name("check-ip")
                .multiple(true)
                .long("check-ip")
                .short("c")
                .takes_value(true)
                .help("An ip of unknown quality"),
        )
        .get_matches();
    // flag values are merged on top of the (optional) input file
    let mut input = Input::new();
    if let Some(filename) = matches.value_of("file") {
        input.merge(load_input_file(&filename)?);
    }
    input.merge(load_from_flags(&matches)?);
    let debug = matches.is_present("debug");
    {
        // log the dnsbl module at Debug when -d was passed, Info otherwise
        let mut builder = env_logger::Builder::from_default_env();
        if debug {
            builder.filter_module("dnsbl", log::LevelFilter::Debug);
        } else {
            builder.filter_module("dnsbl", log::LevelFilter::Info);
        }
        builder.init();
    }
    // Okay, time to actually do some checking. No parallelism yet, that comes later
    // all lookups go through Google public DNS (8.8.8.8:53) over UDP
    let resolver = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(8, 8, 8, 8)), 53);
    let conn = UdpClientConnection::new(resolver).unwrap();
    let client = SyncClient::new(conn);
    input.ips.validate()?;
    if input.dnsbls.len() == 0 || input.ips.len() == 0 {
        return Err("At least one dnsbl or ip must be provided".to_string());
    }
    // per-ip list of the (dnsbl, result) pairs that listed it
    let mut results: HashMap<std::net::IpAddr, Vec<(DNSBL, CheckResult)>> = Default::default();
    let mut last_percent = 0;
    let all_ips = input.ips.union();
    let len = all_ips.len();
    for (i, ip) in all_ips.into_iter().enumerate() {
        let mut listings = Vec::new();
        for dnsbl in &input.dnsbls {
            let res = dnsbl
                .check_ip(&client, ip)
                .map_err(|e| format!("Error lookup up '{}' on '{}': {}", ip, dnsbl, e))?;
            if res.listed() {
                listings.push((dnsbl.clone(), res));
            }
        }
        results.insert(*ip, listings);
        // progress output only when the whole-percent value advances
        if i * 100 / len > last_percent {
            println!("Progress: {}%", i * 100 / len);
            last_percent = i * 100 / len;
        }
    }
    print_stats(debug, &input.ips, results);
    Ok(())
}
/// Open and deserialize the yaml input file into an Input.
fn load_input_file(filename: &str) -> Result<Input, String> {
    let file =
        std::fs::File::open(filename).map_err(|e| format!("Could not open input file: {}", e))?;
    serde_yaml::from_reader::<_, Input>(file)
        .map_err(|e| format!("Could not parse input file as yaml: {}", e))
}
fn load_from_flags(matches: &ArgMatches) -> Result<Input, String> {
let mut input = Input::new();
if let Some(bls) = matches.values_of("dnsbl") {
for bl in bls {
let bl = parse_bl(&bl)?;
input.dnsbls.insert(bl);
}
}
if let Some(good_ips) = matches.values_of("good-ip") {
for ip in good_ips {
let ip = ip
.parse()
.map_err(|e| format!("invalid ip '{}': {}", ip, e))?;
input.ips.good.insert(ip);
}
}
if let Some(bad_ips) = matches.values_of("bad-ip") {
for ip in bad_ips {
let ip = ip
.parse()
.map_err(|e| format!("invalid ip '{}': {}", ip, e))?;
input.ips.bad.insert(ip);
}
}
if let Some(check_ips) = matches.values_of("check-ip") {
for ip in check_ips {
let ip = ip
.parse()
.map_err(|e| format!("invalid ip '{}': {}", ip, e))?;
input.ips.unknown.insert(ip);
}
}
Ok(input)
}
/// Parse a dnsbl spec flag: 'name:host:record,record,record', 'name:host',
/// or a bare 'host' (records are single decimal octets).
fn parse_bl(flag: &str) -> Result<DNSBL, String> {
    let parts: Vec<&str> = flag.split(":").collect();
    match parts.as_slice() {
        // 3 parts: 'name:host:record,record,record'
        [name, host, records] => {
            let parsed: Result<Vec<u8>, _> =
                records.split(",").map(|record| record.parse::<u8>()).collect();
            let records = parsed.map_err(|err| {
                format!(
                    "malformed record, must be a single octet in decimal: {}",
                    err
                )
            })?;
            Ok(DNSBL::new(name.to_string(), host.to_string(), records))
        }
        // 2 parts: 'name:host'
        [name, host] => Ok(DNSBL::new(name.to_string(), host.to_string(), Vec::new())),
        // 1 part: 'host'
        [host] => Ok(DNSBL::new("".to_string(), host.to_string(), Vec::new())),
        _ => Err(format!(
            "could not parse '{}'; expected 1 to 3 colon-separated parts, not {}",
            flag,
            parts.len()
        )),
    }
}
/// Print a tab-aligned summary of the check results; with `debug`, also list
/// the individual false-positive ips.
fn print_stats(debug: bool, ips: &IPSet, results: HashMap<IpAddr, Vec<(DNSBL, CheckResult)>>) {
    // an ip is "banned" when at least one dnsbl listed it
    let banned: Vec<_> = results.iter().filter(|(_, val)| val.len() > 0).collect();
    let not_banned: Vec<_> = results.iter().filter(|(_, val)| val.len() == 0).collect();
    // known-good ips that got listed anyway
    let false_positives: Vec<_> = banned
        .iter()
        .filter(|(key, _)| ips.good.contains(key))
        .collect();
    // known-bad ips that no dnsbl listed
    let false_negatives: Vec<_> = not_banned
        .iter()
        .filter(|(key, _)| ips.bad.contains(key))
        .collect();
    let mut tw = tabwriter::TabWriter::new(Vec::new());
    // NOTE(review): when ips.good or ips.bad is empty, the percentage
    // divisions below are x/0 in f64 and print NaN/inf — consider guarding.
    tw.write_all(
        format!(
            "Statistics:
Total ips\t{total}
Listed ips\t{listed}\t{listed_p}%
False positives\t{false_positives}\t{false_positives_p}%
False negatives\t{false_negatives}\t{false_negatives_p}%",
            total = ips.len(),
            listed = banned.len(),
            listed_p = (banned.len() * 100) as f64 / ips.len() as f64,
            false_positives = false_positives.len(),
            false_positives_p = (false_positives.len() * 100) as f64 / ips.good.len() as f64,
            false_negatives = false_negatives.len(),
            false_negatives_p = (false_negatives.len() * 100) as f64 / ips.bad.len() as f64,
        )
        .as_bytes(),
    )
    .unwrap();
    println!("{}", String::from_utf8(tw.into_inner().unwrap()).unwrap());
    if debug {
        println!(
            "\nFalse positive ips:\n{}",
            false_positives
                .iter()
                .map(|(ip, _)| ip.to_string())
                .collect::<Vec<_>>()
                .join("\n")
        );
    }
}
|
use std::collections::HashMap;
use std::time::Instant;
use crate::utils::file2vec;
pub fn day1(filename: &String)->(){
let mut contents:Vec<i32> = file2vec::<i32>(filename)
.iter().map(|x|x.to_owned().unwrap()).collect();
let mut map: HashMap<i32,usize> = HashMap::new();
for i in 0..contents.len(){
map.insert(2020-contents[i], i);
}
let mut part1_solution: Option<i32> = None;
for i in 0..contents.len(){
let ans = match map.get(&contents[i]) {
Some(ans) => {
(contents[i], contents[*ans])
},
None => (-1,-1)
};
if ans.0>=0{
println!("solution p1 is {}, {}", ans.0, ans.1);
part1_solution = Some(ans.0*ans.1);
break
}
}
let start = Instant::now();
let mut part2_solution: Option<i32> = None;
contents.sort();
for i in 0..contents.len()-2 {
let mut j = i+1;
let mut k = contents.len()-1;
while j<k {
let ans = contents[i] + contents[j] + contents[k];
part2_solution = if ans == 2020 {
println!("solution p2 is {}, {}, {}", contents[i], contents[j], contents[k]);
Some(contents[i]*contents[j]*contents[k])
}else if ans > 2020 {
k-=1;
None
} else{
j+=1;
None
};
match part2_solution {
None => (),
_ => break
};
}
if j<k{
break
}
}
println!("3sum took {:?}", start.elapsed());
println!("Day 1 Part 1: {}", part1_solution.unwrap_or(-1));
println!("Day 1 Part 2: {}", part2_solution.unwrap_or(-1));
} |
// svd2rust-generated reader/writer type aliases for the fields of the
// cryptographic processor's control register (CR); the bit offsets live in
// the `R`/`W` impls below.
#[doc = "Register `CR` reader"]
pub type R = crate::R<CR_SPEC>;
#[doc = "Register `CR` writer"]
pub type W = crate::W<CR_SPEC>;
#[doc = "Field `ALGODIR` reader - Algorithm direction"]
pub type ALGODIR_R = crate::BitReader;
#[doc = "Field `ALGODIR` writer - Algorithm direction"]
pub type ALGODIR_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `ALGOMODE` reader - Algorithm mode"]
pub type ALGOMODE_R = crate::FieldReader;
#[doc = "Field `ALGOMODE` writer - Algorithm mode"]
pub type ALGOMODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `DATATYPE` reader - Data type selection"]
pub type DATATYPE_R = crate::FieldReader;
#[doc = "Field `DATATYPE` writer - Data type selection"]
pub type DATATYPE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `KEYSIZE` reader - Key size selection (AES mode only)"]
pub type KEYSIZE_R = crate::FieldReader;
#[doc = "Field `KEYSIZE` writer - Key size selection (AES mode only)"]
pub type KEYSIZE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
// FFLUSH is write-only: no reader alias is generated for it
#[doc = "Field `FFLUSH` writer - FIFO flush"]
pub type FFLUSH_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CRYPEN` reader - Cryptographic processor enable"]
pub type CRYPEN_R = crate::BitReader;
#[doc = "Field `CRYPEN` writer - Cryptographic processor enable"]
pub type CRYPEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // read-side accessors: each extracts its field from the cached register
    // value by shifting to the field's offset and masking to its width
    #[doc = "Bit 2 - Algorithm direction"]
    #[inline(always)]
    pub fn algodir(&self) -> ALGODIR_R {
        ALGODIR_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bits 3:5 - Algorithm mode"]
    #[inline(always)]
    pub fn algomode(&self) -> ALGOMODE_R {
        ALGOMODE_R::new(((self.bits >> 3) & 7) as u8)
    }
    #[doc = "Bits 6:7 - Data type selection"]
    #[inline(always)]
    pub fn datatype(&self) -> DATATYPE_R {
        DATATYPE_R::new(((self.bits >> 6) & 3) as u8)
    }
    #[doc = "Bits 8:9 - Key size selection (AES mode only)"]
    #[inline(always)]
    pub fn keysize(&self) -> KEYSIZE_R {
        KEYSIZE_R::new(((self.bits >> 8) & 3) as u8)
    }
    #[doc = "Bit 15 - Cryptographic processor enable"]
    #[inline(always)]
    pub fn crypen(&self) -> CRYPEN_R {
        CRYPEN_R::new(((self.bits >> 15) & 1) != 0)
    }
}
impl W {
    // write-side accessors: each returns a proxy that writes the field at its
    // bit offset (the const generic parameter on the writer type)
    #[doc = "Bit 2 - Algorithm direction"]
    #[inline(always)]
    #[must_use]
    pub fn algodir(&mut self) -> ALGODIR_W<CR_SPEC, 2> {
        ALGODIR_W::new(self)
    }
    #[doc = "Bits 3:5 - Algorithm mode"]
    #[inline(always)]
    #[must_use]
    pub fn algomode(&mut self) -> ALGOMODE_W<CR_SPEC, 3> {
        ALGOMODE_W::new(self)
    }
    #[doc = "Bits 6:7 - Data type selection"]
    #[inline(always)]
    #[must_use]
    pub fn datatype(&mut self) -> DATATYPE_W<CR_SPEC, 6> {
        DATATYPE_W::new(self)
    }
    #[doc = "Bits 8:9 - Key size selection (AES mode only)"]
    #[inline(always)]
    #[must_use]
    pub fn keysize(&mut self) -> KEYSIZE_W<CR_SPEC, 8> {
        KEYSIZE_W::new(self)
    }
    #[doc = "Bit 14 - FIFO flush"]
    #[inline(always)]
    #[must_use]
    pub fn fflush(&mut self) -> FFLUSH_W<CR_SPEC, 14> {
        FFLUSH_W::new(self)
    }
    #[doc = "Bit 15 - Cryptographic processor enable"]
    #[inline(always)]
    #[must_use]
    pub fn crypen(&mut self) -> CRYPEN_W<CR_SPEC, 15> {
        CRYPEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CR_SPEC;
// marker type describing the register: 32-bit wide, readable, writable
// (no write-1/write-0-to-modify fields), and reset value 0
impl crate::RegisterSpec for CR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr::R`](R) reader structure"]
impl crate::Readable for CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr::W`](W) writer structure"]
impl crate::Writable for CR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR to value 0"]
impl crate::Resettable for CR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
extern crate theca;
use theca::{Profile, BoolFlags};
use theca::item::Status;
#[test]
fn test_add_note() {
    // A blank note should land in the profile with id 1 and an empty body.
    let mut p = Profile { encrypted: false, notes: vec![] };
    let added = p.add_note("this is a title", &[], Some(Status::Blank), false, false, false);
    assert!(added.is_ok());
    assert_eq!(p.notes.len(), 1);
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Blank);
    assert_eq!(p.notes[0].body, "".to_string());
}
#[test]
fn test_add_started_note() {
    // Adding a note with Started status should preserve that status.
    let mut p = Profile { encrypted: false, notes: vec![] };
    let added = p.add_note("this is a title", &[], Some(Status::Started), false, false, false);
    assert!(added.is_ok());
    assert_eq!(p.notes.len(), 1);
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Started);
    assert_eq!(p.notes[0].body, "".to_string());
}
#[test]
fn test_add_urgent_note() {
    // Adding a note with Urgent status should preserve that status.
    let mut p = Profile { encrypted: false, notes: vec![] };
    let added = p.add_note("this is a title", &[], Some(Status::Urgent), false, false, false);
    assert!(added.is_ok());
    assert_eq!(p.notes.len(), 1);
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Urgent);
    assert_eq!(p.notes[0].body, "".to_string());
}
#[test]
fn test_add_basic_body_note() {
    // A body fragment passed at creation should become the note's body.
    let mut p = Profile { encrypted: false, notes: vec![] };
    let added = p.add_note("this is a title", &["and what?".to_string()], Some(Status::Blank), false, false, false);
    assert!(added.is_ok());
    assert_eq!(p.notes.len(), 1);
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Blank);
    assert_eq!(p.notes[0].body, "and what?".to_string());
}
#[test]
fn test_add_full_basic_body_note() {
    // Status and body supplied together should both be preserved.
    let mut p = Profile { encrypted: false, notes: vec![] };
    let added = p.add_note("this is a title", &["and what?".to_string()], Some(Status::Urgent), false, false, false);
    assert!(added.is_ok());
    assert_eq!(p.notes.len(), 1);
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Urgent);
    assert_eq!(p.notes[0].body, "and what?".to_string());
}
#[test]
fn test_edit_note_title() {
    // Editing only the title must leave status and body untouched.
    let mut p = Profile { encrypted: false, notes: vec![] };
    assert!(p.add_note("this is a title", &[], Some(Status::Blank), false, false, false).is_ok());
    assert_eq!(p.notes.len(), 1);
    let edited = p.edit_note(1, &"this is a new title".to_string(), &[], Some(Status::Blank), false, BoolFlags::default());
    assert!(edited.is_ok());
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a new title".to_string());
    assert_eq!(p.notes[0].status, Status::Blank);
    assert_eq!(p.notes[0].body, "".to_string());
}
#[test]
fn test_edit_note_status() {
    // Cycle a note through Started -> Urgent -> Blank, checking after each
    // edit that only the status changed.
    let mut p = Profile { encrypted: false, notes: vec![] };
    assert!(p.add_note("this is a title", &[], Some(Status::Blank), false, false, false).is_ok());
    assert_eq!(p.notes.len(), 1);
    assert!(p.edit_note(1, &"".to_string(), &[], Some(Status::Started), false, BoolFlags::default()).is_ok());
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Started);
    assert_eq!(p.notes[0].body, "".to_string());
    assert!(p.edit_note(1, &"".to_string(), &[], Some(Status::Urgent), false, BoolFlags::default()).is_ok());
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Urgent);
    assert_eq!(p.notes[0].body, "".to_string());
    assert!(p.edit_note(1, &"".to_string(), &[], Some(Status::Blank), false, BoolFlags::default()).is_ok());
    assert_eq!(p.notes[0].id, 1);
    assert_eq!(p.notes[0].title, "this is a title".to_string());
    assert_eq!(p.notes[0].status, Status::Blank);
    assert_eq!(p.notes[0].body, "".to_string());
}
#[test]
fn test_edit_note_body_basic() {
    // Supplying body lines to edit_note replaces the (empty) body.
    let mut profile = Profile {
        encrypted: false,
        notes: vec![],
    };
    assert!(profile
        .add_note("this is a title", &[], Some(Status::Blank), false, false, false)
        .is_ok());
    assert_eq!(profile.notes.len(), 1);
    let edited = profile.edit_note(
        1,
        &"".to_string(),
        &["woo body".to_string()],
        Some(Status::Blank),
        false,
        BoolFlags::default(),
    );
    assert!(edited.is_ok());
    let note = &profile.notes[0];
    assert_eq!(note.id, 1);
    assert_eq!(note.title, "this is a title");
    assert_eq!(note.status, Status::Blank);
    assert_eq!(note.body, "woo body");
}
#[test]
fn test_edit_full_note() {
    // Editing title, body and status in one call applies all three.
    let mut profile = Profile {
        encrypted: false,
        notes: vec![],
    };
    assert!(profile
        .add_note("this is a title", &[], Some(Status::Blank), false, false, false)
        .is_ok());
    assert_eq!(profile.notes.len(), 1);
    let edited = profile.edit_note(
        1,
        &"this is a new title".to_string(),
        &["woo body".to_string()],
        Some(Status::Started),
        false,
        BoolFlags::default(),
    );
    assert!(edited.is_ok());
    let note = &profile.notes[0];
    assert_eq!(note.id, 1);
    assert_eq!(note.title, "this is a new title");
    assert_eq!(note.status, Status::Started);
    assert_eq!(note.body, "woo body");
}
#[test]
fn test_delete_single_note() {
    // Deleting the only note leaves the profile empty.
    let mut profile = Profile {
        encrypted: false,
        notes: vec![],
    };
    assert!(profile
        .add_note("this is a title", &[], Some(Status::Blank), false, false, false)
        .is_ok());
    profile.delete_note(&[1]);
    assert_eq!(profile.notes.len(), 0);
}
#[test]
fn test_delete_some_notes() {
    // Deleting ids 1 and 3 out of three notes leaves only note 2,
    // which keeps its original id and contents.
    let mut profile = Profile {
        encrypted: false,
        notes: vec![],
    };
    for expected_len in 1..=3 {
        assert!(profile
            .add_note("this is a title", &[], Some(Status::Blank), false, false, false)
            .is_ok());
        assert_eq!(profile.notes.len(), expected_len);
    }
    profile.delete_note(&[1, 3]);
    assert_eq!(profile.notes.len(), 1);
    let survivor = &profile.notes[0];
    assert_eq!(survivor.id, 2);
    assert_eq!(survivor.title, "this is a title");
    assert_eq!(survivor.status, Status::Blank);
    assert_eq!(survivor.body, "");
}
#[test]
fn test_clear_notes() {
    // clear(true) removes every note from the profile.
    let mut profile = Profile {
        encrypted: false,
        notes: vec![],
    };
    for expected_len in 1..=3 {
        assert!(profile
            .add_note("this is a title", &[], Some(Status::Blank), false, false, false)
            .is_ok());
        assert_eq!(profile.notes.len(), expected_len);
    }
    assert!(profile.clear(true).is_ok());
    assert_eq!(profile.notes.len(), 0);
}
|
use super::io::read_until_separator;
use httparse::Status;
use std::net::SocketAddr;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
use tokio::net::TcpStream;
use url::Url;
#[cfg(not(any(target_os = "android", target_os = "linux")))]
/// Stub for platforms without the `SO_ORIGINAL_DST` socket option:
/// the original (pre-redirect) destination cannot be recovered here.
pub fn get_origin_dst(_socket: &TcpStream) -> Option<SocketAddr> {
    None
}
#[cfg(any(target_os = "android", target_os = "linux"))]
/// Recovers the original destination of a transparently redirected
/// connection by querying the kernel's `SO_ORIGINAL_DST` option.
/// Returns `None` if the query fails (e.g. the socket was not redirected).
pub fn get_origin_dst(socket: &TcpStream) -> Option<SocketAddr> {
    use nix::sys::socket::{getsockopt, sockopt, InetAddr};
    use std::net::ToSocketAddrs;
    use std::os::unix::io::AsRawFd;
    getsockopt(socket.as_raw_fd(), sockopt::OriginalDst {})
        .ok()
        .map(|addr| InetAddr::V4(addr).to_std())
}
/// Returns `true` when `buf` contains a complete HTTP response head
/// whose status code is below 300 (i.e. the proxy accepted the request).
/// Incomplete or unparseable responses yield `false`.
pub fn is_ok_response(buf: &[u8]) -> bool {
    let mut headers = [httparse::EMPTY_HEADER; 32];
    let mut response = httparse::Response::new(&mut headers);
    // `code` is guaranteed to be set once parsing reports Complete, so
    // the unwrap only runs on the short-circuited success path.
    matches!(response.parse(buf), Ok(Status::Complete(_))) && response.code.unwrap() < 300
}
/// Establishes a TCP tunnel to `remote` through the HTTP proxy at `proxy`
/// using the `CONNECT` method.
///
/// Resolves the proxy URL, connects with a 3-second timeout, sends the
/// CONNECT request, and returns the raw socket once the proxy answers
/// with a success (< 300) status. Any failure is surfaced as a
/// `ConnectionAborted` error or the underlying I/O error.
pub async fn http_proxy_connect(proxy: &Url, remote: &str) -> Result<TcpStream, std::io::Error> {
    let connect_str = format!(
        "CONNECT {} HTTP/1.1\r\nHost: {}\r\nConnection: keep-alive\r\nProxy-Connection: keep-alive\r\n\r\n",
        remote, remote
    );
    // Resolve the proxy URL to socket addresses (no default port supplied).
    let raddr: Vec<SocketAddr> = match proxy.socket_addrs(|| None) {
        Ok(m) => m,
        Err(err) => {
            error!(
                "Failed to parse addr with error:{} from connect request:{}",
                err, proxy
            );
            return Err(std::io::Error::from(std::io::ErrorKind::ConnectionAborted));
        }
    };
    let connect_bytes = connect_str.into_bytes();
    // Only the first resolved address is tried; connect is bounded by a
    // 3-second timeout (the timeout error converts into an io::Error via `?`).
    let conn = TcpStream::connect(&raddr[0]);
    let dur = std::time::Duration::from_secs(3);
    let s = tokio::time::timeout(dur, conn).await?;
    let mut socket = match s {
        Ok(s) => s,
        Err(err) => {
            error!("Failed to connect proxy:{} with err:{}", raddr[0], err);
            return Err(err);
        }
    };
    socket.write_all(&connect_bytes[..]).await?;
    // Read the proxy's response head (up to the blank line) and accept
    // the tunnel only on a success status.
    let (head, _) = read_until_separator(&mut socket, "\r\n\r\n").await?;
    if is_ok_response(&head[..]) {
        return Ok(socket);
    }
    Err(std::io::Error::from(std::io::ErrorKind::ConnectionAborted))
}
/// Adapter wrapping a tokio `TcpStream` so it can be used where the
/// `futures` crate's `AsyncRead`/`AsyncWrite` traits are required.
pub struct AsyncTcpStream {
    // The underlying tokio stream; all polls are forwarded to it.
    s: TcpStream,
}
impl AsyncTcpStream {
    /// Wraps an already-connected tokio `TcpStream`.
    pub fn new(s: TcpStream) -> Self {
        Self { s }
    }
}
/// Forwards reads to the inner tokio stream, bridging tokio's
/// `AsyncRead` to the `futures` crate trait of the same name.
impl futures::AsyncRead for AsyncTcpStream {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<futures::io::Result<usize>> {
        // `TcpStream` is `Unpin`, so re-pinning the inner field directly
        // is safe and avoids the `pin_mut!` dance.
        Pin::new(&mut self.s).poll_read(cx, buf)
    }
}
/// Forwards writes/flush/close to the inner tokio stream; `poll_close`
/// maps onto tokio's `poll_shutdown`.
impl futures::AsyncWrite for AsyncTcpStream {
    fn poll_write(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<futures::io::Result<usize>> {
        // `TcpStream` is `Unpin`; re-pin the field directly.
        Pin::new(&mut self.s).poll_write(cx, buf)
    }
    fn poll_flush(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        Pin::new(&mut self.s).poll_flush(cx)
    }
    fn poll_close(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(), std::io::Error>> {
        Pin::new(&mut self.s).poll_shutdown(cx)
    }
}
|
#![feature(specialization)]
#[macro_use]
extern crate log;
extern crate exact_size_iterator_traits as iter_exact;
extern crate unreachable;
extern crate void;
#[macro_use]
pub mod typehack;
#[macro_use]
pub mod array;
#[macro_use]
pub mod linalg;
#[macro_use]
pub mod geometry;
pub mod num;
|
use std::collections::BinaryHeap;
pub mod event;
pub mod line_segment;
use line_segment::LineSegment;
use line_segment::Point;
/// Intersection: two line segments and the point at which they overlap.
struct Intersection {
    // The pair of segments that cross.
    lines: [LineSegment; 2],
    // The crossing point.
    overlap: Point,
}
/// Finds every pairwise intersection among `lines`.
///
/// NOTE(review): only the empty-input case is implemented; any non-empty
/// heap currently panics via `unimplemented!`. The commented-out sketch
/// below suggests a sweep-line (event queue) approach was planned.
fn find_all_intersections(mut lines: BinaryHeap<LineSegment>) -> Vec<Intersection> {
    if lines.is_empty() {
        return vec![];
    }
    //use events::Event::*;
    //let mut es = BinaryHeap::new();
    // TODO: Cannot check for neighboring line segments with the Binary Heap. Create custom heap?
    //es.append(Upper(lines.pop()));
    //es.append(Upper(lines.pop()));
    unimplemented!() // TODO: Implement
}
/// Computes the intersection of two segments, or `None` if they do not
/// cross. Currently an unimplemented stub (always panics).
fn find_intersection(a: LineSegment, b: LineSegment) -> Option<Intersection> {
    unimplemented!() // TODO: Implement
}
#[test]
fn no_lines() {
    // An empty set of segments trivially has no intersections.
    let intersections = find_all_intersections(BinaryHeap::new());
    assert!(intersections.is_empty())
}
#[test]
fn one_line() {
    // A single segment cannot intersect anything.
    let mut segments = BinaryHeap::new();
    segments.push(LineSegment::new((0, 1), (0, 2)));
    assert!(find_all_intersections(segments).is_empty())
}
#[test]
fn two_lines() {
    // Two disjoint collinear segments: no intersections expected.
    let mut segments = BinaryHeap::new();
    segments.push(LineSegment::new((0, 1), (0, 2)));
    segments.push(LineSegment::new((0, -1), (0, -2)));
    assert!(find_all_intersections(segments).is_empty())
}
#[test]
fn two_lines_one_intersections() {
    // Two diagonals of a square cross exactly once, at the origin.
    let mut segments = BinaryHeap::new();
    segments.push(LineSegment::new((2, 2), (-2, -2)));
    segments.push(LineSegment::new((-2, 2), (2, -2)));
    let intersections = find_all_intersections(segments);
    assert!(!intersections.is_empty());
    let zero: [i32; 2] = [0, 0];
    assert_eq!(intersections[0].overlap, zero);
}
|
use std::fs;
/// Advent of Code 2019 day 2: run the Intcode program for part 1 with
/// the fixed (12, 2) inputs, then brute-force the (noun, verb) pair in
/// 0..=99 x 0..=99 that produces 19690720 for part 2.
fn main() {
    let input = fs::read_to_string("src/02/input.txt").expect("error reading input");
    println!("part 1: {}", intcode(&input, (12, 2))); // 2692315

    // Scan nouns 0..=99 for each verb; stop at the target output.
    let mut params: (usize, usize) = (0, 0);
    while intcode(&input, params) != 19690720 {
        params = match params {
            (99, 99) => break, // exhausted the search space
            (99, b) => (0, b + 1),
            (a, b) => (a + 1, b),
        };
    }
    println!("part 2: {}", (100 * params.0) + params.1); // 9507
}
/// Runs an Intcode program (Advent of Code 2019, day 2).
///
/// `input` is the comma-separated program text; `params` is the
/// (noun, verb) pair written into positions 1 and 2 before execution.
/// Returns the value left at position 0 when the program halts (99).
///
/// Panics on malformed input, an unknown opcode, or an out-of-range
/// address.
///
/// Fixes over the previous version:
/// - `i` was incremented by 4 both inside the opcode arms and after the
///   match, so every other instruction was skipped.
/// - The three operands were read before the opcode was inspected, so a
///   trailing `99` at the end of the program caused an out-of-bounds
///   panic instead of a clean halt.
fn intcode(input: &String, params: (usize, usize)) -> usize {
    let mut program = input
        .split(',')
        // trim() tolerates a trailing newline from read_to_string
        .map(|x| x.trim().parse::<usize>().unwrap())
        .collect::<Vec<usize>>();
    program[1] = params.0;
    program[2] = params.1;
    let mut i = 0;
    loop {
        match program[i] {
            1 => {
                // Operands are read only once we know this opcode has them.
                let (op1, op2, target) = (program[i + 1], program[i + 2], program[i + 3]);
                program[target] = program[op1] + program[op2];
            }
            2 => {
                let (op1, op2, target) = (program[i + 1], program[i + 2], program[i + 3]);
                program[target] = program[op1] * program[op2];
            }
            99 => break,
            _ => panic!("wtf? {}", program[i]),
        }
        // Advance exactly one 4-word instruction.
        i += 4;
    }
    program[0]
}
#[test]
fn test_parse() {
    // Smoke test: decode a day-5 style opcode word and dump the result.
    let instructions = parse(String::from("1002,0,0,1"));
    println!("{:?}", instructions);
}
/// Parses a comma-separated Intcode instruction string.
///
/// NOTE(review): this is an unfinished scratch function — it decodes and
/// prints the parameter modes and opcode of the *first* word only, then
/// returns an empty instruction list. See `instruction` for the per-word
/// decoder. Cleaned up here: dropped the unused `mut` on `data` and the
/// duplicated opcode-extraction expression (the `op` binding already
/// holds the two opcode digits).
fn parse(input: String) -> Vec<Instruction> {
    let data: Vec<_> = input.split(',').collect();
    // Left-pad the first word to the canonical 5-digit ABCDE form:
    // parameter modes A, B, C (read left to right here as pm3, pm2, pm1)
    // followed by the two-digit opcode DE.
    let opcode = format!("{:0>5}", data[0]);
    let mut i = opcode.chars();
    let pm3 = mode(i.next().unwrap());
    let pm2 = mode(i.next().unwrap());
    let pm1 = mode(i.next().unwrap());
    let op = i.collect::<String>();
    println!("pm1:{:?},pm2:{:?},pm3:{:?}", pm1, pm2, pm3);
    println!("opcode:{:?}", op);
    Vec::new()
}
#[test]
fn test_instruction() {
    // Smoke test: decode a single MULTIPLY word and dump the result.
    let decoded = instruction(String::from("1002,11,22,33"));
    println!("{:?}", decoded);
}
fn instruction(input: String) -> Instruction {
let data = format!("{:0>5}", input);
let mut chars = data.chars();
let pm3 = mode(chars.next().unwrap());
let pm2 = mode(chars.next().unwrap());
let pm1 = mode(chars.next().unwrap());
let opcode = chars.collect::<String>().clone();
println!("opcode:{}", &opcode);
match opcode.as_str() {
"01" => Instruction::ADD(
Parameter {
mode: pm1,
value: 0,
},
Parameter {
mode: pm2,
value: 0,
},
Parameter {
mode: pm3,
value: 0,
},
),
"02" => Instruction::MULTIPLY(
Parameter {
mode: pm1,
value: 0,
},
Parameter {
mode: pm2,
value: 0,
},
Parameter {
mode: pm3,
value: 0,
},
),
"03" => Instruction::INPUT(Parameter {
mode: ParameterMode::POSITION,
value: 0,
}),
"04" => Instruction::OUTPUT(Parameter {
mode: ParameterMode::POSITION,
value: 0,
}),
_ => panic!("unrecognized opcode"),
}
}
/// Maps one mode digit of an ABCDE opcode word to its `ParameterMode`.
///
/// Per the Intcode specification (AoC 2019 day 5): digit 0 is position
/// mode (the operand is an address) and digit 1 is immediate mode (the
/// operand is a literal). The previous mapping had the two inverted.
///
/// Panics on any digit other than '0' or '1'.
fn mode(c: char) -> ParameterMode {
    match c {
        '0' => ParameterMode::POSITION,
        '1' => ParameterMode::IMMEDIATE,
        _ => panic!("unknown parameter mode"),
    }
}
/// One operand of an Intcode instruction: how to interpret it plus the
/// raw value.
#[derive(Debug)]
struct Parameter {
    // How `value` is interpreted (address vs literal).
    mode: ParameterMode,
    // The raw operand value.
    value: usize,
}
/// A decoded Intcode instruction and its operands (day-5 subset).
#[derive(Debug)]
enum Instruction {
    ADD(Parameter, Parameter, Parameter),
    MULTIPLY(Parameter, Parameter, Parameter),
    INPUT(Parameter),
    OUTPUT(Parameter),
}
/// Intcode operand addressing mode: POSITION treats the value as an
/// address into the program; IMMEDIATE treats it as a literal.
#[derive(Debug)]
enum ParameterMode {
    POSITION,
    IMMEDIATE,
}
|
use std::{
convert::{TryFrom, TryInto},
fmt::Debug,
ops::{Deref, DerefMut},
};
use uuid::Uuid;
use crate::{
artist::{Artists, ArtistsWithRole},
audio::file::AudioFiles,
availability::Availabilities,
content_rating::ContentRatings,
external_id::ExternalIds,
restriction::Restrictions,
sale_period::SalePeriods,
util::{impl_deref_wrapped, impl_try_from_repeated},
Album, Metadata, RequestResult,
};
use librespot_core::{date::Date, Error, Session, SpotifyId};
use librespot_protocol as protocol;
/// A Spotify track, decoded from the `protocol::metadata::Track`
/// protobuf message (see the `TryFrom` impl below for the field mapping).
#[derive(Debug, Clone)]
pub struct Track {
    pub id: SpotifyId,
    pub name: String,
    pub album: Album,
    pub artists: Artists,
    pub number: i32,
    pub disc_number: i32,
    pub duration: i32,
    pub popularity: i32,
    pub is_explicit: bool,
    pub external_ids: ExternalIds,
    pub restrictions: Restrictions,
    pub files: AudioFiles,
    // Alternative track ids that may be playable when this one is not.
    pub alternatives: Tracks,
    pub sale_periods: SalePeriods,
    pub previews: AudioFiles,
    pub tags: Vec<String>,
    pub earliest_live_timestamp: Date,
    pub has_lyrics: bool,
    pub availability: Availabilities,
    // Nil UUID when the licensor field is absent or malformed.
    pub licensor: Uuid,
    pub language_of_performance: Vec<String>,
    pub content_ratings: ContentRatings,
    pub original_title: String,
    pub version_title: String,
    pub artists_with_role: ArtistsWithRole,
}
/// A list of track ids (e.g. a track's `alternatives`).
#[derive(Debug, Clone, Default)]
pub struct Tracks(pub Vec<SpotifyId>);
// Forward Deref/DerefMut to the inner Vec so `Tracks` is usable as a slice.
impl_deref_wrapped!(Tracks, Vec<SpotifyId>);
#[async_trait]
impl Metadata for Track {
    type Message = protocol::metadata::Track;
    /// Fetches the raw protobuf track metadata via spclient.
    async fn request(session: &Session, track_id: &SpotifyId) -> RequestResult {
        session.spclient().get_track_metadata(track_id).await
    }
    /// Decodes the protobuf message; the id argument is unused because
    /// the message itself carries the track id.
    fn parse(msg: &Self::Message, _: &SpotifyId) -> Result<Self, Error> {
        Self::try_from(msg)
    }
}
/// Field-by-field conversion from the protobuf track message. Fails if
/// any embedded id, date, or repeated sub-message fails to convert.
impl TryFrom<&<Self as Metadata>::Message> for Track {
    type Error = librespot_core::Error;
    fn try_from(track: &<Self as Metadata>::Message) -> Result<Self, Self::Error> {
        Ok(Self {
            id: track.try_into()?,
            name: track.name().to_owned(),
            album: track.album.get_or_default().try_into()?,
            artists: track.artist.as_slice().try_into()?,
            number: track.number(),
            disc_number: track.disc_number(),
            duration: track.duration(),
            popularity: track.popularity(),
            is_explicit: track.explicit(),
            external_ids: track.external_id.as_slice().into(),
            restrictions: track.restriction.as_slice().into(),
            files: track.file.as_slice().into(),
            alternatives: track.alternative.as_slice().try_into()?,
            sale_periods: track.sale_period.as_slice().try_into()?,
            previews: track.preview.as_slice().into(),
            tags: track.tags.to_vec(),
            earliest_live_timestamp: Date::from_timestamp_ms(track.earliest_live_timestamp())?,
            has_lyrics: track.has_lyrics(),
            availability: track.availability.as_slice().try_into()?,
            // A malformed licensor UUID degrades to the nil UUID rather
            // than failing the whole track conversion.
            licensor: Uuid::from_slice(track.licensor.uuid()).unwrap_or_else(|_| Uuid::nil()),
            language_of_performance: track.language_of_performance.to_vec(),
            content_ratings: track.content_rating.as_slice().into(),
            original_title: track.original_title().to_owned(),
            version_title: track.version_title().to_owned(),
            artists_with_role: track.artist_with_role.as_slice().try_into()?,
        })
    }
}
// Derive TryFrom for the repeated (list) form of the message as well.
impl_try_from_repeated!(<Track as Metadata>::Message, Tracks);
|
use ast;
use name::*;
use span::{Span, IntoSpan};
use middle::*;
use lang_items::*;
use collect_types::collect_types;
use collect_members::collect_members;
use tycheck::{populate_method, populate_constructor, populate_field};
use arena::Arena;
use rbtree::RbMap;
use std::fmt;
use std::borrow::ToOwned;
use std::collections::{HashMap, HashSet, VecDeque};
/// A name that can appear in an expression position: either a local
/// variable/parameter or a field of the enclosing type.
#[derive(Clone)]
pub enum Variable<'a, 'ast: 'a> {
    LocalVariable(VariableRef<'a, 'ast>),
    Field(FieldRef<'a, 'ast>),
}
/// Debug-prints a variable as `var name: ty` or `field name: ty`.
impl<'a, 'ast> fmt::Debug for Variable<'a, 'ast> {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        match *self {
            Variable::LocalVariable(var) => {
                write!(f, "var {}: {}", var.fq_name, var.ty)
            }
            Variable::Field(field) => {
                write!(f, "field {}: {}", field.fq_name, field.ty)
            }
        }
    }
}
/// Persistent (red-black-tree) map from simple name to type definition.
pub type TypesEnvironment<'a, 'ast> = RbMap<Symbol, TypeDefinitionRef<'a, 'ast>>;
/// Persistent map from simple name to the variable/field it denotes.
pub type VariablesEnvironment<'a, 'ast> = RbMap<Symbol, Variable<'a, 'ast>>;
/// The name-resolution context at one point in the program: visible
/// types and variables, the current/toplevel packages, the enclosing
/// type, known language items, and on-demand (`import pkg.*`) packages.
#[derive(Clone)]
pub struct Environment<'a, 'ast: 'a> {
    pub types: TypesEnvironment<'a, 'ast>,
    pub variables: VariablesEnvironment<'a, 'ast>,
    // Root of the package hierarchy (contains only packages, no types).
    pub toplevel: PackageRef<'a, 'ast>,
    pub package: PackageRef<'a, 'ast>,
    pub enclosing_type: TypeDefinitionRef<'a, 'ast>,
    pub lang_items: LangItems<'a, 'ast>,
    // Search here for more types.
    pub on_demand_packages: Vec<PackageRef<'a, 'ast>>
}
/// Multi-line debug dump of the environment (types, vars, packages).
impl<'a, 'ast> fmt::Debug for Environment<'a, 'ast> {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        try!(writeln!(f, "Environment {{"));
        try!(writeln!(f, "\ttypes:"));
        for &(name, tyref) in self.types.iter() {
            try!(writeln!(f, "\t\t{} => {}", name, tyref.fq_name));
        }
        try!(writeln!(f, "\tvars:"));
        for &(name, ref var) in self.variables.iter() {
            try!(writeln!(f, "\t\t{} => {:?}", name, var));
        }
        try!(writeln!(f, "\tcurrent package: {}", self.package.fq_name));
        try!(writeln!(f, "\tenclosing type: {}", self.enclosing_type.fq_name));
        try!(writeln!(f, "}}"));
        Ok(())
    }
}
/// A type paired with the environment it was resolved in.
pub type TypeEnvironmentPair<'a, 'ast> = (TypeDefinitionRef<'a, 'ast>, Environment<'a, 'ast>);
/// Result of resolving an "ambiguous name" (JLS §6.5): the prefix of a
/// dotted name may denote a package, a type, or an expression.
/// `Unknown` means an error was already reported.
enum AmbiguousResult<'a, 'ast: 'a> {
    Package(PackageRef<'a, 'ast>),
    Type(TypeDefinitionRef<'a, 'ast>),
    Expression(TypedExpression<'a, 'ast>),
    Unknown
}
impl<'a, 'ast> Environment<'a, 'ast> {
/// Is `sub` a subtype of `sup`?
/// (Every type is a subtype of `Object`.)
///
/// Implemented as a BFS over the `extends`/`implements` edges, with a
/// visited set guarding against inheritance cycles.
pub fn is_subtype(&self, sub: TypeDefinitionRef<'a, 'ast>, sup: TypeDefinitionRef<'a, 'ast>) -> bool {
    // Fast path: everything is a subtype of Object.
    if sup == self.lang_items.object {
        return true;
    }
    let mut q: VecDeque<TypeDefinitionRef<'a, 'ast>> = VecDeque::new();
    let mut visited: HashSet<TypeDefinitionRef<'a, 'ast>> = HashSet::new();
    q.push_back(sub);
    visited.insert(sub);
    while let Some(next) = q.pop_front() {
        if next == sup {
            return true;
        }
        for &parent in next.extends.iter().chain(next.implements.iter()) {
            // `insert` returns true only the first time we see `parent`.
            if visited.insert(parent) {
                q.push_back(parent);
            }
        }
    }
    false
}
// Resolve extends and implements. This is done separately from walking
// the AST because we need to process the compilations in topological
// order (with respect to inheritance).
fn resolve_inheritance(&self, tydef: TypeDefinitionRef<'a, 'ast>) {
    match tydef.ast.node {
        ast::TypeDeclaration_::Class(ref class) => {
            if let Some(ty) = class.node.extends.as_ref()
                    .and_then(|extension| self.resolve_class_extension(extension)) {
                // This class extends a parent class.
                tydef.extends.set(vec![ty]);
            } else {
                // No (resolvable) superclass: leave `extends` empty.
                tydef.extends.set(vec![]);
            }
            tydef.implements.set(self.resolve_implements(&*class.node.implements));
        }
        ast::TypeDeclaration_::Interface(ref interface) => {
            // Interfaces may extend several interfaces but implement nothing.
            tydef.extends.set(self.resolve_interface_extensions(&*interface.node.extends));
            tydef.implements.set(vec![]);
        },
    }
}
/// Resolves a class's `extends` clause to a type, reporting (but not
/// aborting on) extension of an interface or a final class.
fn resolve_class_extension(&self, extension: &QualifiedIdentifier)
        -> Option<TypeDefinitionRef<'a, 'ast>> {
    self.resolve_type_name(&*extension.parts).map(|extended_type| {
        if extended_type.kind == TypeKind::Interface {
            // ($8.1.3, dOvs simple constraint 1)
            span_error!(extension,
                        "class cannot extend interface");
        }
        if extended_type.has_modifier(ast::Modifier_::Final) {
            // ($8.1.1.2/$8.1.3 dOvs simple constraint 4)
            span_error!(extension,
                        "cannot extend final class `{}`",
                        extended_type.fq_name);
        }
        extended_type
    })
}
/// Resolves an interface's `extends` list, rejecting classes and
/// duplicate interfaces; unresolvable names are skipped (an error was
/// already reported by `resolve_type_name`).
fn resolve_interface_extensions(&self, extensions: &[QualifiedIdentifier])
        -> Vec<TypeDefinitionRef<'a, 'ast>> {
    // Keyed by fully-qualified name to detect duplicates.
    let mut seen = HashMap::new();
    for extension in extensions.iter() {
        match self.resolve_type_name(&*extension.parts) {
            Some(extended_type) => {
                if extended_type.kind == TypeKind::Class {
                    // ($9.1.2)
                    span_error!(extension,
                                "interface cannot extend class");
                }
                // An interface must not be repeated in an implements clause,
                // or in an extends clause of an interface.
                // (JLS 8.1.4, dOvs simple constraint 3)
                if seen.insert(extended_type.fq_name, extended_type).is_some() {
                    span_error!(extension,
                                "duplicate extended interface");
                }
            },
            None => {
                // an error was already printed
            },
        }
    }
    seen.into_iter().map(|(_, t)| t).collect()
}
/// Resolves a class's `implements` list, rejecting classes and
/// duplicate interfaces (mirror of `resolve_interface_extensions`).
fn resolve_implements(&self, implements: &[QualifiedIdentifier])
        -> Vec<TypeDefinitionRef<'a, 'ast>> {
    let mut seen = HashMap::new();
    for implement in implements.iter() {
        match self.resolve_type_name(&*implement.parts) {
            Some(implemented_type) => {
                if implemented_type.kind == TypeKind::Class {
                    // ($8.1.4, dOvs simple constraint 2)
                    span_error!(implement,
                                "class cannot implement class");
                }
                // An interface must not be repeated in an implements clause,
                // or in an extends clause of an interface.
                // (JLS 8.1.4, dOvs simple constraint 3)
                if seen.insert(implemented_type.fq_name, implemented_type).is_some() {
                    span_error!(implement,
                                "duplicate implemented interface");
                }
            },
            None => {
                // an error was already printed
            },
        }
    }
    seen.into_iter().map(|(_, t)| t).collect()
}
/// Resolves an AST type to a semantic `Type`, degrading unresolvable
/// element types to `Type::Unknown` (the error was already reported).
pub fn resolve_type(&self, ty: &ast::Type) -> Type<'a, 'ast> {
    match ty.node {
        ast::Type_::SimpleType(ref simple_type) =>
            if let Some(ty) = self.resolve_simple_type(simple_type) {
                Type::SimpleType(ty)
            } else {
                Type::Unknown
            },
        ast::Type_::ArrayType(ref simple_type) =>
            if let Some(ty) = self.resolve_simple_type(simple_type) {
                Type::ArrayType(ty)
            } else {
                Type::Unknown
            },
        ast::Type_::Void => Type::Void,
    }
}
/// Resolves a non-array AST type: primitives map directly; named types
/// go through `resolve_type_name` (which reports failures itself).
pub fn resolve_simple_type(&self,
                           ty: &ast::SimpleType)
        -> Option<SimpleType<'a, 'ast>> {
    match ty.node {
        ast::SimpleType_::Boolean => Some(SimpleType::Boolean),
        ast::SimpleType_::Int => Some(SimpleType::Int),
        ast::SimpleType_::Short => Some(SimpleType::Short),
        ast::SimpleType_::Char => Some(SimpleType::Char),
        ast::SimpleType_::Byte => Some(SimpleType::Byte),
        ast::SimpleType_::Other(ref qident) =>
            self.resolve_type_name(&*qident.parts)
                .map(|ty| SimpleType::Other(ty)),
    }
}
// Look up a package or type name, but always fails when a type is found.
// (This is because types in Joos never contain types.)
// Emits an error on failure.
fn resolve_package_or_type(&self, id: &[Ident]) -> Option<PackageRef<'a, 'ast>> {
    // First find the parent package (recursively for qualified names),
    // then look the final component up inside it.
    (match id {
        [] => panic!("bug: tried to resolve an empty package name"),
        [ref ident] => {
            // Because Java has nested types, we first have to check if
            // there is a type obscuring `ident`.
            match self.find_type(ident) {
                Some(tydef) => {
                    span_error!(ident.span,
                                "`{}` refers to type `{}`, not a package",
                                ident, tydef.fq_name);
                    None
                },
                None => Some((self.toplevel, ident)),
            }
        }
        [init.., ref last] => self.resolve_package_or_type(init).map(|package| (package, last)),
    }).and_then(|(package, name)| match package.contents.borrow().get(&name.node) {
        // FIXME: duplicated code with `resolve_package`...
        Some(&PackageItem::Package(package)) => Some(package),
        Some(&PackageItem::TypeDefinition(..)) => {
            // There was a type instead!
            span_error!(Span::range(&id[0], id.last().unwrap()),
                        "no such package `{}`; found a type instead",
                        Qualified(id.iter()));
            None
        }
        None => {
            span_error!(Span::range(&id[0], id.last().unwrap()),
                        "no such package `{}`",
                        Qualified(id.iter()));
            None
        }
    })
}
// Look up a (user-defined) type by either a qualified or simple name.
// Emits an error on failure.
pub fn resolve_type_name(&self, id: &[Ident]) -> Option<TypeDefinitionRef<'a, 'ast>> {
    match id {
        [] => panic!("bug: tried to resolve an empty type name"),
        // simple name
        [ref ident] => match self.find_type(ident) {
            Some(tydef) => Some(tydef),
            None => {
                span_error!(ident.span, "unresolved type name");
                None
            }
        },
        // fully-qualified name
        // in Joos, `init` must refer to a package... but if it's a type, we need to error
        [init.., ref last] => self.resolve_package_or_type(init).and_then(|package| {
            match package.contents.borrow().get(&last.node) {
                Some(&PackageItem::TypeDefinition(tydef)) => Some(tydef),
                _ => {
                    span_error!(Span::range(&init[0], last),
                                "no such type `{}` in package `{}`",
                                last, Qualified(init.iter()));
                    None
                }
            }
        })
    }
}
// Look up a type by simple name, using the current environment.
// Precedence: lexical environment, then the current package, then
// on-demand (`import pkg.*`) packages — the last of which must be
// scanned exhaustively to detect ambiguities.
// TODO: Clean up the error story here (don't want to emit multiple errors)
fn find_type(&self, ty: &Ident) -> Option<TypeDefinitionRef<'a, 'ast>> {
    self.types.get(&ty.node)
        .cloned()
        .or_else(|| {
            // Check the current package. This has higher precedence than on-demand imports.
            match self.package.contents.borrow().get(&ty.node) {
                Some(&PackageItem::TypeDefinition(tydef)) => Some(tydef),
                _ => None,
            }
        })
        .or_else(|| {
            // If the type is not in the current environment, we can look in the
            // on-demand import packages.
            // It's necessary to look through every package to check for
            // ambiguities.
            let mut found_type: Option<TypeDefinitionRef<'a, 'ast>> = None;
            for package in self.on_demand_packages.iter() {
                match package.contents.borrow().get(&ty.node) {
                    Some(&PackageItem::TypeDefinition(typedef)) => {
                        // If we already have a type, then there's an ambiguity.
                        if let Some(existing) = found_type {
                            span_error!(ty.span,
                                        "ambiguous type name `{}`: could refer to `{}` or `{}`",
                                        ty,
                                        typedef.fq_name,
                                        existing.fq_name);
                            found_type = None;
                            break;
                        } else {
                            found_type = Some(typedef);
                        }
                    },
                    // Ignore subpackages.
                    Some(_) => {},
                    None => {},
                }
            }
            found_type
        })
}
/// Type-checks `texpr.name`: a field access on an expression. Returns
/// the typed expression and its type, or `None` after reporting an
/// error. Arrays expose only the intrinsic `length` member.
pub fn resolve_field_access(&self, span: Span, texpr: TypedExpression<'a, 'ast>, name: &Ident)
        -> Option<(TypedExpression_<'a, 'ast>, Type<'a, 'ast>)> {
    match texpr.ty {
        Type::SimpleType(SimpleType::Other(tyref)) => {
            if let Some(&field) = tyref.fields.get(&name.node) {
                self.check_field_access_allowed(span, field, tyref);
                // Can't use static fields on expressions like a.MAX_VALUE
                if field.is_static() {
                    span_error!(span, "using static field `{}` on instance", name);
                }
                Some((TypedExpression_::FieldAccess(box texpr, field),
                      field.ty.clone()))
            } else {
                span_error!(span,
                            "reference type `{}` has no field `{}`",
                            tyref.fq_name, name);
                None
            }
        }
        Type::SimpleType(_) => {
            span_error!(span,
                        "primitive type `{}` has no field `{}`",
                        texpr.ty, name);
            None
        }
        Type::ArrayType(_) => {
            // FIXME: Use intrinsics (?) or something
            if name.node == Symbol::from_str("length") {
                Some((TypedExpression_::ArrayLength(box texpr),
                      Type::SimpleType(SimpleType::Int)))
            } else {
                span_error!(span,
                            "array type `{}` has no field `{}`",
                            texpr.ty, name);
                None
            }
        }
        Type::Null => {
            span_error!(span,
                        "`null` has no field `{}`",
                        name);
            None
        }
        Type::Void => {
            span_error!(span, "void type has no fields");
            None
        }
        // An error was already reported for Unknown; stay silent.
        Type::Unknown => None
    }
}
// Resolve a named method.
// Dispatches on the shape of the name per JLS $15.12.1: a simple name
// targets the enclosing type; a qualified name's prefix may be a type
// (static call) or an expression (instance call). Static-ness is
// validated against the call form after resolution.
pub fn resolve_named_method_access(&self, span: Span,
                                   require_static: bool,
                                   name: &QualifiedIdentifier,
                                   targ_exprs: Vec<TypedExpression<'a, 'ast>>)
        -> Option<(TypedExpression_<'a, 'ast>, Type<'a, 'ast>)> {
    // ($15.12.1) Rules for handling named methods.
    match &name.parts[..] {
        [ref ident] => {
            // "If it is a simple name, that is, just an Identifier,
            // then the name of the method is the Identifier."
            let resolved = self.resolve_typedef_method_access(span, self.enclosing_type,
                                                             ident, targ_exprs);
            if let Some((TypedExpression_::MethodInvocation(_, _, method, _), _)) = resolved {
                if !method.is_static && require_static {
                    span_error!(span, "calling non-static implicit this method on type");
                }
                if method.is_static {
                    span_error!(span, "calling static method without naming class");
                }
            }
            resolved
        },
        [init.., ref last] => {
            // "If it is a qualified name of the form TypeName . Identifier"
            // "In all other cases, the qualified name has the
            // form FieldName . Identifier" (note" spec has a mistake, it
            // can be more than just fields - e.g. local variables)
            match self.resolve_ambiguous_path(init) {
                AmbiguousResult::Type(typedef) => {
                    let resolved = self.resolve_typedef_method_access(span, typedef,
                                                                     last, targ_exprs);
                    if let Some((TypedExpression_::MethodInvocation(_, _, method, _), _)) = resolved {
                        if !method.is_static {
                            span_error!(span, "calling non-static method on type");
                        }
                    }
                    resolved
                },
                AmbiguousResult::Expression(texpr) => {
                    let resolved = self.resolve_expr_method_access(span, texpr,
                                                                   last, targ_exprs);
                    if let Some((TypedExpression_::MethodInvocation(_, _, method, _), _)) = resolved {
                        if method.is_static {
                            span_error!(span, "calling static method on instance");
                        }
                    }
                    resolved
                },
                _ => {
                    span_error!(span, "no type for method invocation found");
                    None
                },
            }
        },
        [] => {
            panic!("empty name?")
        },
    }
}
/// Reports an error if a protected field is accessed from outside its
/// package without the subtype relationships $6.6.2 requires.
fn check_field_access_allowed(&self, span: Span, field: FieldRef<'a, 'ast>,
                              tyref: TypeDefinitionRef<'a, 'ast>) {
    // ($6.6.2) Protected access rules.
    // Access to protected members allowed within the same package
    // or in subtypes.
    if field.is_protected() && self.package != tyref.package {
        if !self.is_subtype(self.enclosing_type, field.origin) ||
           (!field.is_static() && !self.is_subtype(tyref, self.enclosing_type)) {
            span_error!(span,
                        "cannot access protected field `{}` of `{}`",
                        field.fq_name, tyref.fq_name);
        }
    }
}
// ($6.2.2) Checks that we can access a given method.
// Same rule as protected fields: cross-package access to a protected
// method requires the appropriate subtype relationships.
fn check_method_access_allowed(&self, span: Span,
                               method: MethodRef<'a, 'ast>,
                               tyref: TypeDefinitionRef<'a, 'ast>) {
    if let Protected(defining_type) = method.accessibility {
        if self.package != tyref.package
            && (!self.is_subtype(self.enclosing_type, defining_type) ||
                (!method.is_static && !self.is_subtype(tyref, self.enclosing_type))) {
            span_error!(span,
                        "cannot access protected method `{}` of `{}` defined in `{}`",
                        method.fq_name, tyref.fq_name, defining_type.fq_name);
        }
    }
}
/// Protected constructors are only accessible from inside the same
/// package. Returns whether the access is allowed (reporting an error
/// when it is not).
pub fn check_constructor_access_allowed(&self, span: Span, constructor: ConstructorRef<'a, 'ast>,
                                        tyref: TypeDefinitionRef<'a, 'ast>) -> bool {
    if constructor.is_protected() && self.package != tyref.package {
        span_error!(span, "cannot access protected constructor of `{}`",
                    tyref.fq_name);
        false
    } else {
        true
    }
}
// Resolve a method that is called on an expression.
// Looks the (name, argument types) signature up on the receiver's type;
// array receivers fall back to Object's methods.
pub fn resolve_expr_method_access(&self, span: Span, texpr: TypedExpression<'a, 'ast>,
                                  name: &Ident, targ_exprs: Vec<TypedExpression<'a, 'ast>>)
        -> Option<(TypedExpression_<'a, 'ast>, Type<'a, 'ast>)> {
    let arg_types: Vec<_> = targ_exprs.iter()
        .map(|expr| expr.ty.clone())
        .collect();
    let signature = MethodSignature { name: name.node, args: arg_types };
    match texpr.ty {
        Type::SimpleType(SimpleType::Other(tyref)) => {
            if let Some(&method) = tyref.methods.get(&signature) {
                self.check_method_access_allowed(span, method, tyref);
                Some((TypedExpression_::MethodInvocation(Some(box texpr),
                                                         signature,
                                                         method,
                                                         targ_exprs),
                      method.ret_ty.clone()))
            } else {
                span_error!(span,
                            "reference type `{}` has no method `{}`",
                            tyref.fq_name, signature);
                None
            }
        }
        Type::SimpleType(_) => {
            span_error!(span,
                        "primitive type `{}` has no method `{}`",
                        texpr.ty, signature);
            None
        }
        Type::ArrayType(_) => {
            // Array types have the same methods as Object does.
            self.resolve_typedef_method_access(span, self.lang_items.object,
                                               name, targ_exprs)
        }
        Type::Null => {
            span_error!(span,
                        "`null` has no method `{}`",
                        signature);
            None
        }
        Type::Void => {
            span_error!(span, "void type has no fields");
            None
        }
        // An error was already reported for Unknown; stay silent.
        Type::Unknown => None
    }
}
// Resolve a method that is called directly.
// (i.e. with no receiver expression — a static call or an implicit-this
// call on `tyref`.)
fn resolve_typedef_method_access(&self, span: Span, tyref: TypeDefinitionRef<'a, 'ast>,
                                 name: &Ident, targ_exprs: Vec<TypedExpression<'a, 'ast>>)
        -> Option<(TypedExpression_<'a, 'ast>, Type<'a, 'ast>)> {
    let arg_types: Vec<_> = targ_exprs.iter()
        .map(|expr| expr.ty.clone())
        .collect();
    let signature = MethodSignature { name: name.node, args: arg_types };
    if let Some(&method) = tyref.methods.get(&signature) {
        self.check_method_access_allowed(span, method, tyref);
        Some((TypedExpression_::MethodInvocation(None, signature, method, targ_exprs),
              method.ret_ty.clone()))
    } else {
        span_error!(span,
                    "reference type `{}` has no method `{}`",
                    tyref.fq_name, name);
        None
    }
}
// Resolve a simple name as a variable.
// Locals shadow fields (the `variables` map already encodes that);
// a field resolves to a static or implicit-`this` access.
// DOES NOT emit any error.
fn resolve_variable(&self, ident: &Ident)
        -> Option<(TypedExpression_<'a, 'ast>, Type<'a, 'ast>)> {
    match self.variables.get(&ident.node) {
        Some(&Variable::LocalVariable(var)) => {
            // TODO: read the spec
            Some((TypedExpression_::Variable(var), var.ty.clone()))
        }
        Some(&Variable::Field(field)) => {
            if field.is_static() {
                Some((TypedExpression_::StaticFieldAccess(field), field.ty.clone()))
            } else {
                // FIXME: `this` existence is only checked later
                Some((TypedExpression_::ThisFieldAccess(field), field.ty.clone()))
            }
        }
        None => {
            None
        }
    }
}
// Convert a qualified identifier to an expression.
// A simple name must be a variable; a qualified name's prefix is
// resolved ambiguously (package/type/expression) and the final
// component becomes a static field or instance field access.
// Returns `None` to indicate that no viable interpretation was found.
// In this case, an error was already emitted.
pub fn resolve_expression(&self, path: &QualifiedIdentifier)
        -> Option<(TypedExpression_<'a, 'ast>, Type<'a, 'ast>)> {
    let span = path.into_span();
    match &*path.parts {
        [] => unreachable!(),
        [ref ident] => {
            let ret = self.resolve_variable(ident);
            if let None = ret {
                span_error!(span, "no such variable `{}`", ident);
            }
            ret
        }
        [init.., ref last] => {
            match self.resolve_ambiguous_path(init) {
                AmbiguousResult::Package(package) => {
                    // Packages have no members usable in expressions.
                    span_error!(span,
                                "cannot take member `{}` of package `{}`",
                                last, package.fq_name);
                    None
                }
                AmbiguousResult::Type(tyref) => {
                    // TypeName.field must name a static field.
                    if let Some(&field) = tyref.fields.get(&last.node) {
                        self.check_field_access_allowed(span, field, tyref);
                        if !field.is_static() {
                            span_error!(span, "using non-static field `{}` on type", last);
                        }
                        Some((TypedExpression_::StaticFieldAccess(field),
                              field.ty.clone()))
                    } else {
                        span_error!(span,
                                    "no such field `{}` in type `{}`",
                                    last, tyref.fq_name);
                        None
                    }
                }
                AmbiguousResult::Expression(expr) => {
                    self.resolve_field_access(path.into_span(), expr, last)
                }
                AmbiguousResult::Unknown => None
            }
        }
    }
}
    // Returns `Unknown` iff an error is emitted.
    //
    // Disambiguates a dotted name prefix into a package, a type, or an
    // expression, recursing on the path's own prefix. Precedence for a
    // single identifier is: variable, then type, then top-level package
    // (mirroring Java's ambiguous-name rules).
    fn resolve_ambiguous_path(&self, path: &[Ident]) -> AmbiguousResult<'a, 'ast> {
        let span = Span::range(path.first().unwrap(), path.last().unwrap());
        match path {
            [] => unreachable!(),
            [ref ident] => {
                if let Some(expr) = self.resolve_variable(ident) {
                    // NOTE(review): for a one-element path this span is just
                    // `ident.span`; the range form mirrors the general case.
                    let expr = TypedExpression::spanned(Span::range(path.first().unwrap().span,
                                                                    path.last().unwrap().span),
                                                       expr);
                    AmbiguousResult::Expression(expr)
                } else if let Some(tydef) = self.find_type(ident) {
                    AmbiguousResult::Type(tydef)
                } else if let Some(&PackageItem::Package(package))
                        = self.toplevel.contents.borrow().get(&ident.node) {
                    // note: `toplevel` can only contain other packages, no types
                    AmbiguousResult::Package(package)
                } else {
                    span_error!(ident.span,
                                "no variable, type, or package named `{}`",
                                ident);
                    AmbiguousResult::Unknown
                }
            }
            [init.., ref last] => {
                match self.resolve_ambiguous_path(init) {
                    AmbiguousResult::Expression(expr) => {
                        // expression `.` ident => instance field access
                        if let Some(expr) = self.resolve_field_access(span, expr, last) {
                            AmbiguousResult::Expression(TypedExpression::spanned(span, expr))
                        } else {
                            AmbiguousResult::Unknown
                        }
                    }
                    AmbiguousResult::Type(tydef) => {
                        // type `.` ident => static field access
                        match tydef.fields.get(&last.node) {
                            Some(&field) => {
                                let expr_ = (TypedExpression_::StaticFieldAccess(field),
                                             field.ty.clone());
                                AmbiguousResult::Expression(TypedExpression::spanned(span, expr_))
                            }
                            None => {
                                span_error!(span,
                                            "no field `{}` found in type `{}`",
                                            last, tydef.fq_name);
                                AmbiguousResult::Unknown
                            }
                        }
                    }
                    AmbiguousResult::Package(package) => {
                        // package `.` ident => sub-package or type
                        match package.contents.borrow().get(&last.node) {
                            Some(&PackageItem::Package(subpackage)) =>
                                AmbiguousResult::Package(subpackage),
                            Some(&PackageItem::TypeDefinition(tydef)) =>
                                AmbiguousResult::Type(tydef),
                            None => {
                                span_error!(span,
                                            "no `{}` found in package `{}`",
                                            last, package.fq_name);
                                AmbiguousResult::Unknown
                            }
                        }
                    }
                    // Error already reported while resolving the prefix.
                    AmbiguousResult::Unknown => AmbiguousResult::Unknown,
                }
            }
        }
    }
    // Bring every field recorded in `tydef.fields` into this environment's
    // variable scope.
    fn add_fields(&mut self, tydef: TypeDefinitionRef<'a, 'ast>) {
        for (&name, &field) in tydef.fields.iter() {
            self.add_field(name, field);
        }
    }
    // Insert one field into the variable scope. Two fields with the same
    // name should have been rejected earlier, hence the panic on collision.
    fn add_field(&mut self, name: Symbol, field: FieldRef<'a, 'ast>) {
        if let Some(_) = self.variables.insert_in_place(name, Variable::Field(field)) {
            panic!("bug: multiple fields with the same name in scope, somehow");
        }
    }
    // Declare a local variable in the current scope.
    //
    // Redeclaring a local of the same name is an error; shadowing a *field*
    // of the same name is silently allowed (the `if let` only fires for a
    // previous `LocalVariable` binding).
    pub fn add_var(&mut self, name: Symbol, var: VariableRef<'a, 'ast>) {
        if let Some(old) = self.variables.insert_in_place(name, Variable::LocalVariable(var)) {
            if let (_, Variable::LocalVariable(v)) = *old {
                span_error!(var.ast.span,
                            "variable `{}` already defined",
                            name);
                span_note!(v.ast.span, "the old definition is here");
            }
        }
    }
}
// Look up a package by its fully-qualified name.
//
// Walks `id` segment by segment starting from `toplevel`. Emits an error
// and returns `None` if a segment is missing or names a type instead of a
// package; the error span covers the offending prefix of `id`.
fn resolve_package<'a, 'ast>(toplevel: PackageRef<'a, 'ast>, id: &[Ident]) -> Option<PackageRef<'a, 'ast>> {
    let mut package = toplevel;
    for (ix, ident) in id.iter().enumerate() {
        package = match package.contents.borrow().get(&ident.node) {
            Some(&PackageItem::Package(it)) => {
                it // Found it
            }
            Some(&PackageItem::TypeDefinition(..)) => {
                // There was a type instead!
                span_error!(Span::range(&id[0], &id[ix]),
                            "no such package `{}`; found a type instead",
                            Qualified(id[0..ix+1].iter()));
                return None
            }
            None => {
                span_error!(Span::range(&id[0], &id[ix]),
                            "no such package `{}`",
                            Qualified(id[0..ix+1].iter()));
                return None
            }
        };
    }
    Some(package)
}
// Add the type declared in the current file to the type environment,
// returning the extended (persistent) environment.
//
// A conflict is reported when the name was already bound to a *different*
// type (e.g. by an import); re-binding the same fully-qualified type is fine.
fn insert_declared_type<'a, 'ast>(env: &TypesEnvironment<'a, 'ast>,
                                  ident: &Ident,
                                  typedef: TypeDefinitionRef<'a, 'ast>) -> TypesEnvironment<'a, 'ast> {
    let (new_env, previous) = env.insert(ident.node, typedef);
    if let Some(&(_, ref previous_item)) = previous {
        if previous_item.fq_name != typedef.fq_name {
            // TODO: Shouldn't continue after this error - how to do that?
            span_error!(ident.span,
                        "type `{}` declared in this file conflicts with import `{}`",
                        ident,
                        previous_item.fq_name);
        }
    }
    new_env
}
// Bind `symbol` to an imported type, returning the extended environment.
//
// Importing the same fully-qualified type twice is tolerated; binding the
// symbol to a different type than a previous import is an error.
fn insert_type_import<'a, 'ast>(symbol: Symbol,
                                typedef: TypeDefinitionRef<'a, 'ast>,
                                imported: &QualifiedIdentifier,
                                current_env: TypesEnvironment<'a, 'ast>)
        -> TypesEnvironment<'a, 'ast> {
    let (new_env, previous_opt) = current_env.insert(symbol, typedef);
    if let Some(previous) = previous_opt {
        if previous.1.fq_name != typedef.fq_name {
            span_error!(imported,
                        "importing `{}` from `{}` conflicts with previous import",
                        symbol,
                        imported);
        }
    }
    new_env
}
// Process a single-type import declaration (`import foo.bar.Baz;`),
// binding the type's simple name in the returned environment.
//
// Emits an error and returns the environment unchanged when the name is
// not fully qualified, the package does not exist, or the last segment
// does not name a type in that package.
fn import_single_type<'a, 'ast>(imported: &QualifiedIdentifier,
                                toplevel: PackageRef<'a, 'ast>,
                                current_env: TypesEnvironment<'a, 'ast>)
        -> TypesEnvironment<'a, 'ast> {
    match &*imported.parts {
        [] => panic!("impossible: imported empty type"),
        [ref id] => {
            // A bare identifier cannot be resolved to a package member.
            // (Fixed error message: was "must fully qualified".)
            span_error!(id.span,
                        "imported type name must be fully qualified");
            current_env
        },
        // FIXME: Deduplicate this code with `resolve_type_name`
        // (factor into `resolve_fq_type_name` or something)
        [init.., ref last] => match resolve_package(toplevel, init) {
            // `resolve_package` already reported the error.
            None => current_env,
            Some(package) => match package.contents.borrow().get(&last.node) {
                Some(&PackageItem::TypeDefinition(tydef)) => {
                    insert_type_import(last.node, tydef, imported, current_env)
                }
                _ => {
                    span_error!(Span::range(&init[0], last),
                                "no such type `{}` in package `{}`",
                                last, Qualified(init.iter()));
                    current_env
                }
            }
        },
    }
}
// Process an on-demand import (`import foo.bar.*;`) by recording the
// package for later wildcard lookup. Errors (missing package) are reported
// by `resolve_package`; nothing is recorded in that case.
fn import_on_demand<'a, 'ast>(imported: &QualifiedIdentifier,
                              toplevel: PackageRef<'a, 'ast>,
                              on_demand_packages: &mut Vec<PackageRef<'a, 'ast>>) {
    if let Some(package) = resolve_package(toplevel, &*imported.parts) {
        on_demand_packages.push(package);
    }
}
// Depth-first search step of the inheritance topological sort.
//
// `seen` holds every node ever entered, `visited` only nodes whose DFS has
// completed; re-entering a node that is in `seen` but not `visited` means
// it is still on the current path, i.e. there is an inheritance cycle.
// `stack` mirrors the DFS path purely for the error message. Completed
// nodes are appended to `sorted` post-order (parents before children).
fn inheritance_topological_sort_search<'a, 'ast>(typedef: TypeDefinitionRef<'a, 'ast>,
                                                 seen: &mut HashSet<Name>,
                                                 visited: &mut HashSet<Name>,
                                                 stack: &mut Vec<Name>,
                                                 sorted: &mut Vec<Name>)
        -> Result<(), ()> {
    let parents = typedef.extends.iter().chain(typedef.implements.iter());
    stack.push(typedef.fq_name);
    if !seen.insert(typedef.fq_name) {
        span_error!(typedef.ast.span,
                    "found an inheritance cycle: {:?}",
                    stack);
        return Err(());
    }
    for parent in parents {
        if !visited.contains(&parent.fq_name) {
            try!(inheritance_topological_sort_search(*parent, seen,
                                                     visited, stack, sorted));
        }
    }
    sorted.push(typedef.fq_name);
    visited.insert(typedef.fq_name);
    stack.pop();
    Ok(())
}
// Order the preprocessed types so every type appears after its supertypes.
// Returns `None` when an inheritance cycle was found (an error has been
// emitted in that case).
fn inheritance_topological_sort<'a, 'ast>(preprocessed_types: &[TypeEnvironmentPair<'a, 'ast>])
        -> Option<Vec<TypeEnvironmentPair<'a, 'ast>>> {
    // To find items in processed_types by fully-qualified names.
    let mut lookup = HashMap::new();
    for &(typedef, ref env) in preprocessed_types.iter() {
        lookup.insert(typedef.fq_name, (typedef, env.clone()));
    }
    let mut sorted: Vec<Name> = vec![];
    let mut seen: HashSet<Name> = HashSet::new();
    let mut visited: HashSet<Name> = HashSet::new();
    // Keep track of the depth-first search stack for error message
    // purposes (it shows the user where the cycle is).
    let mut stack: Vec<Name> = vec![];
    for &(typedef, _) in preprocessed_types.iter() {
        if !visited.contains(&typedef.fq_name) {
            let result = inheritance_topological_sort_search(
                typedef, &mut seen, &mut visited,
                &mut stack, &mut sorted);
            if let Err(_) = result {
                return None;
            }
        }
    }
    // Map the sorted names back to their (type, environment) pairs; every
    // name in `sorted` came from `preprocessed_types`, so `unwrap` is safe.
    Some(sorted.iter().map(|name| lookup.get(name).unwrap().clone()).collect())
}
// Build a name-resolution environment for each compilation unit, resolve
// inheritance in supertype-first order, collect type members, and return
// the list of (environment, member) pairs still to be populated.
fn build_environments<'a, 'ast>(arena: &'a Arena<'a, 'ast>,
                                toplevel: PackageRef<'a, 'ast>,
                                lang_items: &LangItems<'a, 'ast>,
                                units: &[(PackageRef<'a, 'ast>, &'ast ast::CompilationUnit, TypeDefinitionRef<'a, 'ast>)])
        -> Vec<(Environment<'a, 'ast>, ToPopulate<'a, 'ast>)> {
    let mut preprocessed_types = vec![];
    for &(package, ast, tydef) in units.iter() {
        let mut types_env: TypesEnvironment<'a, 'ast> = RbMap::new();
        // `java.lang` is implicitly imported on demand in every unit.
        let mut on_demand_packages = vec![lang_items.lang];
        // Add all imports to initial environment for this compilation unit.
        for import in ast.imports.iter() {
            match import.node {
                ast::ImportDeclaration_::SingleType(ref qident) => {
                    types_env = import_single_type(qident,
                                                   toplevel,
                                                   types_env);
                },
                ast::ImportDeclaration_::OnDemand(ref qident) => {
                    import_on_demand(qident,
                                     toplevel,
                                     &mut on_demand_packages);
                },
            }
        }
        // Uniquify `on_demand_packages` (keyed by fully-qualified name).
        let on_demand_packages = on_demand_packages.into_iter()
            .map(|package| (package.fq_name, package))
            .collect::<HashMap<_, _> >()
            .into_iter()
            .map(|(_, package)| package)
            .collect();
        let env = Environment {
            types: types_env,
            variables: RbMap::new(),
            toplevel: toplevel,
            package: package,
            enclosing_type: tydef,
            lang_items: lang_items.clone(),
            on_demand_packages: on_demand_packages,
        };
        env.resolve_inheritance(tydef);
        preprocessed_types.push((tydef, env));
        if tydef == lang_items.object {
            // Make sure `java.lang.Object` is processed first...
            // XXX: This is such a hack!
            let ix = preprocessed_types.len()-1;
            preprocessed_types.swap(0, ix);
        }
    }
    // Supertypes must be fully processed before their subtypes; abort on
    // inheritance cycles (errors were already emitted).
    if let Some(sorted) = inheritance_topological_sort(&preprocessed_types) {
        preprocessed_types = sorted;
    } else {
        return vec![];
    }
    let mut r = vec![];
    for (tydef, mut env) in preprocessed_types.into_iter() {
        let name = tydef.ast.name();
        // Add the type itself to the environment.
        env.types = insert_declared_type(&env.types, name, tydef);
        let to_populate = collect_members(arena, &env, tydef, lang_items);
        env.add_fields(tydef);
        if tydef.kind == TypeKind::Class {
            // ($8.1.1.1) well-formedness constraint 4 - abstract methods => abstract class
            // NOTE(review): this `matches!` takes the pattern first — it is a
            // project-local macro, not the std one; confirm before changing.
            let should_be_abstract =
                tydef.methods.iter()
                .any(|(_, &method)| matches!(Abstract, method.impled));
            if should_be_abstract && !tydef.has_modifier(ast::Modifier_::Abstract) {
                span_error!(tydef.ast.span,
                            "class with abstract methods must be abstract");
            }
        }
        for v in to_populate.into_iter() {
            r.push((env.clone(), v));
        }
    }
    r
}
/// A type member whose body/initializer still needs to be resolved
/// (populated) once all environments are built.
pub enum ToPopulate<'a, 'ast: 'a> {
    Method(MethodImplRef<'a, 'ast>),
    Constructor(ConstructorRef<'a, 'ast>),
    Field(FieldRef<'a, 'ast>),
}
// Second pass: resolve the bodies of all collected members, each in the
// environment of its enclosing compilation unit.
fn populate<'a, 'ast>(arena: &'a Arena<'a, 'ast>,
                      methods: Vec<(Environment<'a, 'ast>, ToPopulate<'a, 'ast>)>,
                      lang_items: &LangItems<'a, 'ast>) {
    for (env, thing) in methods.into_iter() {
        match thing {
            ToPopulate::Method(method) => populate_method(arena, env, method, lang_items),
            ToPopulate::Constructor(constructor) => populate_constructor(arena, env, constructor, lang_items),
            ToPopulate::Field(field) => populate_field(arena, env, field, lang_items),
        }
    }
}
// Entry point of name resolution: collect all declared types, locate the
// language items (java.lang.*), build per-unit environments, and populate
// member bodies. The type from the first compilation unit (`types[0].2`)
// is taken as the program's main type.
pub fn name_resolve<'a, 'ast>(arena: &'a Arena<'a, 'ast>, asts: &'ast [ast::CompilationUnit])
        -> Universe<'a, 'ast> {
    let toplevel = arena.alloc(Package::new("top level".to_owned()));
    let default_package = arena.alloc(Package::new("default package".to_owned()));
    let types = collect_types(arena, toplevel, default_package, asts);
    let lang_items = find_lang_items(toplevel);
    let methods = build_environments(arena, toplevel, &lang_items, &*types);
    populate(arena, methods, &lang_items);
    Universe { toplevel: toplevel, default: default_package, main: types[0].2 }
}
|
/// Placeholder configuration service.
pub struct ConfigService {}
impl ConfigService {
    /// No-op stub; creation logic is not implemented yet.
    pub fn create(&self) {}
}
|
/// Defines whether a term in a query must be present,
/// should be present or must not be present.
#[derive(Debug, Clone, Hash, Copy, Eq, PartialEq)]
pub enum Occur {
    /// For a given document to be considered for scoring,
    /// at least one of the terms with the `Should` or the `Must`
    /// Occur constraint must be within the document.
    Should,
    /// Documents without the term are excluded from the search.
    Must,
    /// Documents that contain the term are excluded from the
    /// search.
    MustNot,
}
|
// NOTE(review): svd2rust-generated accessors for the PWR BDCR register.
// Field documentation comes from the device SVD; regenerate rather than
// hand-editing the doc strings.
#[doc = "Register `BDCR` reader"]
pub type R = crate::R<BDCR_SPEC>;
#[doc = "Register `BDCR` writer"]
pub type W = crate::W<BDCR_SPEC>;
#[doc = "Field `BREN` reader - Backup RAM retention in Standby and V<sub>BAT</sub> modes When this bit set, the backup regulator (used to maintain the backup RAM content in Standby and V<sub>BAT</sub> modes) is enabled. If BREN is cleared, the backup regulator is switched off. The backup RAM can still be used in Run and Stop modes. However its content is lost in Standby and V<sub>BAT</sub> modes. If BREN is set, the application must wait till the backup regulator ready flag (BRRDY) is set to indicate that the data written into the SRAM is maintained in Standby and V<sub>BAT</sub> modes."]
pub type BREN_R = crate::BitReader;
#[doc = "Field `BREN` writer - Backup RAM retention in Standby and V<sub>BAT</sub> modes When this bit set, the backup regulator (used to maintain the backup RAM content in Standby and V<sub>BAT</sub> modes) is enabled. If BREN is cleared, the backup regulator is switched off. The backup RAM can still be used in Run and Stop modes. However its content is lost in Standby and V<sub>BAT</sub> modes. If BREN is set, the application must wait till the backup regulator ready flag (BRRDY) is set to indicate that the data written into the SRAM is maintained in Standby and V<sub>BAT</sub> modes."]
pub type BREN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `MONEN` reader - Backup domain voltage and temperature monitoring enable"]
pub type MONEN_R = crate::BitReader;
#[doc = "Field `MONEN` writer - Backup domain voltage and temperature monitoring enable"]
pub type MONEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VBE` reader - V<sub>BAT</sub> charging enable Note: Reset only by POR,."]
pub type VBE_R = crate::BitReader;
#[doc = "Field `VBE` writer - V<sub>BAT</sub> charging enable Note: Reset only by POR,."]
pub type VBE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VBRS` reader - V<sub>BAT</sub> charging resistor selection"]
pub type VBRS_R = crate::BitReader;
#[doc = "Field `VBRS` writer - V<sub>BAT</sub> charging resistor selection"]
pub type VBRS_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts a single bit of the cached register value.
impl R {
    #[doc = "Bit 0 - Backup RAM retention in Standby and V<sub>BAT</sub> modes When this bit set, the backup regulator (used to maintain the backup RAM content in Standby and V<sub>BAT</sub> modes) is enabled. If BREN is cleared, the backup regulator is switched off. The backup RAM can still be used in Run and Stop modes. However its content is lost in Standby and V<sub>BAT</sub> modes. If BREN is set, the application must wait till the backup regulator ready flag (BRRDY) is set to indicate that the data written into the SRAM is maintained in Standby and V<sub>BAT</sub> modes."]
    #[inline(always)]
    pub fn bren(&self) -> BREN_R {
        BREN_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Backup domain voltage and temperature monitoring enable"]
    #[inline(always)]
    pub fn monen(&self) -> MONEN_R {
        MONEN_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 8 - V<sub>BAT</sub> charging enable Note: Reset only by POR,."]
    #[inline(always)]
    pub fn vbe(&self) -> VBE_R {
        VBE_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - V<sub>BAT</sub> charging resistor selection"]
    #[inline(always)]
    pub fn vbrs(&self) -> VBRS_R {
        VBRS_R::new(((self.bits >> 9) & 1) != 0)
    }
}
// Write accessors: each returns a proxy that sets the field at its bit
// offset (the const generic parameter).
impl W {
    #[doc = "Bit 0 - Backup RAM retention in Standby and V<sub>BAT</sub> modes When this bit set, the backup regulator (used to maintain the backup RAM content in Standby and V<sub>BAT</sub> modes) is enabled. If BREN is cleared, the backup regulator is switched off. The backup RAM can still be used in Run and Stop modes. However its content is lost in Standby and V<sub>BAT</sub> modes. If BREN is set, the application must wait till the backup regulator ready flag (BRRDY) is set to indicate that the data written into the SRAM is maintained in Standby and V<sub>BAT</sub> modes."]
    #[inline(always)]
    #[must_use]
    pub fn bren(&mut self) -> BREN_W<BDCR_SPEC, 0> {
        BREN_W::new(self)
    }
    #[doc = "Bit 1 - Backup domain voltage and temperature monitoring enable"]
    #[inline(always)]
    #[must_use]
    pub fn monen(&mut self) -> MONEN_W<BDCR_SPEC, 1> {
        MONEN_W::new(self)
    }
    #[doc = "Bit 8 - V<sub>BAT</sub> charging enable Note: Reset only by POR,."]
    #[inline(always)]
    #[must_use]
    pub fn vbe(&mut self) -> VBE_W<BDCR_SPEC, 8> {
        VBE_W::new(self)
    }
    #[doc = "Bit 9 - V<sub>BAT</sub> charging resistor selection"]
    #[inline(always)]
    #[must_use]
    pub fn vbrs(&mut self) -> VBRS_W<BDCR_SPEC, 9> {
        VBRS_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: bypasses the per-field API; the caller is responsible for
    // writing a bit pattern that is valid for this register (TODO confirm
    // reserved-bit behavior against the reference manual).
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "PWR Backup domain control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`bdcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`bdcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct BDCR_SPEC;
impl crate::RegisterSpec for BDCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`bdcr::R`](R) reader structure"]
impl crate::Readable for BDCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`bdcr::W`](W) writer structure"]
impl crate::Writable for BDCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets BDCR to value 0"]
impl crate::Resettable for BDCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// NOTE(review): svd2rust-generated (older svd2rust API) reader for the
// read-only PIDR4 peripheral-identification register.
#[doc = "Reader of register PIDR4"]
pub type R = crate::R<u32, super::PIDR4>;
#[doc = "Reader of field `PIDR4`"]
pub type PIDR4_R = crate::R<u32, u32>;
impl R {
    #[doc = "Bits 0:31 - peripheral ID4"]
    #[inline(always)]
    pub fn pidr4(&self) -> PIDR4_R {
        // The field spans the whole register; the mask is a no-op kept for
        // generator uniformity.
        PIDR4_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
|
use std::sync::Arc;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering;
use std::thread::current;
use crate::mechatronics::bucket_ladder::state::GlobalIntakeState;
use crate::mechatronics::bucket_ladder::state::IntakeStateInstance;
use crate::mechatronics::drive_train::state::DriveTrainStateInstance;
use crate::mechatronics::drive_train::state::GlobalDriveTrainState;
use crate::mechatronics::dumper::state::DumperStateInstance;
use crate::mechatronics::dumper::state::GlobalDumperState;
use crate::status::current::{CurrentStateJson, GlobalCurrentState};
use crate::status::life::GlobalLifeState;
use crate::status::life::LifeStateInstance;
/// Shared, thread-safe container for all of the robot's subsystem state.
/// Every field is wrapped in an `Arc` so each subsystem/thread can hold
/// its own handle to the same underlying state.
pub struct GlobalRobotState {
    life: Arc<GlobalLifeState>,
    current: Arc<GlobalCurrentState>,
    drive: Arc<GlobalDriveTrainState>,
    dumper: Arc<GlobalDumperState>,
    intake: Arc<GlobalIntakeState>,
    // Loop-rate telemetry counters.
    cycles_per_sec: Arc<AtomicUsize>,
    cycle_counter: Arc<AtomicUsize>,
}
impl GlobalRobotState {
    /// Creates a state container with every subsystem in its initial state
    /// and both cycle counters at zero.
    pub fn new() -> Self {
        Self {
            life: Arc::new(GlobalLifeState::new()),
            current: Arc::new(GlobalCurrentState::new()),
            drive: Arc::new(GlobalDriveTrainState::new()),
            dumper: Arc::new(GlobalDumperState::new()),
            intake: Arc::new(GlobalIntakeState::new()),
            cycles_per_sec: Arc::new(AtomicUsize::new(0)),
            cycle_counter: Arc::new(AtomicUsize::new(0)),
        }
    }
    /// Returns a shared handle to the life state.
    pub fn get_life(&self) -> Arc<GlobalLifeState> {
        Arc::clone(&self.life)
    }
    /// Returns a shared handle to the current-draw state.
    pub fn get_current(&self) -> Arc<GlobalCurrentState> {
        Arc::clone(&self.current)
    }
    /// Returns a shared handle to the drive-train state.
    pub fn get_drive(&self) -> Arc<GlobalDriveTrainState> {
        Arc::clone(&self.drive)
    }
    /// Returns a shared handle to the dumper state.
    pub fn get_dumper(&self) -> Arc<GlobalDumperState> {
        Arc::clone(&self.dumper)
    }
    /// Returns a shared handle to the intake state.
    pub fn get_intake(&self) -> Arc<GlobalIntakeState> {
        Arc::clone(&self.intake)
    }
    /// Takes a point-in-time snapshot of every subsystem plus the cycle
    /// counters, suitable for serialization.
    pub fn get_current_state(&self) -> RobotStateInstance {
        let life = self.life.get_current_state();
        let current = self.current.get_json();
        let drive = self.drive.get_current_state();
        let dumper = self.dumper.get_current_state();
        let intake = self.intake.get_current_state();
        let cycles_per_sec = self.cycles_per_sec.load(Ordering::SeqCst);
        let cycle_counter = self.cycle_counter.load(Ordering::SeqCst);
        RobotStateInstance::new(life, current, drive, dumper, intake,
                                cycles_per_sec, cycle_counter)
    }
    /// Returns a shared handle to the cycles-per-second counter.
    pub fn get_cycles_per_second(&self) -> Arc<AtomicUsize> {
        Arc::clone(&self.cycles_per_sec)
    }
    /// Returns a shared handle to the total cycle counter.
    pub fn get_cycle_counter(&self) -> Arc<AtomicUsize> {
        Arc::clone(&self.cycle_counter)
    }
}
/// Serializable point-in-time snapshot of the full robot state, produced
/// by `GlobalRobotState::get_current_state`.
#[derive(Serialize)]
pub struct RobotStateInstance {
    life: LifeStateInstance,
    current: CurrentStateJson,
    drive: DriveTrainStateInstance,
    dumper: DumperStateInstance,
    intake: IntakeStateInstance,
    // Loop-rate telemetry captured at snapshot time.
    cycles_per_sec: usize,
    cycle_counter: usize,
}
impl RobotStateInstance {
    /// Bundles the subsystem snapshots into one instance.
    pub fn new(life: LifeStateInstance, current: CurrentStateJson, drive: DriveTrainStateInstance, dumper: DumperStateInstance,
               intake: IntakeStateInstance, cycles_per_sec: usize, cycle_counter: usize) -> Self {
        Self {
            life,
            current,
            drive,
            dumper,
            intake,
            cycles_per_sec,
            cycle_counter,
        }
    }
    /// Borrows the life-state snapshot.
    pub fn get_life(&self) -> &LifeStateInstance {
        &self.life
    }
    /// Borrows the drive-train snapshot.
    pub fn get_drive(&self) -> &DriveTrainStateInstance {
        &self.drive
    }
    /// Borrows the dumper snapshot.
    pub fn get_dumper(&self) -> &DumperStateInstance {
        &self.dumper
    }
    /// Borrows the intake snapshot.
    // NOTE(review): `current`, `cycles_per_sec`, and `cycle_counter` have
    // no getters; they are only exposed through serialization.
    pub fn get_intake(&self) -> &IntakeStateInstance {
        &self.intake
    }
}
use futures::{Future, Stream};
use h2::server;
use http::{Response, StatusCode};
use tokio::net::TcpListener;
/// Minimal HTTP/2 echo-ish server (futures 0.1 / tokio 0.1 / h2 style):
/// accepts TCP connections on 127.0.0.1:5928, performs the HTTP/2
/// handshake, logs each request and its body chunks, and answers every
/// stream with an empty 200 response.
pub fn main() {
    let addr = "127.0.0.1:5928".parse().unwrap();
    let listener = TcpListener::bind(&addr).unwrap();
    // Accept all incoming TCP connections.
    let connection = listener
        .incoming()
        .for_each(move |socket| {
            println!("connection opened: {:?}", socket);
            // Spawn a new task to process each connection.
            tokio::spawn({
                // Start the HTTP/2.0 connection handshake
                server::handshake(socket)
                    .and_then(|h2| {
                        // Accept all inbound HTTP/2.0 streams sent over the
                        // connection.
                        h2.for_each(|(request, mut respond)| {
                            println!("Received request: {:?}", request);
                            println!("Request URI: {:?}", request.uri().path());
                            // Drain the request body on its own task so the
                            // response is not blocked on the body arriving.
                            let stream = request
                                .into_body()
                                .for_each(|msg| {
                                    println!("Msg Received: {:?}", msg);
                                    Ok(())
                                })
                                .and_then(|m| {
                                    println!("stream all finished up: {:?}", m);
                                    Ok(())
                                })
                                .map_err(|e| println!("ERR RecvStream: {:?}", e));
                            tokio::spawn(stream);
                            // Build a response with no body
                            let response =
                                Response::builder().status(StatusCode::OK).body(()).unwrap();
                            // Send the response back to the client
                            // (`true` marks end-of-stream).
                            respond.send_response(response, true).unwrap();
                            Ok(())
                        })
                    })
                    .map_err(|e| panic!("unexpected error = {:?}", e))
            });
            // Keep accepting further connections.
            Ok(())
        })
        .map_err(|e| println!("connection error: {:?}", e));
    // Drive the accept loop on the tokio runtime (blocks forever).
    tokio::run(connection);
}
|
//! # Node
//!
//! A Tendermock `Node` encapsulates a storage and a chain.
//! A `SharedNode` is a thread-safe version of a node, for use by the various RPC interfaces.
//!
//! To integrate with IBC modules, the node implements the `Ics26Context` traits, which mainly deal
//! with storing and reading values from the store.
#![allow(unused_variables)]
mod bare;
mod error;
mod objects;
mod shared;
pub use bare::Node;
pub use error::Error;
pub use objects::Counter;
pub use shared::SharedNode;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.