text stringlengths 8 4.13M |
|---|
// Copyright (c) 2018 Hamid R. Ghadyani.
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Write [Workflows] for [Alfred][alfred.app] app with ease!
//!
//! This crate adds enhanced features and quality-of-life improvements to
//! [other alfred crate][alfred]'s basic functionality of creating **Script Filter** items.
//!
//! Using this crate to create your workflows, you can
//! - Set up automatic update of workflow ([`updater`] module).
//! - Painlessly read/write data related to workflow (settings, cache data, ...) ([`data`] module).
//!
//! [`updater`]: updater/index.html
//! [`data`]: data/index.html
//! [alfred]: https://crates.io/crates/alfred
//! [alfred.app]: http://www.alfredapp.com
//! [Workflows]: https://www.alfredapp.com/workflows/
//!
// TODO: check for "status" field of json returned by github to make sure it is fully uploaded
// before reporting that a release is available.
// TODO: Automatically update html_root_url's version when publishing to crates.io
// TODO: Use https://github.com/softprops/hubcaps for github API?
#![doc(html_root_url = "https://docs.rs/alfred-rs/0.7.1")]
extern crate alfred;
extern crate serde;
extern crate serde_json;
#[cfg(test)]
extern crate mockito;
#[macro_use]
extern crate log;
extern crate chrono;
extern crate env_logger;
extern crate semver;
#[macro_use]
extern crate serde_derive;
extern crate tempfile;
extern crate url;
use alfred::env;
use anyhow::Result;
use anyhow::{anyhow, bail};
pub mod data;
pub mod updater;
pub use self::data::Data;
pub use self::updater::Updater;
|
#[doc = "Register `PRAR_CUR` reader"]
pub type R = crate::R<PRAR_CUR_SPEC>;
// svd2rust-style generated reader aliases for the PRAR_CUR register fields:
// two 12-bit address fields and one status bit, extracted from the 32-bit word.
#[doc = "Field `PROT_AREA_START` reader - Bank 1 lowest PCROP protected address"]
pub type PROT_AREA_START_R = crate::FieldReader<u16>;
#[doc = "Field `PROT_AREA_END` reader - Bank 1 highest PCROP protected address"]
pub type PROT_AREA_END_R = crate::FieldReader<u16>;
#[doc = "Field `DMEP` reader - Bank 1 PCROP protected erase enable option status bit"]
pub type DMEP_R = crate::BitReader;
// Field accessors for the PRAR_CUR register value (generated code; each
// accessor masks/shifts the relevant bits out of the raw 32-bit word).
impl R {
    #[doc = "Bits 0:11 - Bank 1 lowest PCROP protected address"]
    #[inline(always)]
    pub fn prot_area_start(&self) -> PROT_AREA_START_R {
        // Bits 0:11 — low 12 bits of the register.
        PROT_AREA_START_R::new((self.bits & 0x0fff) as u16)
    }
    #[doc = "Bits 16:27 - Bank 1 highest PCROP protected address"]
    #[inline(always)]
    pub fn prot_area_end(&self) -> PROT_AREA_END_R {
        // Bits 16:27 — shift down 16, then mask 12 bits.
        PROT_AREA_END_R::new(((self.bits >> 16) & 0x0fff) as u16)
    }
    #[doc = "Bit 31 - Bank 1 PCROP protected erase enable option status bit"]
    #[inline(always)]
    pub fn dmep(&self) -> DMEP_R {
        // Bit 31 — top bit of the register.
        DMEP_R::new(((self.bits >> 31) & 1) != 0)
    }
}
#[doc = "FLASH protection address for bank 1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`prar_cur::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct PRAR_CUR_SPEC;
impl crate::RegisterSpec for PRAR_CUR_SPEC {
    // The register is accessed as one raw 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`prar_cur::R`](R) reader structure"]
impl crate::Readable for PRAR_CUR_SPEC {}
#[doc = "`reset()` method sets PRAR_CUR to value 0"]
impl crate::Resettable for PRAR_CUR_SPEC {
    // Hardware reset value of the register.
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::mock::{
new_test_ext, AccountId, Balance, Balances, MockAccountIdConverter, MockRuntime, RuntimeEvent,
RuntimeOrigin, SelfChainId, SelfEndpointId, System, Transporter, USER_ACCOUNT,
};
use crate::{EndpointHandler, Error, Location, Transfer};
use codec::Encode;
use frame_support::dispatch::DispatchResult;
use frame_support::{assert_err, assert_ok};
use sp_messenger::endpoint::{
Endpoint, EndpointHandler as EndpointHandlerT, EndpointRequest, EndpointResponse,
};
use sp_messenger::messages::ChainId;
use sp_runtime::traits::Convert;
use std::marker::PhantomData;
#[test]
fn test_initiate_transfer_failed() {
    // A transfer from an unfunded account must be rejected with `LowBalance`.
    new_test_ext().execute_with(|| {
        // Account 100 is not funded in the mock genesis.
        let account = 100;
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 0);
        // Attempt to transfer 500 to chain id 1.
        let dst_chain_id = 1.into();
        let dst_location = Location {
            chain_id: dst_chain_id,
            account_id: MockAccountIdConverter::convert(account),
        };
        let res = Transporter::transfer(RuntimeOrigin::signed(account), dst_location, 500);
        assert_err!(res, Error::<MockRuntime>::LowBalance);
    })
}
#[test]
fn test_initiate_transfer() {
    // Happy path: a funded account initiates an outgoing transfer. The amount
    // is burned locally (to be minted on the destination chain) and the
    // transfer is tracked until a response arrives.
    new_test_ext().execute_with(|| {
        let account = USER_ACCOUNT;
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 1000);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 1000);
        // Transfer 500 to chain id 1.
        let dst_chain_id = 1.into();
        let dst_location = Location {
            chain_id: dst_chain_id,
            account_id: MockAccountIdConverter::convert(account),
        };
        let res = Transporter::transfer(RuntimeOrigin::signed(account), dst_location, 500);
        assert_ok!(res);
        // Sender balance and total issuance both drop: funds were burned here.
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 500);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 500);
        System::assert_has_event(RuntimeEvent::Transporter(
            crate::Event::<MockRuntime>::OutgoingTransferInitiated {
                chain_id: dst_chain_id,
                message_id: 0,
            },
        ));
        // The pending transfer must be recorded under (dst_chain_id, message 0).
        assert_eq!(
            Transporter::outgoing_transfers(dst_chain_id, 0).unwrap(),
            Transfer {
                amount: 500,
                sender: Location {
                    chain_id: SelfChainId::get(),
                    account_id: MockAccountIdConverter::convert(account),
                },
                receiver: Location {
                    chain_id: dst_chain_id,
                    account_id: MockAccountIdConverter::convert(account),
                },
            }
        )
    })
}
#[test]
fn test_transfer_response_missing_request() {
    // A response for a message id with no tracked outgoing transfer must be
    // rejected with `MissingTransferRequest`.
    new_test_ext().execute_with(|| {
        let dst_chain_id: ChainId = 1.into();
        let amount: Balance = 500;
        let account: AccountId = 100;
        // Build a syntactically valid request payload — but no transfer was
        // ever initiated, so nothing is stored for (dst_chain_id, 0).
        let encoded_payload = Transfer {
            amount,
            sender: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
            receiver: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
        }
        .encode();
        let res = submit_response(dst_chain_id, encoded_payload, Ok(vec![]));
        assert_err!(res, Error::<MockRuntime>::MissingTransferRequest)
    })
}
/// Test helper: submit a transfer of `amount` from `account` to
/// `dst_chain_id`, asserting it succeeds and that the
/// `OutgoingTransferInitiated` event for message id 0 was emitted.
fn initiate_transfer(dst_chain_id: ChainId, account: AccountId, amount: Balance) {
    let dst_location = Location {
        chain_id: dst_chain_id,
        account_id: MockAccountIdConverter::convert(account),
    };
    let res = Transporter::transfer(RuntimeOrigin::signed(account), dst_location, amount);
    assert_ok!(res);
    System::assert_has_event(RuntimeEvent::Transporter(
        crate::Event::<MockRuntime>::OutgoingTransferInitiated {
            chain_id: dst_chain_id,
            message_id: 0,
        },
    ));
}
/// Test helper: feed `resp` (the destination chain's reply) into the
/// transporter's endpoint handler for message id 0, reconstructing the
/// original outgoing request from `req_payload`.
fn submit_response(
    dst_chain_id: ChainId,
    req_payload: Vec<u8>,
    resp: EndpointResponse,
) -> DispatchResult {
    let handler = EndpointHandler(PhantomData::<MockRuntime>);
    handler.message_response(
        dst_chain_id,
        // message id is always 0 in these tests (first message per channel)
        0,
        EndpointRequest {
            src_endpoint: Endpoint::Id(SelfEndpointId::get()),
            dst_endpoint: Endpoint::Id(SelfEndpointId::get()),
            payload: req_payload,
        },
        resp,
    )
}
/// Test helper: deliver an incoming transfer request (`req_payload`) from
/// `src_chain_id` with message id 0 to the endpoint handler, returning the
/// handler's response.
fn submit_transfer(src_chain_id: ChainId, req_payload: Vec<u8>) -> EndpointResponse {
    let handler = EndpointHandler(PhantomData::<MockRuntime>);
    handler.message(
        src_chain_id,
        0,
        EndpointRequest {
            src_endpoint: Endpoint::Id(SelfEndpointId::get()),
            dst_endpoint: Endpoint::Id(SelfEndpointId::get()),
            payload: req_payload,
        },
    )
}
#[test]
fn test_transfer_response_invalid_request() {
    // A response whose reconstructed request does not match the transfer
    // tracked in storage must be rejected with `InvalidTransferRequest`.
    new_test_ext().execute_with(|| {
        let account = USER_ACCOUNT;
        let amount: Balance = 500;
        // Initiate a legitimate transfer of 500 to chain id 1.
        let dst_chain_id: ChainId = 1.into();
        initiate_transfer(dst_chain_id, account, amount);
        // Echo back a payload whose receiver differs from the stored one.
        let encoded_payload = Transfer {
            amount,
            sender: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
            receiver: Location {
                chain_id: dst_chain_id,
                // change receiver id
                account_id: MockAccountIdConverter::convert(100),
            },
        }
        .encode();
        let res = submit_response(dst_chain_id, encoded_payload, Ok(vec![]));
        assert_err!(res, Error::<MockRuntime>::InvalidTransferRequest)
    })
}
#[test]
fn test_transfer_response_revert() {
    // An error response from the destination chain must revert the burn:
    // the amount is re-minted to the sender and `OutgoingTransferFailed`
    // is emitted with the propagated error.
    new_test_ext().execute_with(|| {
        let account = USER_ACCOUNT;
        // transfer 500 to dst_chain id 1
        let amount: Balance = 500;
        let dst_chain_id: ChainId = 1.into();
        // check pre dispatch balances
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 1000);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 1000);
        // init transfer
        initiate_transfer(dst_chain_id, account, amount);
        // check post init: funds burned while the transfer is in flight
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 500);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 500);
        // submit an error response matching the stored request
        let encoded_payload = Transfer {
            amount,
            sender: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
            receiver: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
        }
        .encode();
        let res = submit_response(
            dst_chain_id,
            encoded_payload,
            Err(Error::<MockRuntime>::InvalidPayload.into()),
        );
        assert_ok!(res);
        // balance changes should be reverted.
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 1000);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 1000);
        System::assert_has_event(RuntimeEvent::Transporter(
            crate::Event::<MockRuntime>::OutgoingTransferFailed {
                chain_id: dst_chain_id,
                message_id: 0,
                err: Error::<MockRuntime>::InvalidPayload.into(),
            },
        ));
    })
}
#[test]
fn test_transfer_response_successful() {
    // A successful response finalizes the transfer: the burned funds stay
    // burned and `OutgoingTransferSuccessful` is emitted.
    new_test_ext().execute_with(|| {
        let account = USER_ACCOUNT;
        // transfer 500 to dst_chain id 1
        let amount: Balance = 500;
        let dst_chain_id: ChainId = 1.into();
        // check pre dispatch balances
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 1000);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 1000);
        // init transfer
        initiate_transfer(dst_chain_id, account, amount);
        // check post init: funds burned while the transfer is in flight
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 500);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 500);
        // submit a success response matching the stored request
        let encoded_payload = Transfer {
            amount,
            sender: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
            receiver: Location {
                chain_id: dst_chain_id,
                account_id: MockAccountIdConverter::convert(account),
            },
        }
        .encode();
        let res = submit_response(dst_chain_id, encoded_payload, Ok(vec![]));
        assert_ok!(res);
        // balance changes should be as is.
        let balance = Balances::free_balance(account);
        assert_eq!(balance, 500);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 500);
        System::assert_has_event(RuntimeEvent::Transporter(
            crate::Event::<MockRuntime>::OutgoingTransferSuccessful {
                chain_id: dst_chain_id,
                message_id: 0,
            },
        ));
    })
}
#[test]
fn test_receive_incoming_transfer() {
    // An incoming transfer request from another chain mints the amount to the
    // local receiver, growing total issuance accordingly.
    new_test_ext().execute_with(|| {
        let receiver = 2;
        // transfer 500
        let amount: Balance = 500;
        let src_chain_id: ChainId = 100.into();
        let dst_chain_id: ChainId = 1.into();
        // check pre dispatch balances: receiver starts empty
        let balance = Balances::free_balance(receiver);
        assert_eq!(balance, 0);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 1000);
        let resp = submit_transfer(
            src_chain_id,
            Transfer {
                amount,
                sender: Location {
                    chain_id: src_chain_id,
                    account_id: MockAccountIdConverter::convert(0),
                },
                receiver: Location {
                    chain_id: dst_chain_id,
                    account_id: MockAccountIdConverter::convert(receiver),
                },
            }
            .encode(),
        );
        assert_ok!(resp);
        // 500 minted to the receiver; issuance grows from 1000 to 1500.
        let balance = Balances::free_balance(receiver);
        assert_eq!(balance, 500);
        let total_balance = Balances::total_issuance();
        assert_eq!(total_balance, 1500);
    })
}
|
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::BinaryHeap;
use std::collections::VecDeque;
use rand::{ThreadRng, thread_rng};
use rand::distributions::IndependentSample;
use rand::distributions::{Exp, Normal, Range};
use cpu::{Cpu, CpuState};
use event::{Event, EventType};
use request::Request;
/// Input parameters of one simulation run (deserialized via rustc-serialize).
#[derive(RustcDecodable, RustcEncodable)]
pub struct SystemParams {
    /// Number of CPUs (servers) in the simulated machine.
    pub n_cpu: usize,
    /// Number of closed-loop clients issuing requests.
    pub n_users: usize,
    /// Window length over which the initial arrivals are spread uniformly.
    pub ease_in_time: f64,
    /// Maximum number of events to process before stopping the run.
    pub max_iters: usize,
    /// Capacity of the request buffer; arrivals beyond it are dropped.
    pub buffer_capacity: usize,
    /// Maximum number of requests concurrently holding a worker thread.
    pub threadpool_size: usize,
    /// Round-robin time slice given to a request on a CPU.
    pub quantum: f64,
    /// Duration of one context switch.
    pub ctxx_time: f64,
    /// Mean of the exponential service-time distribution.
    pub service_time_mean: f64,
    /// Lower bound of the uniform request-timeout distribution.
    pub req_timeout_min: f64,
    /// Upper bound of the uniform request-timeout distribution.
    pub req_timeout_max: f64,
    /// Mean of the normal think-time distribution (after a success).
    pub think_time_mean: f64,
    /// Std-dev of the normal think-time distribution.
    pub think_time_std_dev: f64,
    /// Mean of the normal retry-think-time distribution (after drop/timeout).
    pub retry_think_time_mean: f64,
    /// Std-dev of the normal retry-think-time distribution.
    pub retry_think_time_std_dev: f64,
}
/// Aggregated output metrics of one simulation run.
#[derive(Debug)]
pub struct SystemMetrics {
    /// Simulation clock value when the run ended.
    pub time: f64,
    /// Total request arrivals (including retries).
    pub n_arrivals: usize,
    pub n_processed: usize, // incl. timed-out
    pub n_timedout: usize, // incl. those in-process
    /// Requests dropped because the buffer was full.
    pub n_dropped: usize,
    pub n_to_in_proc: usize, // timed-out but still in process
    /// Sum of (departure - arrival) times, for mean response time.
    pub sum_resp_time: f64,
    pub wt_sum_reqs_in_sys: f64, // time-weighted sum of |requests in system|
    /// Total CPU time spent actually serving requests (all CPUs).
    pub total_procd_time: f64,
    /// Total CPU time spent context switching (all CPUs).
    pub total_ctxx_time: f64,
}
/// Book-keeping for the time-weighted count of requests in the system.
struct ReqsInSystem {
    last_mod_ts: f64, // last modification timestamp
    /// Requests currently in the system (buffered, pooled, or on a CPU).
    count: usize,
    to_count: usize, // timed out requests in sys
}
/// Returns true when `|f|` is strictly below the 1e-12 tolerance, i.e. the
/// floating-point residue of a remaining-service time should be treated as
/// zero. `f.abs()` replaces the hand-rolled double comparison; the open
/// interval (exclusive at exactly +/-1e-12) is preserved.
fn is_approx_zero(f: f64) -> bool {
    f.abs() < 1.0e-12
}
/// Puts `rc_req` onto `rc_cpu` (marking the CPU Busy as of `simtime`) and
/// returns the QuantumOver event that ends this slice: it fires when the
/// request's remaining service runs out or when the quantum expires,
/// whichever comes first.
fn process_request(rc_req: Rc<RefCell<Request>>, rc_cpu: Rc<RefCell<Cpu>>, simtime: f64, quantum: f64) -> Event {
    // Mark the CPU busy with this request starting now; the temporary
    // borrow_mut guard is released at the end of the statement.
    rc_cpu.borrow_mut().state = CpuState::Busy(rc_req.clone(), simtime);
    let remaining = rc_req.borrow().remaining_service;
    let slice = if remaining < quantum { remaining } else { quantum };
    Event::new(EventType::QuantumOver(rc_cpu.clone()), simtime + slice)
}
/// Draws one sample from `sampler` and clamps negative draws to 0.0 (used for
/// think times, which must not be negative). The comparison is kept as
/// `< 0.0` so NaN and -0.0 pass through exactly as the sampler produced them.
fn sample_zero_lo<T: IndependentSample<f64>>(sampler: &T, rng: &mut ThreadRng) -> f64 {
    let drawn = sampler.ind_sample(rng);
    match drawn < 0.0 {
        true => 0.0,
        false => drawn,
    }
}
/// Runs one discrete-event simulation of a closed queueing system:
/// `n_users` clients cycle through think -> request -> service, against a
/// machine with `n_cpu` CPUs, a bounded thread pool, a bounded request
/// buffer, round-robin scheduling with a fixed quantum, context-switch
/// overhead, and request timeouts. Returns the aggregated metrics.
pub fn run(sys: &SystemParams) -> SystemMetrics {
    let mut sim = SystemMetrics { time: 0.0, n_arrivals:0, n_processed: 0,
                                  n_timedout: 0, n_dropped: 0, n_to_in_proc: 0,
                                  sum_resp_time: 0.0, wt_sum_reqs_in_sys: 0.0,
                                  total_procd_time: 0.0, total_ctxx_time: 0.0 };
    let mut reqs_in_sys = ReqsInSystem { last_mod_ts: 0.0, count: 0, to_count: 0 };
    // Pending events ordered by timestamp (relies on Event's Ord impl).
    let mut events = BinaryHeap::new();
    let mut rng = thread_rng();
    let mut cpus = Vec::with_capacity(sys.n_cpu);
    let mut idle_cpus = Vec::with_capacity(sys.n_cpu);
    // Number of requests currently holding a worker thread.
    let mut n_threads = 0;
    for _ in 0..sys.n_cpu {
        let cpu = Rc::new(RefCell::new(Cpu::new()));
        idle_cpus.push(cpu.clone());
        cpus.push(cpu);
    }
    let mut rbuff = VecDeque::with_capacity(sys.buffer_capacity); // Request Buffer
    let mut tpool = VecDeque::with_capacity(sys.threadpool_size); // Thread Pool
    // Distributions: uniform ease-in window, exponential service times,
    // uniform timeouts, normal think / retry-think times.
    let ease_in_sampler = Range::new(0.0_f64, sys.ease_in_time);
    let service_sampler = Exp::new(1.0/sys.service_time_mean);
    let timeout_sampler = Range::new(sys.req_timeout_min, sys.req_timeout_max);
    let think_sampler = Normal::new(sys.think_time_mean, sys.think_time_std_dev);
    let retry_think_sampler = Normal::new(sys.retry_think_time_mean, sys.retry_think_time_std_dev);
    // Seed one initial request per user, spread across the ease-in window.
    for _ in 0..sys.n_users {
        let arrival_ts = sim.time + ease_in_sampler.ind_sample(&mut rng);
        let total_service = service_sampler.ind_sample(&mut rng);
        let timeout = timeout_sampler.ind_sample(&mut rng);
        let (arrival_e, timeout_e) = Event::new_arrival(arrival_ts, total_service, timeout);
        events.push(arrival_e);
        events.push(timeout_e);
    }
    let mut iters = 0;
    // Main event loop: pop the earliest event, advance the clock, dispatch.
    while let Some(e) = events.pop() {
        use event::EventType::*;
        sim.time = e.timestamp;
        match e._type {
            Arrival(rc_req) => {
                //println!("T={} Arrival {:?}", sim.time, rc_req.borrow());
                sim.n_arrivals += 1;
                // Fold in the time-weighted request count before changing it.
                sim.wt_sum_reqs_in_sys += (sim.time - reqs_in_sys.last_mod_ts)*reqs_in_sys.count as f64;
                reqs_in_sys.count += 1;
                reqs_in_sys.last_mod_ts = sim.time;
                debug_assert!(n_threads <= reqs_in_sys.count,
                              "n_thr {} n_req {} @ {}",
                              n_threads, reqs_in_sys.count, sim.time);
                if n_threads < sys.threadpool_size {
                    // A thread is available: run at once on an idle CPU, or
                    // park in the thread pool until a CPU frees up.
                    if let Some(rc_cpu) = idle_cpus.pop() {
                        events.push(process_request(rc_req, rc_cpu, sim.time, sys.quantum));
                    } else {
                        tpool.push_back(rc_req);
                    }
                    n_threads += 1;
                } else if rbuff.len() < sys.buffer_capacity {
                    // No thread free: queue in the request buffer.
                    rbuff.push_back(rc_req);
                } else {
                    // Buffer full: request is dropped.
                    sim.n_dropped += 1;
                    reqs_in_sys.count -= 1;
                    // The client cannot know the request was dropped right away.
                    // Therefore waits for a timeout, and then a retry think time,
                    // before issuing a new request.
                    let arrival_ts = sim.time + timeout_sampler.ind_sample(&mut rng) +
                                     sample_zero_lo(&retry_think_sampler, &mut rng);
                    let total_service = service_sampler.ind_sample(&mut rng);
                    let timeout = timeout_sampler.ind_sample(&mut rng);
                    let (arrival_e, timeout_e) = Event::new_arrival(arrival_ts, total_service, timeout);
                    events.push(arrival_e);
                    events.push(timeout_e);
                }
            },
            Departure(rc_req) => {
                //println!("T={} Departure {:?}", sim.time, rc_req.borrow());
                sim.wt_sum_reqs_in_sys += (sim.time - reqs_in_sys.last_mod_ts)*reqs_in_sys.count as f64;
                reqs_in_sys.count -= 1;
                reqs_in_sys.last_mod_ts = sim.time;
                // A live weak count means the Timeout event still holds its
                // Weak ref, i.e. the request completed before timing out; the
                // client then thinks and issues a fresh request.
                if Rc::weak_count(&rc_req) > 0 { // Request was not timed out
                    let arrival_ts = sim.time + sample_zero_lo(&think_sampler, &mut rng);
                    let total_service = service_sampler.ind_sample(&mut rng);
                    let timeout = timeout_sampler.ind_sample(&mut rng);
                    let (arrival_e, timeout_e) = Event::new_arrival(arrival_ts, total_service, timeout);
                    events.push(arrival_e);
                    events.push(timeout_e);
                } else {
                    // Timed-out request finally left the system.
                    reqs_in_sys.to_count -= 1;
                }
                sim.sum_resp_time += sim.time - rc_req.borrow().arrival_time;
                sim.n_processed += 1;
            },
            CtxSwitched(rc_cpu) => {
                //println!("T={} CtxSwitched {:?}", sim.time, rc_cpu.borrow());
                // Context switch finished: swap the old request out, new one in.
                let (rc_req_new, rc_req_old, ctxx_start) = match rc_cpu.borrow().state {
                    CpuState::CtxSwitching( ref rc_req_new, ref rc_req_old, ctxx_start )
                        => ( rc_req_new.clone(), rc_req_old.clone(), ctxx_start ),
                    _ => panic!("Fatal: Cpu was not CtxSwitching at a CtxSwitched!"),
                };
                rc_cpu.borrow_mut().total_ctxx_time += sim.time - ctxx_start;
                let rem_serv = rc_req_old.borrow().remaining_service;
                if is_approx_zero(rem_serv) {
                    // Old request is done: it departs and its thread is freed.
                    events.push(Event::new(EventType::Departure(rc_req_old), sim.time));
                    n_threads -= 1;
                } else {
                    // Old request needs more service: back into the pool.
                    tpool.push_back(rc_req_old);
                }
                events.push(process_request(rc_req_new, rc_cpu, sim.time, sys.quantum));
            },
            QuantumOver(rc_cpu) => {
                //println!("T={} QuantumOver {:?}", sim.time, rc_cpu.borrow());
                // Charge the elapsed slice against the running request and CPU.
                let rc_req_old: Rc<RefCell<Request>> = {
                    let mut cpu = rc_cpu.borrow_mut();
                    let ( rc_req, procd_time ) = match cpu.state {
                        CpuState::Busy( ref rc_req, quantum_start )
                            => ( rc_req.clone(), sim.time - quantum_start ),
                        _ => panic!("Fatal: Cpu was not Busy at a QuantumOver!")
                    };
                    rc_req.borrow_mut().remaining_service -= procd_time;
                    cpu.total_procd_time += procd_time;
                    rc_req
                };
                if let Some(rc_req_new) = tpool.pop_front() {
                    // Another pooled request is waiting: pay a context switch.
                    rc_cpu.borrow_mut().state = CpuState::CtxSwitching(rc_req_new, rc_req_old, sim.time);
                    events.push(Event::new(EventType::CtxSwitched(rc_cpu), sim.time + sys.ctxx_time));
                } else if !is_approx_zero(rc_req_old.borrow().remaining_service) { // > 0.0
                    // No contention: keep running the same request.
                    events.push(process_request(rc_req_old, rc_cpu, sim.time, sys.quantum));
                } else if let Some(rc_req_new) = rbuff.pop_front() { // logical?
                    // Old request finished; pull a buffered request in. Its
                    // departure is emitted by the CtxSwitched branch above.
                    rc_cpu.borrow_mut().state = CpuState::CtxSwitching(rc_req_new, rc_req_old, sim.time);
                    events.push(Event::new(EventType::CtxSwitched(rc_cpu), sim.time + sys.ctxx_time));
                } else {
                    // Nothing left to run: CPU idles and the request departs.
                    n_threads -= 1;
                    rc_cpu.borrow_mut().state = CpuState::Idle;
                    idle_cpus.push(rc_cpu);
                    events.push(Event::new(EventType::Departure(rc_req_old), sim.time));
                }
            },
            // Timeout events hold only a Weak ref; upgrade() failing means the
            // request already departed and the timeout is stale.
            Timeout(weak_req) => match weak_req.upgrade() {
                Some(rc_req) => {
                    //println!("T={} Timedout! {:?}", sim.time, rc_req.borrow());
                    sim.n_timedout += 1;
                    reqs_in_sys.to_count += 1;
                    // The client gives up and retries after a retry think time.
                    let arrival_ts = sim.time + sample_zero_lo(&retry_think_sampler, &mut rng);
                    let total_service = service_sampler.ind_sample(&mut rng);
                    let timeout = timeout_sampler.ind_sample(&mut rng);
                    let (arrival_e, timeout_e) = Event::new_arrival(arrival_ts, total_service, timeout);
                    events.push(arrival_e);
                    events.push(timeout_e);
                },
                None => {}
            },
        }
        iters += 1;
        if iters >= sys.max_iters {
            break;
        }
    }
    // Aggregate per-CPU busy and context-switch time into the run metrics.
    let (total_procd_time, total_ctxx_time) = cpus.into_iter().fold((0.0, 0.0), |sum, cpu_rc| {
        let cpu = cpu_rc.borrow();
        (sum.0 + cpu.total_procd_time, sum.1 + cpu.total_ctxx_time)
    });
    sim.total_procd_time = total_procd_time;
    sim.total_ctxx_time = total_ctxx_time;
    sim.n_to_in_proc = reqs_in_sys.to_count;
    sim
}
|
use std::error::Error;
use std::{fmt, io};
use futures::Future;
use tokio_executor::current_thread::{self, CurrentThread};
use tokio_timer::{timer::{self, Timer}, clock::Clock};
use tokio_net::driver::{Reactor, Handle as ReactorHandle};
use crate::builder::Builder;
/// Single-threaded runtime provides a way to start reactor
/// and executor on the current thread.
///
/// See [module level][mod] documentation for more details.
///
/// [mod]: index.html
#[derive(Debug)]
pub struct Runtime {
    /// Handle installed as the thread-default reactor inside `enter`.
    reactor_handle: ReactorHandle,
    /// Handle installed as the thread-default timer inside `enter`.
    timer_handle: timer::Handle,
    /// Clock installed as the thread-default clock inside `enter`.
    clock: Clock,
    /// Current-thread executor that polls spawned futures; parked on the
    /// timer-wrapping-reactor stack.
    executor: CurrentThread<Timer<Reactor>>,
}
/// Error returned by the `run` function.
#[derive(Debug)]
pub struct RunError {
    /// The underlying executor error this wrapper displays and re-exposes.
    inner: current_thread::RunError,
}
impl fmt::Display for RunError {
    /// Formats exactly as the wrapped `current_thread::RunError` does.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.inner, fmt)
    }
}
impl Error for RunError {
    /// Chains to the wrapped error's source. This replaces the deprecated
    /// `description()` and `cause()` overrides: `Error::cause`'s default
    /// implementation forwards to `source`, so callers observe the same
    /// error chain as before, without calling deprecated APIs.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        self.inner.source()
    }
}
impl Runtime {
    #[allow(clippy::new_ret_no_self)]
    /// Returns a new runtime initialized with default configuration values.
    pub fn new() -> io::Result<Runtime> {
        Builder::new().build_rt()
    }
    /// Crate-internal constructor used by the builder: bundles the already
    /// created reactor/timer handles, clock, and executor.
    pub(super) fn new2(
        reactor_handle: ReactorHandle,
        timer_handle: timer::Handle,
        clock: Clock,
        executor: CurrentThread<Timer<Reactor>>,
    ) -> Runtime {
        Runtime {
            reactor_handle,
            timer_handle,
            clock,
            executor,
        }
    }
    /// Spawn a future onto the single-threaded Tokio runtime.
    ///
    /// See [module level][mod] documentation for more details.
    ///
    /// [mod]: index.html
    ///
    /// # Examples
    ///
    /// ```rust
    /// # use futures::{future, Future, Stream};
    /// use actix_rt::Runtime;
    ///
    /// # fn dox() {
    /// // Create the runtime
    /// let mut rt = Runtime::new().unwrap();
    ///
    /// // Spawn a future onto the runtime
    /// rt.spawn(future::lazy(|| {
    ///     println!("running on the runtime");
    ///     Ok(())
    /// }));
    /// # }
    /// # pub fn main() {}
    /// ```
    ///
    /// # Panics
    ///
    /// This function panics if the spawn fails. Failure occurs if the executor
    /// is currently at capacity and is unable to spawn a new future.
    pub fn spawn<F>(&mut self, future: F) -> &mut Self
    where
        F: Future<Output = (),> + 'static,
    {
        self.executor.spawn(future);
        // Returns &mut Self to allow chained spawn calls.
        self
    }
    /// Runs the provided future, blocking the current thread until the future
    /// completes.
    ///
    /// This function can be used to synchronously block the current thread
    /// until the provided `future` has resolved either successfully or with an
    /// error. The result of the future is then returned from this function
    /// call.
    ///
    /// Note that this function will **also** execute any spawned futures on the
    /// current thread, but will **not** block until these other spawned futures
    /// have completed. Once the function returns, any uncompleted futures
    /// remain pending in the `Runtime` instance. These futures will not run
    /// until `block_on` or `run` is called again.
    ///
    /// The caller is responsible for ensuring that other spawned futures
    /// complete execution by calling `block_on` or `run`.
    pub fn block_on<F>(&mut self, f: F) -> F::Output
    where
        F: Future,
    {
        self.enter(|executor| {
            // Run the provided future
            let ret = executor.block_on(f);
            ret
        })
    }
    /// Run the executor to completion, blocking the thread until **all**
    /// spawned futures have completed.
    pub fn run(&mut self) -> Result<(), RunError> {
        self.enter(|executor| executor.run())
            .map_err(|e| RunError { inner: e })
    }
    /// Installs this runtime's task executor, clock, reactor and timer as the
    /// thread defaults (in that nesting order), then invokes `f` with the
    /// executor. The guards restore the previous defaults on drop.
    fn enter<F, R>(&mut self, f: F) -> R
    where
        F: FnOnce(&mut CurrentThread<Timer<Reactor>>) -> R,
    {
        // Destructure to borrow the handles immutably while borrowing the
        // executor mutably at the same time.
        let Runtime {
            ref reactor_handle,
            ref timer_handle,
            ref clock,
            ref mut executor,
            ..
        } = *self;
        // WARN: We do not enter the executor here, since in tokio 0.2 the executor is entered
        // automatically inside its `block_on` and `run` methods
        tokio_executor::with_default(&mut current_thread::TaskExecutor::current(),|| {
            tokio_timer::clock::with_default(clock, || {
                let _reactor_guard = tokio_net::driver::set_default(reactor_handle);
                let _timer_guard = tokio_timer::set_default(timer_handle);
                f(executor)
            })
        })
    }
}
|
//! The abstract syntax tree of buildlua.
//!
//! Most of this file is based on the "Syntax of Lua" page of the Lua manual.
//!
//! The page can be found at http://www.lua.org/manual/5.2/manual.html#9.
/// The largest unit of code in lua. Equates to an entire file of code.
pub struct Chunk(pub Block);
/// A sequence of statements, optionally followed by a single `return` statement.
pub struct Block(pub Option<Vec<Statement>>, pub Option<Box<ReturnStatement>>);
/// A single Lua statement.
pub enum Statement {
    /// A lone `;`.
    Semicolon,
    /// `var {, var} = exp {, exp}`.
    Assignment(Box<VariableList>, Box<ExpressionList>),
    /// A function call used as a statement (result discarded).
    FunctionCall(Box<FunctionCall>),
    /// `::name::` label definition.
    Label(Label),
    /// `break`.
    Break,
    /// `goto name`.
    Goto(Label),
    /// `do ... end` block.
    Do(Box<Block>),
    /// `while exp do ... end`.
    While { exp: Box<Expression>, do_: Box<Block> },
    /// `repeat ... until exp`.
    Repeat { block: Box<Block>, until: Box<Expression> },
    /// `if ... then ... [elseif ... then ...] [else ...] end`.
    // NOTE(review): this shape allows at most one `elseif` arm and makes the
    // `else_` block mandatory, which is narrower than Lua's grammar — confirm.
    If {
        condition: Box<Expression>, then: Box<Block>,
        elseif_condition: Option<Box<Expression>>, elsethen: Option<Box<Block>>,
        else_: Box<Block>
    },
    /// Numeric `for name = from, to [, step] do ... end`.
    ForStepping { name: String, from: Box<Expression>, to: Box<Expression>, step: Option<Box<Expression>>, block: Box<Block> },
    /// Generic `for names in exps do ... end`.
    ForIn { name_list: Box<NameList>, in_: Box<ExpressionList>, do_: Box<Block> },
    /// `function funcname(...) ... end`.
    Function(FunctionName, Box<FunctionBody>),
    /// `local function name(...) ... end`.
    LocalFunction { name: String, body: Box<FunctionBody> },
    /// `local names [= exps]`.
    LocalVariableBinding(NameList, Option<ExpressionList>),
}
/// `return explist` terminating a block.
pub struct ReturnStatement(pub Box<ExpressionList>);
/// A `::name::` goto label.
pub struct Label(pub String);
/// A (possibly dotted) function name such as `a.b.c` or `a.b:m`.
pub struct FunctionName {
    /// The leading name before any dot.
    pub first_dot_access: String,
    /// Names after subsequent dots, if any.
    pub rest_dot_access: Option<Vec<String>>,
    /// The method name after `:` when the function takes an implicit self.
    pub self_name: Option<String>,
}
/// One or more assignment targets: `var {, var}`.
pub struct VariableList {
    pub first: Box<Variable>,
    pub rest: Option<Vec<Variable>>,
}
/// A value location that can be read from or assigned to.
pub enum Variable {
    /// A plain name.
    Name(String),
    /// `prefixexp[exp]` indexing.
    ArrayAccess { from: Box<PrefixExpression>, key: Box<Expression> },
    /// `prefixexp.name` indexing.
    DotAccess { from: Box<PrefixExpression>, key: String },
}
/// One or more names: `Name {, Name}`.
pub struct NameList(pub String, pub Option<Vec<String>>);
/// One or more expressions: `exp {, exp}`.
pub struct ExpressionList(pub Box<Expression>, pub Option<Vec<Expression>>);
/// Any Lua expression.
pub enum Expression {
    Nil,
    False,
    True,
    Number(f64),
    String(String),
    /// Represents the lua ... variable in a function that takes extended arguments with a ...
    /// at the end of its argument list.
    ExtendedArgumentAccess,
    FunctionDefine(Box<FunctionDefine>),
    PrefixExpression(Box<PrefixExpression>),
    TableConstructor(Box<TableConstructor>),
    // NOTE(review): neither variant below records *which* operator was used;
    // the `BinaryOperation`/`UnaryOperation` enums defined later in this file
    // are never referenced from here — confirm whether an operator field is
    // missing.
    BinaryOperation(Box<Expression>, Box<Expression>),
    UnaryOperation(Box<Expression>),
}
/// Expressions that may prefix an index or a call (Lua's `prefixexp`).
pub enum PrefixExpression {
    Variable(Box<Variable>),
    FunctionCall(Box<FunctionCall>),
    /// A parenthesized expression: `(exp)`.
    Parenthesis(Box<Expression>),
}
/// A function call expression or statement.
pub enum FunctionCall {
    /// Calling the function with a . to access through tables or through the global environment,
    /// not passing a self argument.
    ///
    /// # Example
    /// ```lua
    /// abcde("foo")
    /// bar.foo("foobar")
    /// ```
    Static(Box<PrefixExpression>, Box<FunctionArguments>),
    /// Calling with a self argument, using a : access.
    ///
    /// # Example
    /// ```lua
    /// foo:bar("barfoo")
    /// ```
    SelfTaking(Box<PrefixExpression>, String, FunctionArguments),
}
/// The argument part of a function call.
pub enum FunctionArguments {
    /// Calling a function normally.
    ///
    /// # Example
    /// ```lua
    /// foo("abc")
    /// ```
    Parenthesis(Option<Box<ExpressionList>>),
    /// Calling the function with a single table.
    ///
    /// # Example
    /// ```lua
    /// foo{a = "aaa"}
    /// ```
    TableConstructor(Box<TableConstructor>),
    /// Calls the function with a singular literal string.
    ///
    /// # Example
    /// ```lua
    /// foo"abc"
    /// ```
    String(String), // This is really weird. I see the point, but why?
}
/// An anonymous `function ... end` expression.
pub struct FunctionDefine(pub Box<FunctionBody>);
/// A function's optional parameter list followed by its body block.
pub struct FunctionBody(pub Option<Box<ParameterList>>, pub Box<Block>);
/// The parameter list of a function definition.
pub enum ParameterList {
    /// A normal parameter list, without any extended arguments or special features.
    ///
    /// # Example
    /// ```lua
    /// function foo(a, b)
    /// end
    /// ```
    NameList(Box<NameList>),
    /// A parameter list with both arguments and an extended argument.
    ///
    /// # Example
    /// ```lua
    /// function foo(a, b, ...)
    /// ```
    ExtendedArguments(Box<NameList>),
    /// A function with an extended argument, but no regular arguments.
    ///
    /// # Example
    /// ```lua
    /// function foo(...)
    /// ```
    ExtendedArgumentsVoid,
}
/// A table constructor: `{ fieldlist }`.
// NOTE(review): an empty constructor `{}` is not representable because
// `FieldList` requires a first field — confirm whether that is intended.
pub struct TableConstructor(pub Box<FieldList>);
/// One or more table fields.
pub struct FieldList(pub Box<Field>, pub Option<Vec<Field>>);
/// One field inside a table constructor.
pub enum Field {
    /// A field in a table that takes an expression for the name of the field,
    /// i.e. `[name_exp] = exp`.
    ExpressionForName { name: Box<Expression>, equals: Box<Expression> },
    /// A normal field.
    ///
    /// # Example
    /// ```lua
    /// a = { b = "bbb" }
    /// ```
    Equals { name: String, equals: Box<Expression> },
    /// A field in the form of an array.
    ///
    /// # Example
    /// ```lua
    /// a = { "bbb" }
    /// ```
    ArrayStyle(Box<Expression>),
}
/// Binary operator sigils.
// NOTE(review): `Devide` and `Concatanate` are misspellings of `Divide` and
// `Concatenate`, but renaming these public variants would break the API.
pub enum BinaryOperation {
    /// + sigil.
    Plus,
    /// - sigil, when used as a binary operation.
    Minus,
    /// * sigil.
    Times,
    /// / sigil.
    Devide,
    /// ^ sigil.
    Exponent,
    /// % sigil.
    Modulo,
    /// .. sigil.
    Concatanate,
    /// < sigil.
    LessThan,
    /// <= sigil.
    LessThanOrEqual,
    /// > sigil.
    GreaterThan,
    /// >= sigil.
    GreaterThanOrEqual,
    /// == sigil.
    Equal,
    /// ~= sigil.
    NotEqual,
    /// `and` keyword.
    And,
    /// `or` keyword.
    Or,
}
/// Unary operator sigils.
pub enum UnaryOperation {
    /// - sigil, when used as a unary operation.
    Negate,
    /// `not` keyword.
    Not,
    /// # sigil.
    Length,
}
|
#[doc = "Register `TZC_PID0` reader"]
pub type R = crate::R<TZC_PID0_SPEC>;
// svd2rust-style generated reader alias for the single 8-bit ID field.
#[doc = "Field `PER_ID_0` reader - PER_ID_0"]
pub type PER_ID_0_R = crate::FieldReader;
// Field accessor for the TZC_PID0 register value (generated code).
impl R {
    #[doc = "Bits 0:7 - PER_ID_0"]
    #[inline(always)]
    pub fn per_id_0(&self) -> PER_ID_0_R {
        // Bits 0:7 — low byte of the register.
        PER_ID_0_R::new((self.bits & 0xff) as u8)
    }
}
#[doc = "Peripheral ID 0.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`tzc_pid0::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TZC_PID0_SPEC;
impl crate::RegisterSpec for TZC_PID0_SPEC {
    // The register is accessed as one raw 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`tzc_pid0::R`](R) reader structure"]
impl crate::Readable for TZC_PID0_SPEC {}
#[doc = "`reset()` method sets TZC_PID0 to value 0x60"]
impl crate::Resettable for TZC_PID0_SPEC {
    // Hardware reset value of the register.
    const RESET_VALUE: Self::Ux = 0x60;
}
|
// Copyright (c) 2021, Roel Schut. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use gdnative::api::CPUParticles2D;
use gdnative::prelude::*;
/// Godot native script attached to a `CPUParticles2D` node: fires the
/// particles once and frees the node when its timer elapses.
#[derive(NativeClass)]
#[inherit(CPUParticles2D)]
pub struct DestroyParticles {}
#[methods]
impl DestroyParticles {
    /// gdnative constructor; this script carries no state of its own.
    fn new(_owner: &CPUParticles2D) -> Self {
        DestroyParticles {}
    }
    /// Called when the node enters the scene tree: emit the particles a
    /// single time rather than looping.
    #[export]
    fn _ready(&self, owner: &CPUParticles2D) {
        owner.set_one_shot(true);
    }
    /// Frees the particles node. Presumably wired to a Timer node's
    /// `timeout` signal in the scene — TODO confirm in the .tscn.
    #[allow(non_snake_case)]
    #[export]
    fn _on_Timer_timeout(&self, owner: &CPUParticles2D) {
        owner.queue_free();
    }
}
|
use std::collections::HashMap;
use crate::utils::file2vec;
pub fn day6(filename: &String){
let contents = file2vec::<String>(filename);
let contents:GappyList = GappyList{list: &contents.iter().map(|x| x.to_owned().unwrap()).collect(), ptr: 0, size:contents.len()};
let ans: Vec<i32> = contents.into_iter()
.fold(vec![0,0], |mut acc, group|{
acc[0] += group.answers.len() as i32;
acc[1] += group.answered_by_all();
acc
});
println!("Part 1 ans: {}\nPart 2 ans: {}", ans[0], ans[1]);
}
/// Iterator state over a list of lines in which groups are separated by
/// empty strings ("gaps").
struct GappyList<'a>{
    // Borrowed input lines. NOTE(review): `&[String]` would be the more
    // idiomatic parameter type than `&Vec<String>`.
    list: &'a Vec<String>,
    // Cursor into `list`, advanced by the Iterator impl.
    ptr: usize,
    // Cached `list.len()`.
    size: usize,
    // parse: fn(&String)->T
}
/// Tally of one group's answers: per-character counts plus the group size.
struct GroupAnswer{
    // How many members of the group gave each answer letter.
    answers: HashMap<char, usize>,
    // Number of people in the group.
    size: usize
}
impl GroupAnswer {
    /// Counts the questions answered "yes" by every member of the group,
    /// i.e. the letters whose tally equals the group size.
    fn answered_by_all(&self) -> i32 {
        self.answers
            .values()
            .filter(|&&votes| votes == self.size)
            .count() as i32
    }
}
/// Tallies each character of `line` into `map` (answer letter -> count) and
/// returns the updated map. Takes `&str` instead of `&String` so any string
/// slice works; existing `&String` call sites coerce automatically. Uses the
/// `HashMap::entry` API instead of the manual get_mut/insert upsert dance.
fn parse_line(line: &str, mut map: HashMap<char, usize>) -> HashMap<char, usize> {
    for ch in line.chars() {
        *map.entry(ch).or_insert(0) += 1;
    }
    map
}
impl<'a> Iterator for GappyList<'a> {
    type Item = GroupAnswer;
    /// Yields one `GroupAnswer` per blank-line-delimited group: tallies the
    /// answer characters of consecutive non-empty lines and records how many
    /// lines (people) the group contained.
    fn next(&mut self) -> Option<Self::Item> {
        match self.ptr {
            x if x < self.size => {
                let mut map = HashMap::new();
                let mut row = &self.list[self.ptr];
                let mut size:usize = 0;
                // Consume lines until a blank separator or the end of input.
                while row != "" {
                    map = parse_line(row, map);
                    size += 1;
                    self.ptr += 1;
                    if self.ptr == self.size {
                        break
                    }
                    row = &self.list[self.ptr];
                };
                // Step past the blank separator line.
                self.ptr += 1;
                Some(GroupAnswer{ answers: map, size: size})
            },
            // Past the end: reset the cursor so the list can be re-iterated.
            _ => {self.ptr = 0; None}
        }
    }
}
pub mod sort {
    /// Sorts `seq` in place with insertion sort: O(n^2) comparisons, in-place.
    pub fn insertion_sort<T: PartialOrd>(seq: &mut [T]) {
        for i in 1..seq.len() {
            // Sink seq[i] leftwards until its left neighbor is not larger.
            let mut k = i;
            while k > 0 && seq[k - 1] > seq[k] {
                seq.swap(k, k - 1);
                k -= 1;
            }
        }
    }
    /// Recursive quicksort driven by [`hoare_partition`]. The returned pivot
    /// index holds its final sorted position, so it is excluded from both
    /// recursive calls.
    pub fn rquicksort<T: PartialOrd>(seq: &mut [T]) {
        if seq.len() < 2 {
            return;
        }
        let pivot = hoare_partition(seq);
        assert!(pivot < seq.len());
        rquicksort(&mut seq[..pivot]);
        rquicksort(&mut seq[pivot + 1..]);
    }
    /// Partitions `seq` such that elements smaller than a pivot value (the
    /// first element of `seq`) are placed to the left of the pivot, while
    /// elements equal or larger than pivot are placed to the right of the
    /// pivot. This is the Hoare partition scheme.
    pub fn hoare_partition<T: PartialOrd>(seq: &mut [T]) -> usize {
        assert!(!seq.is_empty());
        // Index of the pivot *element*; updated below whenever a swap moves it.
        let mut pivot = 0;
        let mut left = 0;
        let mut right = seq.len() - 1;
        loop {
            // Advance both scan indices past already-partitioned elements.
            while seq[left] < seq[pivot] {
                left += 1;
            }
            while seq[right] > seq[pivot] {
                right -= 1;
            }
            // If the two indices met, we're done.
            if left >= right {
                return right;
            }
            // In case of duplicate elements it's possible that the pivot value
            // is not unique but the algorithm will not advance either index
            // (because both equal the pivot), so nudge the left index forward
            // to either reach a non-pivot value or meet the other index.
            if seq[left] == seq[pivot] && seq[right] == seq[pivot] {
                left += 1;
                continue;
            }
            seq.swap(left, right);
            // If either side was the pivot, keep the pivot index tracking it.
            if pivot == left {
                pivot = right;
            } else if pivot == right {
                pivot = left;
            }
        }
    }
    /// Lomuto partition with the last element as pivot value: returns the
    /// index where elements smaller than the pivot end. The pivot itself is
    /// swapped into place only when it is smaller than the element there.
    pub fn lomuto_partition<T: PartialOrd>(seq: &mut [T]) -> usize {
        assert!(!seq.is_empty());
        let pivot = seq.len() - 1;
        // `store` is the boundary of the "smaller than pivot" prefix.
        let mut store = 0;
        for k in 0..pivot {
            if seq[k] < seq[pivot] {
                seq.swap(store, k);
                store += 1;
            }
        }
        if *seq.last().unwrap() < seq[store] {
            seq.swap(store, pivot);
        }
        store
    }
}
#[cfg(test)]
extern crate rand;
#[cfg(test)]
mod tests {
    use rand::{self, Rng};
    use super::*;
    /// Drives a sorting function through fixed edge cases (duplicates,
    /// single element, empty slice) plus 50k randomized fuzz rounds.
    /// NOTE(review): `gen_iter` is from the pre-0.5 `rand` API.
    fn test_sorter<S: Fn(&mut [i32])>(sort: S) {
        let mut array = [5, 10, 3, 3, 9, 2, 1];
        sort(&mut array);
        assert_eq!(array, [1, 2, 3, 3, 5, 9, 10]);
        let mut array = [5, 10, 3, 2, 3, 9, 2, 1];
        sort(&mut array);
        assert_eq!(array, [1, 2, 2, 3, 3, 5, 9, 10]);
        let mut array = [3, 5, 10, 3, 2, 3, 9, 2, 1];
        sort(&mut array);
        assert_eq!(array, [1, 2, 2, 3, 3, 3, 5, 9, 10]);
        let mut array = [5, 10, 3, 9, 2, 1];
        sort(&mut array);
        assert_eq!(array, [1, 2, 3, 5, 9, 10]);
        // All-equal and alternating-duplicate inputs.
        let mut array = [5, 5, 5];
        sort(&mut array);
        assert_eq!(array, [5, 5, 5]);
        let mut array = [5, 1, 5, 1, 5, 1, 5];
        sort(&mut array);
        assert_eq!(array, [1, 1, 1, 5, 5, 5, 5]);
        let mut array = [1];
        sort(&mut array);
        assert_eq!(array, [1]);
        let mut array: [i32; 0] = [];
        sort(&mut array);
        assert_eq!(array, []);
        // Taken from https://github.com/servo/rust-quicksort/blob/master/lib.rs
        let mut rng = rand::thread_rng();
        for _ in 0u32 .. 50000u32 {
            // Random length mapped into 1..=32 elements.
            let len: usize = rng.gen();
            let mut v: Vec<i32> = rng
                .gen_iter::<i32>()
                .take((len % 32) + 1)
                .collect();
            sort(&mut v[..]);
            // Verify the result is non-decreasing.
            for i in 0 .. v.len() - 1 {
                assert!(v[i] <= v[i + 1])
            }
        }
    }
    #[test]
    fn hoare_partition() {
        // Fixed cases pin the exact post-partition layout and pivot index.
        let mut array = [5, 10, 3, 3, 9, 2, 1];
        let pivot = sort::hoare_partition(&mut array);
        assert_eq!(array, [1, 2, 3, 3, 5, 9, 10]);
        assert_eq!(pivot, 4);
        let mut array = [5, 10, 3, 2, 3, 9, 2, 1];
        let pivot = sort::hoare_partition(&mut array);
        assert_eq!(array, [1, 2, 3, 2, 3, 5, 9, 10]);
        assert_eq!(pivot, 5);
        let mut array = [3, 5, 3, 2, 3, 9, 2, 1];
        let pivot = sort::hoare_partition(&mut array);
        assert_eq!(array, [1, 2, 3, 2, 3, 3, 9, 5]);
        assert_eq!(pivot, 5);
        let mut rng = rand::thread_rng();
        for _ in 0u32 .. 50000u32 {
            let len: usize = rng.gen();
            let mut v: Vec<i32> = rng
                .gen_iter::<i32>()
                .take((len % 32) + 1)
                .collect();
            let pivot = sort::hoare_partition(&mut v);
            // Everything left of the pivot must be strictly smaller than the
            // checked elements at/after it.
            // NOTE(review): the inner bound `v.len() - 1` skips the last
            // element — confirm whether that exclusion is intentional.
            for i in 0..pivot {
                for k in pivot..v.len() - 1 {
                    assert!(v[i] < v[k]);
                }
            }
        }
    }
    #[test]
    fn insertion_sort() {
        test_sorter(|mut array| sort::insertion_sort(&mut array));
    }
    #[test]
    fn rquicksort() {
        test_sorter(|mut array| sort::rquicksort(&mut array));
    }
}
|
use std::marker::PhantomData;
//TODO: Write and use this, instead of reference counting
/// Placeholder garbage collector (see the TODO above: intended to replace
/// reference counting eventually). Currently stateless.
///
/// `Default` is derived so the type satisfies clippy's `new_without_default`
/// and can be used wherever a `Default` bound is required; `new()` remains
/// the primary constructor.
#[derive(Debug, Default)]
pub struct GarbageCollector {}
impl GarbageCollector {
    /// Creates a new (stateless) collector.
    pub fn new() -> Self {
        GarbageCollector {}
    }
}
/// Placeholder for a garbage-collected pointer type. `PhantomData` records
/// the pointee type `T` without storing a value, keeping the type parameter
/// meaningful while the implementation is pending.
pub struct Gc<T> {
    _phantom_data: PhantomData<T>,
}
|
#[cfg(test)]
mod tests {
    use trybuild;
    /// Compile-time UI tests for the macro: one fixture must compile, the
    /// other (referencing a nonexistent id) must fail to compile.
    #[test]
    fn test_that_we_can_test_the_macro() {
        let t = trybuild::TestCases::new();
        t.pass("tests/ui/key-tests-assign-id-pass.rs");
        t.compile_fail("tests/ui/key-tests-assign-id-nonexist-fail.rs");
    }
}
|
mod vector;
mod ray;
mod sphere;
mod color;
mod material;
mod hit;
use crate::vector::{ Vector3 };
use crate::ray::{ Ray, Intersectable };
use crate::sphere::{ Sphere };
use crate::color::{ Color };
use crate::hit::{ Hit };
use crate::material::{ Material };
use std::fs::File;
use std::io::Write;
use rand::{ thread_rng, prelude::RngCore, Rng };
/// Reinterprets any sized value as its raw bytes (used to write the BMP
/// header verbatim).
///
/// SAFETY: the returned slice covers exactly `size_of::<T>()` bytes of a
/// live `T`, borrowed for the same lifetime as `p`.
/// NOTE(review): for a `T` containing padding, the padding bytes are
/// uninitialized and reading them is UB — the only caller here passes the
/// `#[repr(C, packed(1))]` `BMPHeader`, which has no padding.
fn any_as_u8_slice<T: Sized>(p: &T) -> &[u8] {
    unsafe {
        ::std::slice::from_raw_parts(
            (p as *const T) as *const u8,
            ::std::mem::size_of::<T>(),
        )
    }
}
/// BMP file header + BITMAPINFOHEADER-style info block + channel masks,
/// laid out byte-exactly (`packed(1)`) so it can be written to disk verbatim
/// via `any_as_u8_slice`.
#[repr(C, packed(1))]
struct BMPHeader {
    file_type: [u8; 2],           // magic "BM" (0x42, 0x4D)
    file_size: u32,
    reserved1: u16,
    reserved2: u16,
    bitmap_offset: u32,           // byte offset of the pixel data
    size: u32,                    // info-header size; 40 at the call site
    width: i32,
    height: i32,                  // positive height => bottom-up row order
    planes: u16,
    bits_per_pixel: u16,
    compression: u32,             // 3 at the call site => masks below apply
    size_of_bitmap: u32,
    horizontal_resolution: i32,
    vertical_resolution: i32,
    colors_used: u32,
    colors_important: u32,
    red_mask: [u8; 4],
    green_mask: [u8; 4],
    blue_mask: [u8; 4],
}
/// Intersects `ray` against every object and returns the hit with the
/// smallest distance, or `None` when nothing is hit.
fn get_closest_hit(ray: &Ray, objects: &Vec::<Box<dyn Intersectable>>) -> Option<Hit> {
    let mut nearest: Option<Hit> = None;
    for object in objects {
        if let Some(candidate) = object.intersect(ray) {
            // Keep the candidate when it beats the current best (or when
            // there is no best yet).
            let is_closer = match &nearest {
                Some(best) => candidate.distance < best.distance,
                None => true,
            };
            if is_closer {
                nearest = Some(candidate);
            }
        }
    }
    nearest
}
/// Recursively traces `ray` through the scene and returns the gathered
/// radiance (diffuse-only path tracing, smallpt-style).
fn get_ray_color(ray: &Ray, objects: &Vec::<Box<dyn Intersectable>>, rng: &mut dyn RngCore, depth: u64) -> Color {
    match get_closest_hit(&ray, &objects) {
        Some(hit) => {
            let mut color = hit.material.diffuse;
            // Russian roulette after 5 bounces: continue with probability
            // equal to the brightest diffuse component, boosting the carried
            // color by 1/p to stay unbiased; otherwise stop and return just
            // the emission. Depth 500 is a hard recursion cap.
            if depth > 5 {
                let rand: f64 = rng.gen();
                let max_component = color.r.max(color.g.max(color.b));
                if rand < max_component && depth < 500 {
                    color = color * (1.0 / max_component);
                } else {
                    return hit.material.emission;
                }
            }
            // Sample a bounce direction on the hemisphere around the normal
            // (the r1 azimuth / sqrt(r2) weighting matches smallpt's
            // cosine-weighted sampling).
            let r1 = rng.gen::<f64>() * std::f64::consts::TAU;
            let r2 = rng.gen::<f64>();
            let r2s = r2.sqrt();
            let w = hit.normal;
            // Helper axis least aligned with the normal avoids a degenerate
            // cross product when building the (u, v, w) basis.
            let tmp = if w.x.abs() > 0.1 {
                Vector3::new(0.0, 1.0, 0.0)
            } else {
                Vector3::new(1.0, 0.0, 0.0)
            };
            // NOTE(review): `u` is not normalized here (smallpt normalizes
            // it); the final direction is normalized below, but the sample
            // distribution may be slightly skewed — confirm intended.
            let u = Vector3::cross(&tmp, &w);
            let v = Vector3::cross(&w, &u);
            let new_dir = u * r1.cos() * r2s + v * r1.sin() * r2s + w * (1.0 - r2).sqrt();
            let new_ray = Ray::new(hit.position, new_dir.normalized());
            // Rendering equation: emitted + surface-attenuated incoming light.
            return hit.material.emission + color * get_ray_color(&new_ray, objects, rng, depth + 1);
        },
        None => {
            // Missed everything: black background.
            return Color::new(0.0, 0.0, 0.0);
        },
    }
}
use std::io::prelude::*;
/// Entry point: parses a whitespace-token scene file, path-traces the scene,
/// and writes the image as a 32-bit BMP to ./out_image.bmp.
fn main() {
    // Scene path: default, or the single CLI argument.
    let args: Vec<String> = std::env::args().collect();
    let scene_path;
    if args.len() == 1 {
        scene_path = "test.scene";
    } else if args.len() > 2 {
        panic!("Too many arguments!");
    } else {
        scene_path = &args[1];
    }
    let mut scene_file = File::open(scene_path).expect("Unable to open scene file!");
    let mut scene_source = String::new();
    scene_file.read_to_string(&mut scene_source).unwrap();
    // The scene format is a flat whitespace-separated token stream; line
    // breaks only act as token separators.
    let mut tokens = Vec::<&str>::new();
    for line in scene_source.lines() {
        for token in line.split_whitespace() {
            tokens.push(token);
        }
    }
    // Parser state and scene defaults.
    let mut token_index: usize = 0;
    let mut width: usize = 0;
    let mut height: usize = 0;
    let mut num_samples: usize = 1;
    let mut camera_position = Vector3::new(0.0, 0.0, 0.0);
    let mut camera_forward = Vector3::new(0.0, 0.0, 1.0).normalized();
    let mut objects = Vec::<Box<dyn Intersectable>>::new();
    // Parse `key = value`, `{ x y z }` vectors, and `Sphere { ... }` records.
    loop {
        if token_index >= tokens.len() {
            break;
        }
        // Token cursor. NOTE(review): panics (index out of bounds) on a
        // truncated scene file instead of reporting a parse error.
        let mut next = || {
            let token = &tokens[token_index];
            token_index += 1;
            return token;
        };
        let name = next();
        match name {
            &"width" => {
                assert_eq!(next(), &"=");
                width = next().parse::<usize>().unwrap();
            },
            &"height" => {
                assert_eq!(next(), &"=");
                height = next().parse::<usize>().unwrap();
            },
            &"samples" => {
                assert_eq!(next(), &"=");
                num_samples = next().parse::<usize>().unwrap();
            },
            &"camera_position" => {
                assert_eq!(next(), &"=");
                assert_eq!(next(), &"{");
                let mut position = Vector3::new(0.0, 0.0, 0.0);
                position.x = next().parse::<f64>().unwrap();
                position.y = next().parse::<f64>().unwrap();
                position.z = next().parse::<f64>().unwrap();
                assert_eq!(next(), &"}");
                camera_position = position;
            },
            &"camera_direction" => {
                assert_eq!(next(), &"=");
                assert_eq!(next(), &"{");
                let mut direction = Vector3::new(0.0, 0.0, 0.0);
                direction.x = next().parse::<f64>().unwrap();
                direction.y = next().parse::<f64>().unwrap();
                direction.z = next().parse::<f64>().unwrap();
                assert_eq!(next(), &"}");
                camera_forward = direction.normalized();
            },
            // Sphere { { pos } radius { diffuse } { emission } }
            &"Sphere" => {
                assert_eq!(next(), &"{");
                assert_eq!(next(), &"{");
                let mut position = Vector3::new(0.0, 0.0, 0.0);
                position.x = next().parse::<f64>().unwrap();
                position.y = next().parse::<f64>().unwrap();
                position.z = next().parse::<f64>().unwrap();
                assert_eq!(next(), &"}");
                let radius = next().parse::<f64>().unwrap();
                assert_eq!(next(), &"{");
                let mut diffuse = Color::new(0.0, 0.0, 0.0);
                diffuse.r = next().parse::<f64>().unwrap();
                diffuse.g = next().parse::<f64>().unwrap();
                diffuse.b = next().parse::<f64>().unwrap();
                assert_eq!(next(), &"}");
                assert_eq!(next(), &"{");
                let mut emission = Color::new(0.0, 0.0, 0.0);
                emission.r = next().parse::<f64>().unwrap();
                emission.g = next().parse::<f64>().unwrap();
                emission.b = next().parse::<f64>().unwrap();
                assert_eq!(next(), &"}");
                assert_eq!(next(), &"}");
                objects.push(Box::from(Sphere::new(position, radius, Material::new(diffuse, emission))));
            },
            _ => panic!("Unknown name!"),
        }
    }
    // NOTE: Open file here so we know that we will be able to output image after its been generated
    let mut file = File::create("./out_image.bmp").expect("Unable to create file!");
    /*
    let blue_material = Material::new(
        Color::new(0.2, 0.4, 0.8),
        Color::new(0.0, 0.0, 0.0),
    );
    let light_material = Material::new(
        Color::new(1.0, 1.0, 1.0),
        Color::new(2.5, 2.5, 2.5),
    );
    objects.push(Box::from(Sphere::new(Vector3::new(2.0, 0.0, 0.0), 2.0, blue_material)));
    objects.push(Box::from(Sphere::new(Vector3::new(-2.0, 0.0, -1.0), 1.0, light_material)));
    */
    // Camera basis from the forward vector and the world up axis (+Y).
    let camera_right = Vector3::cross(&Vector3::new(0.0, 1.0, 0.0), &camera_forward).normalized();
    let camera_up = Vector3::cross(&camera_forward, &camera_right).normalized();
    let mut pixels = vec![Color::new(0.0, 0.0, 0.0); (width * height) as usize];
    // TODO: Seeded per pixel
    let mut rng = thread_rng();
    let aspect = width as f64 / height as f64;
    let mut i: usize = 0;
    for y in 0..height {
        // Map pixel coordinates into [-1, 1] normalized screen space.
        let norm_y = (y as f64 / height as f64) * 2.0 - 1.0;
        for x in 0..width {
            let norm_x = (x as f64 / width as f64) * 2.0 - 1.0;
            let ray = Ray::new(
                camera_position,
                (camera_forward
                    + (camera_right * (norm_x * aspect))
                    + (camera_up * norm_y)).normalized()
            );
            // Average num_samples stochastic estimates for this pixel.
            // NOTE(review): all samples share one primary ray (no sub-pixel
            // jitter), so this only reduces path-tracing noise — no AA.
            let pixel = &mut pixels[(x + y * width) as usize];
            for _ in 0..num_samples {
                *pixel = *pixel + get_ray_color(&ray, &objects, &mut rng, 0) * (1.0 / num_samples as f64);
            }
            i += 1;
            if i % 50 == 0 {
                print!("Rendering: {:.2}% ({} of {})\r", (i as f64 / (width * height) as f64) * 100.0, i, width * height);
            }
        }
    }
    println!("Rendering: 100.00%");
    // 32bpp header; compression 3 means the channel masks below describe the
    // per-pixel byte layout written in the output loop.
    let header = BMPHeader {
        file_type: [0x42, 0x4D],
        file_size: (std::mem::size_of::<BMPHeader>() + (width * height * 4)) as u32,
        reserved1: 0,
        reserved2: 0,
        bitmap_offset: std::mem::size_of::<BMPHeader>() as u32,
        size: 40,
        width: width as i32,
        height: height as i32,
        planes: 1,
        bits_per_pixel: 32,
        compression: 3,
        size_of_bitmap: 0,
        horizontal_resolution: 0,
        vertical_resolution: 0,
        colors_used: 0,
        colors_important: 0,
        red_mask: [0xFF, 0x00, 0x00, 0x00],
        green_mask: [0x00, 0xFF, 0x00, 0x00],
        blue_mask: [0x00, 0x00, 0xFF, 0x00],
    };
    file.write_all(any_as_u8_slice(&header)).expect("Unable to write header to file!");
    // Emit 4 bytes per pixel (r, g, b, pad) scaled from [0, 1] to [0, 255].
    i = 0;
    for pixel in pixels {
        file.write_all(&[
            (pixel.r * 255.0) as u8,
            (pixel.g * 255.0) as u8,
            (pixel.b * 255.0) as u8,
            0 as u8,
        ]).expect("Unable to write to file!");
        i += 1;
        if i % 500 == 0 {
            print!("Ouputing: {:.2}% ({} of {})\r", (i as f64 / (width * height) as f64) * 100.0, i, width * height);
        }
    }
    println!("Ouputing: 100.00%");
    println!("Done.");
}
|
use sudo_test::{Command, Env, TextFile, User};
use crate::{Result, PASSWORD, USERNAME};
/// A wrong password followed by the correct one must still authenticate:
/// sudo re-prompts and accepts the second attempt.
#[test]
fn can_retry_password() -> Result<()> {
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
        .user(User(USERNAME).password(PASSWORD))
        .build()?;
    // Feed a bad password then the real one over stdin (`sudo -S`).
    Command::new("sh")
        .arg("-c")
        .arg(format!(
            "(echo wrong-password; echo {PASSWORD}) | sudo -S true"
        ))
        .as_user(USERNAME)
        .output(&env)?
        .assert_success()
}
/// By default sudo allows 3 password attempts: after 3 failures it exits
/// with status 1 and never consumes the correct password piped in fourth.
#[test]
fn three_retries_allowed_by_default() -> Result<()> {
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
        .user(User(USERNAME).password(PASSWORD))
        .build()?;
    let output = Command::new("sh")
        .arg("-c")
        .arg(format!(
            "(for i in $(seq 1 3); do echo wrong-password; done; echo {PASSWORD}) | sudo -S true"
        ))
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    let stderr = output.stderr();
    let diagnostic = if sudo_test::is_original_sudo() {
        "3 incorrect password attempts"
    } else {
        "3 incorrect authentication attempts"
    };
    // Use the captured `stderr` binding (was a redundant second call to
    // `output.stderr()`), consistent with `defaults_passwd_tries` below.
    assert_contains!(stderr, diagnostic);
    let password_prompt = if sudo_test::is_original_sudo() {
        "password for ferris:"
    } else {
        "Password:"
    };
    // Exactly one prompt per allowed attempt.
    let num_password_prompts = stderr
        .lines()
        .filter(|line| line.contains(password_prompt))
        .count();
    assert_eq!(3, num_password_prompts);
    Ok(())
}
/// `Defaults passwd_tries=2` lowers the attempt limit: after 2 wrong
/// passwords sudo gives up, emitting exactly 2 prompts.
#[test]
fn defaults_passwd_tries() -> Result<()> {
    let env = Env(format!(
        "{USERNAME} ALL=(ALL:ALL) ALL
Defaults passwd_tries=2"
    ))
    .user(User(USERNAME).password(PASSWORD))
    .build()?;
    let output = Command::new("sh")
        .arg("-c")
        .arg(format!(
            "(for i in $(seq 1 2); do echo wrong-password; done; echo {PASSWORD}) | sudo -S true"
        ))
        .as_user(USERNAME)
        .output(&env)?;
    assert!(!output.status().success());
    assert_eq!(Some(1), output.status().code());
    let stderr = output.stderr();
    let diagnostic = if sudo_test::is_original_sudo() {
        "2 incorrect password attempts"
    } else {
        "2 incorrect authentication attempts"
    };
    assert_contains!(stderr, diagnostic);
    let password_prompt = if sudo_test::is_original_sudo() {
        "password for ferris:"
    } else {
        "Password:"
    };
    // Exactly one prompt per allowed attempt.
    let num_password_prompts = stderr
        .lines()
        .filter(|line| line.contains(password_prompt))
        .count();
    assert_eq!(2, num_password_prompts);
    Ok(())
}
// this is a PAM security feature
/// After a failed attempt PAM enforces a delay before the next prompt, so
/// the two prompt timestamps recorded by the helper script must be well
/// apart.
#[test]
fn retry_is_not_allowed_immediately() -> Result<()> {
    let script_path = "/tmp/script.sh";
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
        .file(
            script_path,
            TextFile(include_str!("password_retry/time-password-retry.sh")).chmod("777"),
        )
        .user(User(USERNAME).password(PASSWORD))
        .build()?;
    let delta_millis = time_password_retry(script_path, env)?;
    // by default, the retry delay should be around 2 seconds
    // use a lower value to avoid sporadic failures
    assert!(delta_millis >= 1250);
    Ok(())
}
/// Runs the timing script as the test user and returns the elapsed
/// milliseconds between the two timestamps it prints (one per password
/// prompt). Panics via `assert_eq!` if the script did not print exactly two
/// parseable timestamps.
fn time_password_retry(script_path: &str, env: Env) -> Result<u64> {
    let stdout = Command::new("sh")
        .arg(script_path)
        .as_user(USERNAME)
        .output(&env)?
        .stdout()?;
    // Keep only lines that parse as integers (the timestamps).
    let timestamps = stdout
        .lines()
        .filter_map(|line| line.parse::<u64>().ok())
        .collect::<Vec<_>>();
    assert_eq!(2, timestamps.len());
    // Removed a leftover `dbg!` that printed the delta on every call.
    Ok(timestamps[1] - timestamps[0])
}
/// Raising the failure delay via `pam_faildelay` (and disabling pam_unix's
/// built-in delay with `nodelay`) must lengthen the observed gap between
/// password prompts.
#[test]
fn can_control_retry_delay_using_pam() -> Result<()> {
    const NEW_DELAY_MICROS: u32 = 5_000_000;
    let script_path = "/tmp/script.sh";
    let check_env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
        .file(
            script_path,
            TextFile(include_str!("password_retry/time-password-retry.sh")).chmod("777"),
        )
        .user(User(USERNAME).password(PASSWORD))
        .build()?;
    // Sanity check: the container's stock common-auth (comments and blank
    // lines stripped) must match what this test later replaces.
    let common_auth = Command::new("cat")
        .arg("/etc/pam.d/common-auth")
        .output(&check_env)?
        .stdout()?;
    let common_auth = common_auth
        .lines()
        .filter(|line| !line.trim_start().starts_with('#') && !line.trim().is_empty())
        .collect::<Vec<&str>>()
        .join("\n");
    assert_eq!(
        "auth\t[success=1 default=ignore]\tpam_unix.so nullok
auth\trequisite\t\t\tpam_deny.so
auth\trequired\t\t\tpam_permit.so",
        common_auth,
        "the stock /etc/pam.d/common-auth file has changed; this test needs to be updated"
    );
    // Baseline delay with the stock PAM configuration.
    let initial_delta_millis = time_password_retry(script_path, check_env)?;
    // increase the retry delay from 2 seconds to 5
    let env = Env(format!("{USERNAME} ALL=(ALL:ALL) ALL"))
        .user(User(USERNAME).password(PASSWORD))
        .file(
            "/etc/pam.d/common-auth",
            format!(
                "auth optional pam_faildelay.so delay={NEW_DELAY_MICROS}
auth [success=1 default=ignore] pam_unix.so nullok nodelay
auth requisite pam_deny.so
auth required pam_permit.so"
            ),
        )
        .file(
            script_path,
            TextFile(include_str!("password_retry/time-password-retry.sh")).chmod("777"),
        )
        .build()?;
    let newer_delta_millis = time_password_retry(script_path, env)?;
    // use a lower value to avoid sporadic failures
    assert!(newer_delta_millis >= 3_100);
    assert!(
        newer_delta_millis > initial_delta_millis,
        "password retry delay appears to not have increased.
it could be that the image defaults to a high retry delay value; \
you may want to increase NEW_DELAY_MICROS"
    );
    Ok(())
}
|
use bytes::{Buf, BufMut, Bytes, BytesMut};
use super::{Body, Frame};
use crate::utils::Writeable;
/// METADATA_PUSH frame body: it carries only an optional metadata payload
/// (no data part, no flag-dependent fields).
#[derive(Debug, Eq, PartialEq)]
pub struct MetadataPush {
    metadata: Option<Bytes>,
}
/// Builder for METADATA_PUSH frames.
/// NOTE(review): "Builer" is a typo for "Builder", but the name is public
/// API, so it is kept unchanged here.
pub struct MetadataPushBuiler {
    stream_id: u32,
    flag: u16,
    value: MetadataPush,
}
impl MetadataPushBuiler {
    /// Starts a builder with no metadata; reached via `MetadataPush::builder`.
    fn new(stream_id: u32, flag: u16) -> MetadataPushBuiler {
        MetadataPushBuiler {
            stream_id,
            flag,
            value: MetadataPush { metadata: None },
        }
    }
    /// Sets the metadata payload for the frame being built.
    pub fn set_metadata(mut self, metadata: Bytes) -> Self {
        self.value.metadata = Some(metadata);
        self
    }
    /// Consumes the builder, producing a complete METADATA_PUSH frame.
    pub fn build(self) -> Frame {
        Frame::new(self.stream_id, Body::MetadataPush(self.value), self.flag)
    }
}
impl MetadataPush {
    /// Decodes a METADATA_PUSH body: the entire remaining buffer is taken as
    /// the metadata payload. The flag word carries no information for this
    /// frame type, hence the `_` prefix (fixes an unused-variable warning).
    pub(crate) fn decode(_flag: u16, bf: &mut BytesMut) -> crate::Result<MetadataPush> {
        Ok(MetadataPush {
            metadata: Some(bf.split().freeze()),
        })
    }
    /// Starts building a METADATA_PUSH frame.
    pub fn builder(stream_id: u32, flag: u16) -> MetadataPushBuiler {
        MetadataPushBuiler::new(stream_id, flag)
    }
    /// Borrows the metadata payload, if present.
    pub fn get_metadata(&self) -> Option<&Bytes> {
        self.metadata.as_ref()
    }
    /// Splits into `(data, metadata)`; METADATA_PUSH never has a data part.
    pub fn split(self) -> (Option<Bytes>, Option<Bytes>) {
        (None, self.metadata)
    }
}
impl Writeable for MetadataPush {
    /// Appends the metadata payload (if any) to `bf`.
    fn write_to(&self, bf: &mut BytesMut) {
        if let Some(v) = &self.metadata {
            bf.extend_from_slice(v);
        }
    }
    /// Number of bytes `write_to` will append.
    fn len(&self) -> usize {
        self.metadata.as_ref().map_or(0, |v| v.len())
    }
}
|
/// Hamming distance between two strings: the number of positions at which
/// their characters differ.
///
/// # Errors
/// Returns `Err` when the strings have different numbers of characters.
/// (The original compared *byte* lengths but *char* positions, which
/// disagreed on multi-byte UTF-8 input; both sides now count chars.)
pub fn hamming_distance(a: &str, b: &str) -> Result<usize, String> {
    if a.chars().count() != b.chars().count() {
        return Err("Strings need to be of equal length".to_string());
    }
    Ok(a.chars().zip(b.chars()).filter(|(x, y)| x != y).count())
}
/// Demonstrates Rust control flow: `if` statements and expressions, `loop`,
/// `while`, `for` over collections and ranges, and a Fibonacci helper.
fn main() {
    // if expression
    let control = 3;
    if control < 5 {
        println!("Control number is lower than 5.");
    } else {
        println!("Control number is NOT lower than 5.");
    }
    // multiple conditions with else if
    for candidate in [15, 12, 17].iter() {
        check_divisibility(*candidate);
    }
    // if in a let statement
    let condition = false;
    let x = if condition { 12 } else { 34 };
    println!("The value of x is: {}", x);
    // simple loop (ten iterations, then a divider)
    for _ in 0..10 {
        println!("I'm looping.");
    }
    println!("===========");
    // while loop
    let mut remaining = 10;
    while remaining > 0 {
        println!("I'm looping with while condition.");
        remaining -= 1;
    }
    println!("===========");
    // looping through a collection with for
    let array = [3, 4, 7, 13, 19, 23];
    for element in &array {
        println!("Element of an array: {}", element);
    }
    println!("===========");
    // using a reversed Range in a for loop
    for number in (1..10).rev() {
        println!("== {} ==", number);
    }
    println!("== START ==");
    // get the nth Fibonacci number
    let fib = get_fib(5);
    println!("Fibonacci number: {}", fib);
}
/// Prints which of 5, 4, 3, or 2 divides `x`, checked in that order, or a
/// fallback message when none of them do.
fn check_divisibility(x: i32) {
    let message = if x % 5 == 0 {
        "The number is divisible by 5."
    } else if x % 4 == 0 {
        "The number is divisible by 4."
    } else if x % 3 == 0 {
        "The number is divisible by 3."
    } else if x % 2 == 0 {
        "The number is divisible by 2."
    } else {
        "The number is not divisible by 5, 4, 3 or 2."
    };
    println!("{}", message);
}
/// Returns the `x`-th Fibonacci number using this sequence's indexing:
/// fib(0) = fib(1) = 1, fib(2) = 2, fib(5) = 8.
fn get_fib(x: u32) -> u64 {
    // Walk the pair (prev, curr) forward; the range is empty for x < 2,
    // so the seeds already cover the base cases.
    let (mut prev, mut curr) = (1u64, 1u64);
    for _ in 2..=x {
        let next = prev + curr;
        prev = curr;
        curr = next;
    }
    curr
}
|
//! Sources for types can be found
//! [here](http://img.pokemondb.net/images/typechart-gen2345.png).
/// The 17 Pokémon types of generations II–V (Fairy did not exist yet),
/// listed alphabetically.
pub enum TypeName {
    Bug,
    Dark,
    Dragon,
    Electric,
    Fighting,
    Fire,
    Flying,
    Ghost,
    Grass,
    Ground,
    Ice,
    Normal,
    Poison,
    Psychic,
    Rock,
    Steel,
    Water,
}
/// Attack effectiveness multiplier: `Double` = 2x, `Equal` = 1x,
/// `Half` = 0.5x, `Zero` = no effect.
pub enum Damage {
    Double,
    Equal,
    Half,
    Zero,
}
/// One row of a type's offensive chart: the multiplier applied when
/// attacking a Pokémon of type `type_name`.
pub struct DamageTo {
    damage: Damage,
    type_name: TypeName,
}
/// A Pokémon type: its name plus its offensive effectiveness against all 17
/// types, stored in alphabetical order (Bug through Water).
pub struct Type {
    name: TypeName,
    to: [DamageTo; 17],
}
/// Bug attack effectiveness (gens II–V): double vs Dark/Grass/Psychic;
/// half vs Fighting/Fire/Flying/Ghost/Poison/Steel; neutral otherwise.
pub static BUG: Type = Type {
    name: TypeName::Bug,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Double, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        // FIX: Bug is not very effective against Fighting (was Equal).
        DamageTo { damage: Damage::Half, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Half, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Half, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Half, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Double, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        // FIX: Bug is not very effective against Poison since gen II (was Double).
        DamageTo { damage: Damage::Half, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Double, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        // FIX: Bug is neutral against Water (was Half).
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Dark attack effectiveness (gens II–V): double vs Ghost/Psychic;
/// half vs Dark/Fighting/Steel; neutral otherwise.
pub static DARK: Type = Type {
    name: TypeName::Dark,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Half, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        // FIX: Dark is not very effective against Fighting (was Equal).
        DamageTo { damage: Damage::Half, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Double, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        // FIX: Dark is neutral against Water (was Half).
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Dragon attack effectiveness (gens II–V): double vs Dragon; half vs Steel;
/// neutral otherwise.
pub static DRAGON: Type = Type {
    name: TypeName::Dragon,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Double, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Electric attack effectiveness (gens II–V): double vs Flying/Water;
/// half vs Dragon/Electric/Grass; no effect vs Ground; neutral otherwise.
pub static ELECTRIC: Type = Type {
    name: TypeName::Electric,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Half, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Half, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Double, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Half, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Zero, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Steel },
        // FIX: Electric is super effective against Water (was Equal).
        DamageTo { damage: Damage::Double, type_name: TypeName::Water },
    ],
};
/// Fighting attack effectiveness (gens II–V): double vs
/// Dark/Ice/Normal/Rock/Steel; half vs Bug/Flying/Poison/Psychic;
/// no effect vs Ghost; neutral otherwise.
pub static FIGHTING: Type = Type {
    name: TypeName::Fighting,
    to: [
        DamageTo { damage: Damage::Half, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Double, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Half, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Zero, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Double, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Half, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Half, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Double, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Double, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Fire attack effectiveness (gens II–V): double vs Bug/Grass/Ice/Steel;
/// half vs Dragon/Fire/Rock/Water; neutral otherwise.
pub static FIRE: Type = Type {
    name: TypeName::Fire,
    to: [
        DamageTo { damage: Damage::Double, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Half, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Half, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Double, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Half, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Double, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Half, type_name: TypeName::Water },
    ],
};
/// Flying attack effectiveness (gens II–V): double vs Bug/Fighting/Grass;
/// half vs Electric/Rock/Steel; neutral otherwise.
pub static FLYING: Type = Type {
    name: TypeName::Flying,
    to: [
        DamageTo { damage: Damage::Double, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Half, type_name: TypeName::Electric },
        // FIX: Flying is super effective against Fighting (was Equal).
        DamageTo { damage: Damage::Double, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Double, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        // FIX: Flying is neutral against Poison (was Double).
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Half, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Ghost attack effectiveness (gens II–V): double vs Ghost/Psychic;
/// half vs Dark/Steel; no effect vs Normal; neutral otherwise.
pub static GHOST: Type = Type {
    name: TypeName::Ghost,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Half, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Zero, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Double, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Grass attack effectiveness (gens II–V): double vs Ground/Rock/Water;
/// half vs Bug/Dragon/Fire/Flying/Grass/Poison/Steel; neutral otherwise.
pub static GRASS: Type = Type {
    name: TypeName::Grass,
    to: [
        DamageTo { damage: Damage::Half, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Half, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Half, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Half, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Half, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Half, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Double, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Double, type_name: TypeName::Water },
    ],
};
/// Ground attack effectiveness (gens II–V): double vs
/// Electric/Fire/Poison/Rock/Steel; half vs Bug/Grass; no effect vs Flying;
/// neutral otherwise.
pub static GROUND: Type = Type {
    name: TypeName::Ground,
    to: [
        DamageTo { damage: Damage::Half, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Double, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Double, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Zero, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Half, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Double, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Double, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Double, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Ice-type attacks against each defending
/// type, listed alphabetically by defender.
pub static ICE: Type = Type {
    name: TypeName::Ice,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Double, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Half, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Double, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Double, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Half, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Half, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Normal-type attacks against each defending
/// type, listed alphabetically by defender.
pub static NORMAL: Type = Type {
    // Fixed: was `TypeName::Bug` (copy-paste error); this entry is the
    // Normal type, as every damage row below confirms (Ghost immunity,
    // Rock/Steel resistances are Normal's signature matchups).
    name: TypeName::Normal,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        // Ghost-types are immune to Normal attacks.
        DamageTo { damage: Damage::Zero, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Half, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Poison-type attacks against each defending
/// type, listed alphabetically by defender.
pub static POISON: Type = Type {
    // Fixed: was `TypeName::Bug` (copy-paste error); this entry is the
    // Poison type, as the rows below confirm (2x vs Grass, 0x vs Steel
    // are Poison's signature matchups).
    name: TypeName::Poison,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Half, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Double, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Half, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Half, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Half, type_name: TypeName::Rock },
        // Steel-types are immune to Poison attacks.
        DamageTo { damage: Damage::Zero, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Psychic-type attacks against each defending
/// type, listed alphabetically by defender.
pub static PSYCHIC: Type = Type {
    name: TypeName::Psychic,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        // Dark-types are immune to Psychic attacks.
        DamageTo { damage: Damage::Zero, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Double, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Double, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Half, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Rock-type attacks against each defending
/// type, listed alphabetically by defender.
pub static ROCK: Type = Type {
    name: TypeName::Rock,
    to: [
        DamageTo { damage: Damage::Double, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Half, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Double, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Double, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Half, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Rock },
        // Fixed: Steel resists Rock (1/2x) and Water takes neutral damage
        // from Rock per the standard type chart; the original had these two
        // values transposed (Steel: Equal, Water: Half).
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Steel-type attacks against each defending
/// type, listed alphabetically by defender.
pub static STEEL: Type = Type {
    name: TypeName::Steel,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Half, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Half, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Double, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Half, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Half, type_name: TypeName::Water },
    ],
};
/// Damage multipliers dealt by Water-type attacks against each defending
/// type, listed alphabetically by defender.
pub static WATER: Type = Type {
    name: TypeName::Water,
    to: [
        DamageTo { damage: Damage::Equal, type_name: TypeName::Bug },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Dark },
        DamageTo { damage: Damage::Half, type_name: TypeName::Dragon },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Electric },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Fighting },
        DamageTo { damage: Damage::Double, type_name: TypeName::Fire },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Flying },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ghost },
        DamageTo { damage: Damage::Half, type_name: TypeName::Grass },
        DamageTo { damage: Damage::Double, type_name: TypeName::Ground },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Ice },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Normal },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Poison },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Psychic },
        DamageTo { damage: Damage::Double, type_name: TypeName::Rock },
        DamageTo { damage: Damage::Equal, type_name: TypeName::Steel },
        DamageTo { damage: Damage::Half, type_name: TypeName::Water },
    ],
};
|
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register taken at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value accumulated by field writers before being stored.
    bits: u32,
}
impl super::CHIPREV {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed both the reader and the writer with the
        // current register value so fields the closure does not touch are
        // written back unchanged.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value: any field
        // the closure does not set is written as its reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
/// Reader for the `SIPART` field value.
pub struct SIPARTR {
    bits: u16,
}
impl SIPARTR {
    /// Returns the field value as raw bits.
    #[inline]
    pub fn bits(&self) -> u16 {
        self.bits
    }
}
/// Possible values of the field `REVMAJ`.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum REVMAJR {
    /// Apollo3 revision B value.
    B,
    /// Apollo3 revision A value.
    A,
    /// Reserved
    _Reserved(u8),
}
impl REVMAJR {
    /// Value of the field as raw bits.
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            Self::B => 2,
            Self::A => 1,
            Self::_Reserved(raw) => *raw,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> REVMAJR {
        match value {
            1 => Self::A,
            2 => Self::B,
            other => Self::_Reserved(other),
        }
    }
    /// Checks if the value of the field is `B`.
    #[inline]
    pub fn is_b(&self) -> bool {
        matches!(self, Self::B)
    }
    /// Checks if the value of the field is `A`.
    #[inline]
    pub fn is_a(&self) -> bool {
        matches!(self, Self::A)
    }
}
/// Possible values of the field `REVMIN`.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum REVMINR {
    /// Apollo3 minor rev 1. value.
    REV1,
    /// Apollo3 minor rev 0. Minor revision value, succeeding minor revisions will increment from this value. value.
    REV0,
    /// Reserved
    _Reserved(u8),
}
impl REVMINR {
    /// Value of the field as raw bits.
    #[inline]
    pub fn bits(&self) -> u8 {
        match self {
            Self::REV1 => 2,
            Self::REV0 => 1,
            Self::_Reserved(raw) => *raw,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _from(value: u8) -> REVMINR {
        match value {
            1 => Self::REV0,
            2 => Self::REV1,
            other => Self::_Reserved(other),
        }
    }
    /// Checks if the value of the field is `REV1`.
    #[inline]
    pub fn is_rev1(&self) -> bool {
        matches!(self, Self::REV1)
    }
    /// Checks if the value of the field is `REV0`.
    #[inline]
    pub fn is_rev0(&self) -> bool {
        matches!(self, Self::REV0)
    }
}
/// Write proxy for the `SIPART` field (bits 8:19).
pub struct _SIPARTW<'a> {
    w: &'a mut W,
}
impl<'a> _SIPARTW<'a> {
    /// Writes raw bits to the field.
    #[inline]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        const MASK: u16 = 4095;
        const OFFSET: u8 = 8;
        // Clear the field's bit range, then OR in the masked value.
        let cleared = self.w.bits & !((MASK as u32) << OFFSET);
        self.w.bits = cleared | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `REVMAJ`.
pub enum REVMAJW {
    /// Apollo3 revision B value.
    B,
    /// Apollo3 revision A value.
    A,
}
impl REVMAJW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw field encoding for each named variant.
        match self {
            REVMAJW::B => 2,
            REVMAJW::A => 1,
        }
    }
}
/// Write proxy for the `REVMAJ` field (bits 4:7).
pub struct _REVMAJW<'a> {
    w: &'a mut W,
}
impl<'a> _REVMAJW<'a> {
    /// Writes `variant` to the field.
    #[inline]
    pub fn variant(self, variant: REVMAJW) -> &'a mut W {
        unsafe { self.bits(variant._bits()) }
    }
    /// Apollo3 revision B value.
    #[inline]
    pub fn b(self) -> &'a mut W {
        self.variant(REVMAJW::B)
    }
    /// Apollo3 revision A value.
    #[inline]
    pub fn a(self) -> &'a mut W {
        self.variant(REVMAJW::A)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 15;
        const OFFSET: u8 = 4;
        // Clear the field's bit range, then OR in the masked value.
        let cleared = self.w.bits & !((MASK as u32) << OFFSET);
        self.w.bits = cleared | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
/// Values that can be written to the field `REVMIN`.
pub enum REVMINW {
    /// Apollo3 minor rev 1. value.
    REV1,
    /// Apollo3 minor rev 0. Minor revision value, succeeding minor revisions will increment from this value. value.
    REV0,
}
impl REVMINW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> u8 {
        // Raw field encoding for each named variant.
        match self {
            REVMINW::REV1 => 2,
            REVMINW::REV0 => 1,
        }
    }
}
/// Write proxy for the `REVMIN` field (bits 0:3).
pub struct _REVMINW<'a> {
    w: &'a mut W,
}
impl<'a> _REVMINW<'a> {
    /// Writes `variant` to the field.
    #[inline]
    pub fn variant(self, variant: REVMINW) -> &'a mut W {
        unsafe { self.bits(variant._bits()) }
    }
    /// Apollo3 minor rev 1. value.
    #[inline]
    pub fn rev1(self) -> &'a mut W {
        self.variant(REVMINW::REV1)
    }
    /// Apollo3 minor rev 0. Minor revision value, succeeding minor revisions will increment from this value. value.
    #[inline]
    pub fn rev0(self) -> &'a mut W {
        self.variant(REVMINW::REV0)
    }
    /// Writes raw bits to the field.
    #[inline]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        const MASK: u8 = 15;
        const OFFSET: u8 = 0;
        // Clear the field's bit range, then OR in the masked value.
        let cleared = self.w.bits & !((MASK as u32) << OFFSET);
        self.w.bits = cleared | (((value & MASK) as u32) << OFFSET);
        self.w
    }
}
impl R {
    /// Value of the whole register as raw bits.
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    /// Bits 8:19 - Silicon Part ID.
    #[inline]
    pub fn sipart(&self) -> SIPARTR {
        const MASK: u32 = 4095;
        const OFFSET: u8 = 8;
        SIPARTR {
            bits: ((self.bits >> OFFSET) & MASK) as u16,
        }
    }
    /// Bits 4:7 - Major Revision ID.
    #[inline]
    pub fn revmaj(&self) -> REVMAJR {
        const MASK: u32 = 15;
        const OFFSET: u8 = 4;
        REVMAJR::_from(((self.bits >> OFFSET) & MASK) as u8)
    }
    /// Bits 0:3 - Minor Revision ID.
    #[inline]
    pub fn revmin(&self) -> REVMINR {
        const MASK: u32 = 15;
        const OFFSET: u8 = 0;
        REVMINR::_from(((self.bits >> OFFSET) & MASK) as u8)
    }
}
impl W {
    /// Reset value of the register.
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 1 }
    }
    /// Writes raw bits to the whole register.
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    /// Bits 8:19 - Silicon Part ID.
    #[inline]
    pub fn sipart(&mut self) -> _SIPARTW {
        _SIPARTW { w: self }
    }
    /// Bits 4:7 - Major Revision ID.
    #[inline]
    pub fn revmaj(&mut self) -> _REVMAJW {
        _REVMAJW { w: self }
    }
    /// Bits 0:3 - Minor Revision ID.
    #[inline]
    pub fn revmin(&mut self) -> _REVMINW {
        _REVMINW { w: self }
    }
}
|
use std::collections::HashMap;
use std::path::PathBuf;

use chrono::NaiveDate;
use include_dir::{include_dir, Dir, File};
use markdown::mdast::Node;
use miette::{Context, IntoDiagnostic, Result};
use serde::{Deserialize, Serialize};

use crate::posts::{MarkdownAst, Post};
use super::{
    blog::ValidateMarkdown,
    date::{ByRecency, PostedOn},
    title::Title,
};
// All TIL markdown sources, embedded into the binary at compile time.
pub(crate) static TIL_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/../til");
/// Collection of every parsed TIL ("today I learned") post.
#[derive(Debug, Clone)]
pub(crate) struct TilPosts {
    pub(crate) posts: Vec<TilPost>,
}
/// A single TIL post: the shared `Post` machinery specialized with TIL front matter.
pub(crate) type TilPost = Post<FrontMatter>;
/// Front matter parsed from the top of each TIL markdown file.
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
pub(crate) struct FrontMatter {
    pub title: String,
    // Publication date; exposed through the `PostedOn` impl below.
    pub date: NaiveDate,
    // URL slug; must be unique across all TILs (checked by `TilPosts::validate`).
    pub slug: String,
}
impl PostedOn for FrontMatter {
    /// A TIL's posted-on date is its front-matter `date` field.
    fn posted_on(&self) -> chrono::NaiveDate {
        self.date
    }
}
impl Title for FrontMatter {
    /// A TIL's title is its front-matter `title` field.
    fn title(&self) -> &str {
        &self.title
    }
}
impl TilPost {
    /// Parses one embedded markdown file into a post: markdown AST plus its
    /// deserialized front matter.
    fn from_file(file: &File) -> Result<Self> {
        let ast = MarkdownAst::from_file(file)?;
        let frontmatter: FrontMatter = ast.frontmatter()?;
        Ok(Self {
            path: file.path().to_owned(),
            ast,
            frontmatter,
        })
    }
    /// Runs all per-post validations (currently just image checks).
    pub(crate) fn validate(&self) -> Result<()> {
        self.validate_images()
    }
    /// Validates the markdown's image references against this post's slug path.
    fn validate_images(&self) -> Result<()> {
        let slug_path = PathBuf::from(&self.frontmatter.slug);
        Node::Root(self.ast.0.clone()).validate_images(&slug_path)?;
        Ok(())
    }
}
impl TilPosts {
pub fn from_static_dir() -> Result<Self> {
Self::from_dir(&TIL_DIR)
}
pub fn from_dir(dir: &Dir) -> Result<Self> {
let posts = dir
.find("**/*.md")
.into_diagnostic()?
.filter_map(|e| e.as_file())
.map(TilPost::from_file)
.collect::<Result<Vec<_>>>()
.wrap_err("One of the TILs failed to parse")?;
Ok(Self { posts })
}
pub fn validate(&self) -> Result<()> {
println!("Validating Slug Uniqueness");
for slug in self.posts.iter().map(|til| &til.frontmatter.slug) {
let matches: Vec<_> = self
.posts
.iter()
.filter(|til| &til.frontmatter.slug == slug)
.collect();
if matches.len() > 1 {
let paths = matches
.iter()
.map(|til| til.path.display().to_string())
.collect::<Vec<_>>()
.join(", ");
return Err(miette::miette!(
"Slug {} is not unique. Found these paths {}",
slug,
paths
));
}
}
println!("Validating {} TILs", self.posts.len());
for til in &self.posts {
println!(
"Validating {} from {}...",
til.frontmatter.slug,
til.path.display()
);
til.validate()?;
}
println!("TILs Valid! ✅");
Ok(())
}
pub fn by_recency(&self) -> Vec<&TilPost> {
self.posts.by_recency()
}
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Common Azure resource envelope: identity, type, required location, and tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the rename + trailing underscore.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    pub location: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
}
/// Retention policy: whether retention is enabled and for how many days.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RetentionPolicy {
    pub enabled: bool,
    pub days: i32,
}
/// Per-time-grain metric collection settings with optional retention.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricSettings {
    #[serde(rename = "timeGrain")]
    pub time_grain: String,
    pub enabled: bool,
    #[serde(rename = "retentionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub retention_policy: Option<RetentionPolicy>,
}
/// Per-category log collection settings with optional retention.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogSettings {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    pub enabled: bool,
    #[serde(rename = "retentionPolicy", default, skip_serializing_if = "Option::is_none")]
    pub retention_policy: Option<RetentionPolicy>,
}
/// Diagnostic settings payload: destinations (storage/service bus/workspace)
/// plus the metric and log settings to apply.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceDiagnosticSettings {
    #[serde(rename = "storageAccountId", default, skip_serializing_if = "Option::is_none")]
    pub storage_account_id: Option<String>,
    #[serde(rename = "serviceBusRuleId", default, skip_serializing_if = "Option::is_none")]
    pub service_bus_rule_id: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub metrics: Vec<MetricSettings>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub logs: Vec<LogSettings>,
    #[serde(rename = "workspaceId", default, skip_serializing_if = "Option::is_none")]
    pub workspace_id: Option<String>,
}
/// ARM resource wrapper around `ServiceDiagnosticSettings` (flattened envelope).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceDiagnosticSettingsResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<ServiceDiagnosticSettings>,
}
/// Generic error body: service error code and human-readable message.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// String with an optional localized display variant.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocalizableString {
    pub value: String,
    #[serde(rename = "localizedValue", default, skip_serializing_if = "Option::is_none")]
    pub localized_value: Option<String>,
}
/// Storage-table access descriptor: table name, validity window, and SAS token.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TableInfoEntry {
    #[serde(rename = "tableName", default, skip_serializing_if = "Option::is_none")]
    pub table_name: Option<String>,
    // NOTE(review): times are carried as strings here (no date type) — the
    // wire format is presumably ISO 8601; confirm against the service spec.
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "sasToken", default, skip_serializing_if = "Option::is_none")]
    pub sas_token: Option<String>,
    #[serde(rename = "sasTokenExpirationTime", default, skip_serializing_if = "Option::is_none")]
    pub sas_token_expiration_time: Option<String>,
}
/// Where metric data lives: a table endpoint plus the tables (and partition key)
/// holding the values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricAvailabilityLocation {
    #[serde(rename = "tableEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub table_endpoint: Option<String>,
    #[serde(rename = "tableInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub table_info: Vec<TableInfoEntry>,
    #[serde(rename = "partitionKey", default, skip_serializing_if = "Option::is_none")]
    pub partition_key: Option<String>,
}
/// Availability of a metric at a given time grain/retention, with its location.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricAvailability {
    #[serde(rename = "timeGrain", default, skip_serializing_if = "Option::is_none")]
    pub time_grain: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub retention: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<MetricAvailabilityLocation>,
}
/// Unit of a metric; serialized by variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Unit {
    Count,
    Bytes,
    Seconds,
    CountPerSecond,
    BytesPerSecond,
    Percent,
    MilliSeconds,
}
/// How metric samples are aggregated; serialized by variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AggregationType {
    None,
    Average,
    Count,
    Minimum,
    Maximum,
    Total,
}
/// Definition of one metric: identity, unit, aggregation types and availabilities.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDefinition {
    #[serde(rename = "resourceId", default, skip_serializing_if = "Option::is_none")]
    pub resource_id: Option<String>,
    #[serde(rename = "resourceUri", default, skip_serializing_if = "Option::is_none")]
    pub resource_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<LocalizableString>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<Unit>,
    #[serde(rename = "startTime", default, skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(rename = "endTime", default, skip_serializing_if = "Option::is_none")]
    pub end_time: Option<String>,
    #[serde(rename = "primaryAggregationType", default, skip_serializing_if = "Option::is_none")]
    pub primary_aggregation_type: Option<AggregationType>,
    #[serde(rename = "supportedAggregationTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_aggregation_types: Vec<AggregationType>,
    #[serde(rename = "metricAvailabilities", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_availabilities: Vec<MetricAvailability>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// List of metric definitions returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricDefinitionCollection {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    pub value: Vec<MetricDefinition>,
}
/// Base of the polymorphic rule-condition hierarchy; `odata.type` is the
/// wire-level type discriminator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleCondition {
    #[serde(rename = "odata.type")]
    pub odata_type: String,
    #[serde(rename = "dataSource", default, skip_serializing_if = "Option::is_none")]
    pub data_source: Option<RuleDataSource>,
}
/// Base of the polymorphic rule-data-source hierarchy; `odata.type` is the
/// wire-level type discriminator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleDataSource {
    #[serde(rename = "odata.type")]
    pub odata_type: String,
    #[serde(rename = "resourceUri", default, skip_serializing_if = "Option::is_none")]
    pub resource_uri: Option<String>,
    #[serde(rename = "legacyResourceId", default, skip_serializing_if = "Option::is_none")]
    pub legacy_resource_id: Option<String>,
    #[serde(rename = "resourceLocation", default, skip_serializing_if = "Option::is_none")]
    pub resource_location: Option<String>,
    #[serde(rename = "metricNamespace", default, skip_serializing_if = "Option::is_none")]
    pub metric_namespace: Option<String>,
}
/// Data source for metric-based rules: the base source plus a metric name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleMetricDataSource {
    #[serde(flatten)]
    pub rule_data_source: RuleDataSource,
    #[serde(rename = "metricName", default, skip_serializing_if = "Option::is_none")]
    pub metric_name: Option<String>,
}
/// Claims filter for management-event rules (match by caller email address).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleManagementEventClaimsDataSource {
    #[serde(rename = "emailAddress", default, skip_serializing_if = "Option::is_none")]
    pub email_address: Option<String>,
}
/// Data source for management-event rules: base source plus event filters
/// (name, source, level, operation, resource group/provider, status, claims).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleManagementEventDataSource {
    #[serde(flatten)]
    pub rule_data_source: RuleDataSource,
    #[serde(rename = "eventName", default, skip_serializing_if = "Option::is_none")]
    pub event_name: Option<String>,
    #[serde(rename = "eventSource", default, skip_serializing_if = "Option::is_none")]
    pub event_source: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub level: Option<String>,
    #[serde(rename = "operationName", default, skip_serializing_if = "Option::is_none")]
    pub operation_name: Option<String>,
    #[serde(rename = "resourceGroupName", default, skip_serializing_if = "Option::is_none")]
    pub resource_group_name: Option<String>,
    #[serde(rename = "resourceProviderName", default, skip_serializing_if = "Option::is_none")]
    pub resource_provider_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<String>,
    #[serde(rename = "subStatus", default, skip_serializing_if = "Option::is_none")]
    pub sub_status: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub claims: Option<RuleManagementEventClaimsDataSource>,
}
/// Comparison operator used in threshold conditions; serialized by variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum ConditionOperator {
    GreaterThan,
    GreaterThanOrEqual,
    LessThan,
    LessThanOrEqual,
}
/// How values are aggregated over the evaluation window; serialized by variant name.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum TimeAggregationOperator {
    Average,
    Minimum,
    Maximum,
    Total,
    Last,
}
/// Condition that fires when an aggregated value crosses a threshold.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ThresholdRuleCondition {
    #[serde(flatten)]
    pub rule_condition: RuleCondition,
    pub operator: ConditionOperator,
    pub threshold: f64,
    #[serde(rename = "windowSize", default, skip_serializing_if = "Option::is_none")]
    pub window_size: Option<String>,
    #[serde(rename = "timeAggregation", default, skip_serializing_if = "Option::is_none")]
    pub time_aggregation: Option<TimeAggregationOperator>,
}
/// Condition that fires when a number of locations report failures.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LocationThresholdRuleCondition {
    #[serde(flatten)]
    pub rule_condition: RuleCondition,
    #[serde(rename = "windowSize", default, skip_serializing_if = "Option::is_none")]
    pub window_size: Option<String>,
    #[serde(rename = "failedLocationCount")]
    pub failed_location_count: i32,
}
/// Aggregation settings (operator/threshold/window) for management-event rules.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementEventAggregationCondition {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operator: Option<ConditionOperator>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub threshold: Option<f64>,
    #[serde(rename = "windowSize", default, skip_serializing_if = "Option::is_none")]
    pub window_size: Option<String>,
}
/// Condition for management-event rules with optional aggregation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ManagementEventRuleCondition {
    #[serde(flatten)]
    pub rule_condition: RuleCondition,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub aggregation: Option<ManagementEventAggregationCondition>,
}
/// Base of the polymorphic rule-action hierarchy; `odata.type` is the
/// wire-level type discriminator.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleAction {
    #[serde(rename = "odata.type")]
    pub odata_type: String,
}
/// Email action: optionally notify service owners plus a custom recipient list.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleEmailAction {
    #[serde(flatten)]
    pub rule_action: RuleAction,
    #[serde(rename = "sendToServiceOwners", default, skip_serializing_if = "Option::is_none")]
    pub send_to_service_owners: Option<bool>,
    #[serde(rename = "customEmails", default, skip_serializing_if = "Vec::is_empty")]
    pub custom_emails: Vec<String>,
}
/// Webhook action: target URI plus arbitrary extra properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RuleWebhookAction {
    #[serde(flatten)]
    pub rule_action: RuleAction,
    #[serde(rename = "serviceUri", default, skip_serializing_if = "Option::is_none")]
    pub service_uri: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// Alert rule definition: a named, enable-able condition with its actions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AlertRule {
    pub name: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<String>,
    #[serde(rename = "isEnabled")]
    pub is_enabled: bool,
    pub condition: RuleCondition,
    // Single legacy action field alongside the newer `actions` list.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub action: Option<RuleAction>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub actions: Vec<RuleAction>,
    #[serde(rename = "lastUpdatedTime", default, skip_serializing_if = "Option::is_none")]
    pub last_updated_time: Option<String>,
}
/// ARM resource wrapper around `AlertRule` (flattened envelope).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AlertRuleResource {
    #[serde(flatten)]
    pub resource: Resource,
    pub properties: AlertRule,
}
/// PATCH payload for an alert rule: only tags and properties are updatable.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AlertRuleResourcePatch {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AlertRule>,
}
/// List of alert rule resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AlertRuleResourceCollection {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<AlertRuleResource>,
}
/// Paged list of operations; `next_link` points at the following page.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// One REST API operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<OperationProperties>,
}
/// Nested types belonging to [`Operation`].
pub mod operation {
    use super::*;
    /// Human-readable strings describing an operation for portals/CLIs.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub publisher: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
/// Extra operation metadata; currently only the service specification.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationProperties {
    #[serde(rename = "serviceSpecification", default, skip_serializing_if = "Option::is_none")]
    pub service_specification: Option<ServiceSpecification>,
}
/// Log and metric specifications advertised by the service for monitoring.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceSpecification {
    #[serde(rename = "logSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub log_specifications: Vec<LogSpecification>,
    #[serde(rename = "metricSpecifications", default, skip_serializing_if = "Vec::is_empty")]
    pub metric_specifications: Vec<MetricSpecification>,
    /// Opaque legacy payload; schema not modeled here.
    #[serde(rename = "legacyMetricSpecifications", default, skip_serializing_if = "Option::is_none")]
    pub legacy_metric_specifications: Option<serde_json::Value>,
}
/// Describes a single log category emitted by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct LogSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    /// Duration logs are grouped per blob (string-encoded; format not
    /// enforced here — presumably an ISO 8601 duration, verify upstream).
    #[serde(rename = "blobDuration", default, skip_serializing_if = "Option::is_none")]
    pub blob_duration: Option<String>,
}
/// Describes a single metric emitted by the service, including supported
/// aggregations, time grains, and dimensions.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetricSpecification {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "displayDescription", default, skip_serializing_if = "Option::is_none")]
    pub display_description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub unit: Option<String>,
    /// Default aggregation, as a string (e.g. name of an aggregation type).
    #[serde(rename = "aggregationType", default, skip_serializing_if = "Option::is_none")]
    pub aggregation_type: Option<String>,
    #[serde(rename = "supportedAggregationTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_aggregation_types: Vec<String>,
    #[serde(rename = "supportedTimeGrainTypes", default, skip_serializing_if = "Vec::is_empty")]
    pub supported_time_grain_types: Vec<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub availabilities: Vec<String>,
    #[serde(rename = "lockAggregationType", default, skip_serializing_if = "Option::is_none")]
    pub lock_aggregation_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub category: Option<String>,
    /// Dimensions the metric can be split by.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub dimensions: Vec<DimensionProperties>,
    #[serde(rename = "fillGapWithZero", default, skip_serializing_if = "Option::is_none")]
    pub fill_gap_with_zero: Option<bool>,
    #[serde(rename = "internalMetricName", default, skip_serializing_if = "Option::is_none")]
    pub internal_metric_name: Option<String>,
}
/// A dimension (splittable facet) of a metric.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DimensionProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(rename = "toBeExportedForShoebox", default, skip_serializing_if = "Option::is_none")]
    pub to_be_exported_for_shoebox: Option<bool>,
    #[serde(rename = "isHidden", default, skip_serializing_if = "Option::is_none")]
    pub is_hidden: Option<bool>,
    /// Opaque default values payload; schema not modeled here.
    #[serde(rename = "defaultDimensionValues", default, skip_serializing_if = "Option::is_none")]
    pub default_dimension_values: Option<serde_json::Value>,
}
/// Standard error envelope: an optional nested error response object.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorContract {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorResponse>,
}
|
/// Returns true iff `y` is a permutation of `x`, i.e. both strings
/// contain exactly the same characters with the same multiplicities.
fn check_permutation(x: String, y: String) -> bool {
    // Strings of different byte length can never be permutations of each
    // other, so skip the two sorts entirely in that case.
    if x.len() != y.len() {
        return false;
    }
    let mut x: Vec<char> = x.chars().collect();
    let mut y: Vec<char> = y.chars().collect();
    // sort_unstable is faster and allocation-free; stability is irrelevant
    // when the sorted vectors are only compared for equality.
    x.sort_unstable();
    y.sort_unstable();
    x == y
}
// Unit tests for check_permutation: a positive case, a same-length
// negative case, and a length-mismatch case.
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn is_permutation() {
        assert_eq!(
            true,
            check_permutation("abc".to_string(), "bca".to_string())
        );
    }
    #[test]
    fn not_permutation() {
        // Same length, different character multiset.
        assert_eq!(
            false,
            check_permutation("abv".to_string(), "bca".to_string())
        );
    }
    #[test]
    fn different_length() {
        assert_eq!(
            false,
            check_permutation("abc1".to_string(), "bca".to_string())
        );
    }
}
|
/// Tutorial program demonstrating constants vs. variables: constants
/// cannot be shadowed, while `let` bindings can be re-bound (even to a
/// different type).
fn main() {
    // Constants require an explicit type and use SCREAMING_SNAKE_CASE.
    const USER_LIMIT: i32 = 100;
    const PI: f32 = 3.14;
    println!("user limit is : {}", USER_LIMIT);
    // PI is intentionally unused here; the call below is left commented out.
    // println!("pi value is : {}", PI)
    // `let` shadowing: the second binding replaces the first.
    let salary = 100.00;
    let salary = 4.00;
    // read first salary
    println!("salary is {}", salary);
    // Shadowing may also change the binding's type (String -> usize).
    let uname = "hsuhau";
    let uname = uname.len();
    println!("uname changed to integer : {}", uname);
    /*
    Compiler output kept for reference — redefining a const is an error:
    error[E0428]: the name `NAME` is defined multiple times
    --> 7_rust_constant.rs:18:5
    |
    17 | const NAME:&str = "HHH";
    | ------------------------ previous definition of the value `NAME` here
    18 | const NAME:usize = NAME.len();
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `NAME` redefined here
    |
    = note: `NAME` must be defined only once in the value namespace of this block
    error: aborting due to previous error
    For more information about this error, try `rustc --explain E0428`.
    */
    // unlike variables, constants cannot be shadowed. If variables in the above program are replaced with constants, the compiler will throw an error.
    // const NAME:&str = "HHH";
    // const NAME:usize = NAME.len();
    // println!("name changed to integer : {}", NAME);
}
use super::{schema::arts, Connection, Postgres};
use artell_domain::{
art::{Art, ArtId, ArtRepository, Size},
artist::ArtistId,
};
use diesel::prelude::*;
use uuid::Uuid;
/// Postgres-backed implementation of the domain `ArtRepository` port.
pub struct PgArtRepository {
    /// Handle to the Postgres connection facility (see `Postgres` in super).
    pg: Postgres,
}
impl PgArtRepository {
pub fn new(pg: Postgres) -> Self {
PgArtRepository { pg }
}
}
// Implements the domain repository port on top of Postgres. The closures
// are moved into `try_with_conn`, which presumably supplies a pooled
// Diesel connection — confirm in `Postgres::try_with_conn`.
#[async_trait]
impl ArtRepository for PgArtRepository {
    /// Looks up a single art piece by id; `Ok(None)` when no row matches.
    async fn find_by_id(&self, id: Uuid) -> anyhow::Result<Option<Art>> {
        self.pg
            .try_with_conn(move |conn| find_by_id(conn, id))
            .await
    }
    /// Persists the art piece via the upsert in the free `save` function.
    async fn save(&self, art: Art) -> anyhow::Result<()> {
        self.pg.try_with_conn(move |conn| save(conn, art)).await
    }
}
/*
* ========
* Query
* ========
*/
/// Row shape for the `arts` table as selected in `find_by_id`.
/// Field order must match the `select(...)` column order — Diesel's
/// `Queryable` maps columns to fields positionally.
#[derive(Queryable)]
struct QueriedArt {
    id: Uuid,
    artist_id: Uuid,
    title: String,
    materials: String,
    // width/height are two nullable columns that together encode an
    // optional Size (both set or both NULL).
    width: Option<i32>,
    height: Option<i32>,
    image_name: String,
    portfolio_link: String,
}
/// Converts a database row into the domain `Art` entity.
///
/// Implemented as `From` rather than `Into` per Rust convention (clippy
/// `from_over_into`): the standard blanket impl then provides
/// `Into<Art> for QueriedArt` for free, so existing `QueriedArt::into`
/// callers keep working.
impl From<QueriedArt> for Art {
    fn from(q: QueriedArt) -> Art {
        // Both columns set, or both NULL, is the invariant; a half-set
        // pair indicates corrupt data and is treated as a bug.
        let size = match (q.width, q.height) {
            (None, None) => None,
            (Some(width), Some(height)) => Some(Size {
                width: width as usize,
                height: height as usize,
            }),
            _ => unreachable!(),
        };
        Art {
            id: ArtId(q.id),
            artist_id: ArtistId(q.artist_id),
            title: q.title,
            materials: q.materials,
            size,
            image_name: q.image_name,
            portfolio_link: q.portfolio_link,
        }
    }
}
/// Loads one row from `arts` by primary key and converts it to the domain
/// type. Diesel's `.optional()` turns "no row" into `Ok(None)`.
fn find_by_id(conn: Connection, id: Uuid) -> anyhow::Result<Option<Art>> {
    Ok(arts::table
        .filter(arts::id.eq(id))
        // Column order must mirror QueriedArt's field order (positional
        // mapping via Queryable).
        .select((
            arts::id,
            arts::artist_id,
            arts::title,
            arts::materials,
            arts::width,
            arts::height,
            arts::image_name,
            arts::portfolio_link,
        ))
        .first::<QueriedArt>(&conn)
        .optional()?
        .map(QueriedArt::into))
}
/*
* ========
* Update
* ========
*/
/// Borrowed insert/update shape for the `arts` table, used by `save` for
/// both the INSERT values and the ON CONFLICT update set (hence both
/// `Insertable` and `AsChangeset`; `Copy` lets it be passed twice).
#[derive(Clone, Copy, Insertable, AsChangeset)]
#[table_name = "arts"]
struct NewArt<'a> {
    id: &'a Uuid,
    artist_id: &'a Uuid,
    title: &'a str,
    materials: &'a str,
    // Optional Size flattened into two nullable columns.
    width: Option<i32>,
    height: Option<i32>,
    image_name: &'a str,
    portfolio_link: &'a str,
}
/// Borrows a domain `Art` as a row for insertion/update without cloning
/// the owned strings.
impl<'a> From<&'a Art> for NewArt<'a> {
    fn from(art: &'a Art) -> NewArt<'a> {
        NewArt {
            id: &art.id.0,
            artist_id: &art.artist_id.0,
            title: art.title.as_str(),
            materials: art.materials.as_str(),
            // Both map from the same Option<Size>, so either both columns
            // are set or both are None.
            width: art.size.map(|size| size.width as i32),
            height: art.size.map(|size| size.height as i32),
            image_name: art.image_name.as_str(),
            portfolio_link: art.portfolio_link.as_str(),
        }
    }
}
/// Upserts the art row: INSERT, and on an `id` conflict UPDATE all columns
/// to the new values (`ON CONFLICT (id) DO UPDATE`).
fn save(conn: Connection, art: Art) -> anyhow::Result<()> {
    let new_art = NewArt::from(&art);
    diesel::insert_into(arts::table)
        .values(new_art)
        .on_conflict(arts::id)
        .do_update()
        .set(new_art)
        .execute(&conn)?;
    Ok(())
}
|
use actix_web::HttpResponse;
use futures::future::Future;
use crate::common::error::UserError;
type FutureJsonResponse = Box<Future<Item = HttpResponse, Error = UserError>>;
#[macro_use]
pub mod macros;
pub mod paste;
|
pub mod voxelgrid;
|
/// Demonstrates heap allocation with `Box` and (via `person`) the `Drop`
/// trait.
fn main() {
    let boxed = Box::new(5);
    println!("b = {}", boxed);
    person();
}
/// Builds a `Person` and prints it; the `Drop` impl fires when the value
/// goes out of scope at the end of this function.
fn person() {
    let p = Person {
        name: "bugs".to_string(),
    };
    println!("p = {:?}", p);
}
/// Minimal struct used to demonstrate `Debug` printing and `Drop`.
#[derive(Debug)]
struct Person {
    name: String,
}
/// Prints a message when the `Person` is dropped, making destruction
/// order observable in the demo.
impl Drop for Person {
    fn drop(&mut self) {
        println!("dropping {}", self.name)
    }
}
|
/// Classic FizzBuzz over 1..=100.
///
/// Modernized from pre-1.0 Rust: `range(1i, 101)` and the `{:s}` format
/// specifier were removed before Rust 1.0 and no longer compile; the
/// range literal `1..101` and plain `{}` are the current equivalents.
fn main() {
    for num in 1..101 {
        let answer = if div_by_fifteen(num) {
            "FizzBuzz".to_string()
        } else if div_by_three(num) {
            "Fizz".to_string()
        } else if div_by_five(num) {
            "Buzz".to_string()
        } else {
            num.to_string()
        };
        println!("{}", answer);
    }
}
/// True when `num` is divisible by 3. (The pre-1.0 `int` type no longer
/// exists; `i32` matches the integer literals at the call sites.)
fn div_by_three(num: i32) -> bool {
    num % 3 == 0
}
/// True when `num` is divisible by 5. (The pre-1.0 `int` type no longer
/// exists; `i32` matches the integer literals at the call sites.)
fn div_by_five(num: i32) -> bool {
    num % 5 == 0
}
/// True when `num` is divisible by 15 (i.e. by both 3 and 5). (The
/// pre-1.0 `int` type no longer exists; `i32` matches the call sites.)
fn div_by_fifteen(num: i32) -> bool {
    num % 15 == 0
}
// Unit tests for the divisibility helpers: one negative and one positive
// case per helper.
#[test]
fn test_div_by_fifteen() {
    assert!(!div_by_fifteen(1), "One is not fifteen!");
}
#[test]
fn test_div_by_fifteen_with_fifteen() {
    assert!(div_by_fifteen(15), "Fifteen should be fifteen!");
}
#[test]
fn test_div_by_five() {
    assert!(!div_by_five(1), "One is not five!");
}
#[test]
fn test_div_by_five_with_five() {
    assert!(div_by_five(5), "Five should be five!");
}
#[test]
fn test_div_by_three() {
    assert!(!div_by_three(1), "One is not three!");
}
#[test]
fn test_div_by_three_with_three() {
    assert!(div_by_three(3), "Three should be three!");
}
|
use std::cmp::Ordering::{Equal, Greater, Less};
/// Two-pointer solution for LeetCode 167 ("Two Sum II"): `nums` is sorted
/// ascending and guaranteed to contain exactly one pair summing to
/// `target`. Returns the pair's 1-based indices.
pub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {
    let mut lo = 0;
    let mut hi = nums.len() - 1;
    while lo < hi {
        let sum = nums[lo] + nums[hi];
        if sum < target {
            // Too small: only moving the left pointer right can grow the sum.
            lo += 1;
        } else if sum > target {
            // Too large: only moving the right pointer left can shrink it.
            hi -= 1;
        } else {
            return vec![lo as i32 + 1, hi as i32 + 1];
        }
    }
    // The problem guarantees a solution, so falling through is a bug.
    unreachable!()
}
// pub fn two_sum(nums: Vec<i32>, target: i32) -> Vec<i32> {
// let mut l = 0;
// let mut r = nums.len() - 1;
// while l < r {
// let sum = nums[l] + nums[r];
// if sum > target {
// r -= 1;
// } else if sum < target {
// l += 1;
// } else {
// return vec![1 + l as i32, 1 + r as i32];
// }
// }
// vec![]
// }
fn main() {
assert_eq!(two_sum(vec![2, 7, 11, 15], 9), vec![1, 2]);
assert_eq!(two_sum(vec![2, 3, 4], 6), vec![1, 3]);
assert_eq!(two_sum(vec![-1, 0], -1), vec![1, 2]);
}
|
use std::fmt::Debug;
use serde::{Deserialize, Serialize};
use crate::{
document::Document,
schema::{Collection, CollectionName, InvalidNameError, Name, ViewName},
};
/// Types for defining a `Map` within a `View`.
pub mod map;
use map::{Key, Map, MappedValue};
/// Errors that arise when interacting with views.
#[derive(thiserror::Error, Debug)]
// TODO add which view name and collection
pub enum Error {
    /// An error occurred while serializing or deserializing.
    #[error("error deserializing document {0}")]
    Serialization(#[from] serde_cbor::Error),
    /// An error occurred while serializing or deserializing keys emitted in a view.
    /// Kept as an opaque `anyhow::Error` because key codecs are pluggable.
    #[error("error serializing view keys {0}")]
    KeySerialization(anyhow::Error),
    /// Returned when the reduce() function is unimplemented.
    #[error("reduce is unimplemented")]
    ReduceUnimplemented,
    /// Range queries are not supported on collections with encryptable keys.
    #[error("range queries are not supported on collections with encryptable keys")]
    RangeQueryNotSupported,
}
/// A type alias for the result of `View::map()`.
/// `Ok(None)` means the document emits no entry for this view.
pub type MapResult<K = (), V = ()> = Result<Option<Map<K, V>>, Error>;
/// A map/reduce powered indexing and aggregation schema.
///
/// Inspired by [`CouchDB`'s view
/// system](https://docs.couchdb.org/en/stable/ddocs/views/index.html)
///
/// This implementation is under active development, our own docs explaining our
/// implementation will be written as things are solidified. The guide [has an
/// overview](https://dev.bonsaidb.io/guide/about/concepts/view.html).
// TODO write our own view docs
pub trait View: Send + Sync + Debug + 'static {
    /// The collection this view belongs to
    type Collection: Collection;
    /// The key for this view.
    type Key: Key + 'static;
    /// An associated type that can be stored with each entry in the view.
    type Value: Serialize + for<'de> Deserialize<'de> + Send + Sync;
    /// If true, no two documents may emit the same key. Unique views are
    /// updated when the document is saved, allowing for this check to be done
    /// atomically. When a document is updated, all unique views will be
    /// updated, and if any of them fail, the document will not be allowed to
    /// update and an
    /// [`Error::UniqueKeyViolation`](crate::Error::UniqueKeyViolation) will be
    /// returned.
    fn unique(&self) -> bool {
        // Non-unique by default; opt in explicitly.
        false
    }
    /// If true, keys will be encrypted if a `default_encryption_key` is
    /// specified. This prevents the ability to use range-based queries.
    fn keys_are_encryptable(&self) -> bool {
        false
    }
    /// The version of the view. Changing this value will cause indexes to be rebuilt.
    fn version(&self) -> u64;
    /// The name of the view. Must be unique per collection.
    fn name(&self) -> Result<Name, InvalidNameError>;
    /// The namespaced name of the view (collection name + view name).
    fn view_name(&self) -> Result<ViewName, InvalidNameError> {
        Ok(ViewName {
            collection: Self::Collection::collection_name()?,
            name: self.name()?,
        })
    }
    /// The map function for this view. This function is responsible for
    /// emitting entries for any documents that should be contained in this
    /// View. If None is returned, the View will not include the document.
    fn map(&self, document: &Document<'_>) -> MapResult<Self::Key, Self::Value>;
    /// The reduce function for this view. If `Err(Error::ReduceUnimplemented)`
    /// is returned, queries that ask for a reduce operation will return an
    /// error. See [`CouchDB`'s Reduce/Rereduce
    /// documentation](https://docs.couchdb.org/en/stable/ddocs/views/intro.html#reduce-rereduce)
    /// for the design this implementation will be inspired by
    #[allow(unused_variables)]
    fn reduce(
        &self,
        mappings: &[MappedValue<Self::Key, Self::Value>],
        rereduce: bool,
    ) -> Result<Self::Value, Error> {
        Err(Error::ReduceUnimplemented)
    }
}
/// Represents either an owned value or a borrowed value. Functionally
/// equivalent to `std::borrow::Cow` except this type doesn't require the
/// wrapped type to implement `Clone`.
pub enum SerializableValue<'a, T: Serialize> {
    /// an owned value
    Owned(T),
    /// a borrowed value
    Borrowed(&'a T),
}
impl<'a, T> From<&'a T> for SerializableValue<'a, T>
where
    T: Serialize,
{
    /// Wraps a reference without taking ownership.
    fn from(other: &'a T) -> SerializableValue<'a, T> {
        Self::Borrowed(other)
    }
}
/// Uniform borrowed access regardless of whether the value is owned or
/// borrowed (mirrors `Cow::as_ref`).
impl<'a, T> AsRef<T> for SerializableValue<'a, T>
where
    T: Serialize,
{
    fn as_ref(&self) -> &T {
        match self {
            Self::Owned(value) => value,
            Self::Borrowed(value) => value,
        }
    }
}
/// Wraps a [`View`] with serialization to erase the associated types
/// (keys and values cross this boundary as raw bytes).
pub trait Serialized: Send + Sync + Debug {
    /// Wraps returing [`<View::Collection as Collection>::collection_name()`](crate::schema::Collection::collection_name)
    fn collection(&self) -> Result<CollectionName, InvalidNameError>;
    /// Wraps [`View::unique`]
    fn unique(&self) -> bool;
    /// Wraps [`View::version`]
    fn version(&self) -> u64;
    /// Wraps [`View::keys_are_encryptable`]
    fn keys_are_encryptable(&self) -> bool;
    /// Wraps [`View::view_name`]
    fn view_name(&self) -> Result<ViewName, InvalidNameError>;
    /// Wraps [`View::map`], returning the emitted entry in serialized form.
    fn map(&self, document: &Document<'_>) -> Result<Option<map::Serialized>, Error>;
    /// Wraps [`View::reduce`], taking and returning serialized key/value bytes.
    fn reduce(&self, mappings: &[(&[u8], &[u8])], rereduce: bool) -> Result<Vec<u8>, Error>;
}
#[allow(clippy::use_self)] // Using Self here instead of T inside of reduce() breaks compilation. The alternative is much more verbose and harder to read.
// Blanket impl: any statically-typed `View` automatically gets the
// type-erased `Serialized` interface. Keys travel as big-endian bytes,
// values as CBOR.
impl<T> Serialized for T
where
    T: View,
    <T as View>::Key: 'static,
{
    fn collection(&self) -> Result<CollectionName, InvalidNameError> {
        <<Self as View>::Collection as Collection>::collection_name()
    }
    fn unique(&self) -> bool {
        self.unique()
    }
    fn version(&self) -> u64 {
        self.version()
    }
    fn view_name(&self) -> Result<ViewName, InvalidNameError> {
        self.view_name()
    }
    fn map(&self, document: &Document<'_>) -> Result<Option<map::Serialized>, Error> {
        // Run the typed map, then serialize the emitted entry (if any).
        let map = self.map(document)?;
        map.map(|map| map.serialized()).transpose()
    }
    fn reduce(&self, mappings: &[(&[u8], &[u8])], rereduce: bool) -> Result<Vec<u8>, Error> {
        // Deserialize every (key, value) pair back into the typed form,
        // failing fast on the first bad key or value.
        let mappings = mappings
            .iter()
            .map(
                |(key, value)| match <T::Key as Key>::from_big_endian_bytes(key) {
                    Ok(key) => match serde_cbor::from_slice::<T::Value>(value) {
                        Ok(value) => Ok(MappedValue { key, value }),
                        Err(err) => Err(Error::from(err)),
                    },
                    Err(err) => Err(Error::KeySerialization(err)),
                },
            )
            .collect::<Result<Vec<_>, Error>>()?;
        // An unimplemented reduce is reported as an empty byte vector
        // rather than an error at this layer.
        let reduced_value = match self.reduce(&mappings, rereduce) {
            Ok(value) => value,
            Err(Error::ReduceUnimplemented) => return Ok(Vec::new()),
            Err(other) => return Err(other),
        };
        serde_cbor::to_vec(&reduced_value).map_err(Error::from)
    }
    fn keys_are_encryptable(&self) -> bool {
        self.keys_are_encryptable()
    }
}
|
use std::cell::UnsafeCell;
use std::collections::VecDeque;
use std::sync::atomic::{AtomicPtr, AtomicU32, Ordering};
use std::sync::Arc;
use std::thread;
// Replica states for the reader/writer handoff protocol.
/// Replica is safe for concurrent reads.
const READER: u32 = 1;
/// A writer owns the replica but is still waiting for readers to drain.
const WRITER_PENDING: u32 = 2;
/// Replica is exclusively owned by the writer.
const WRITER: u32 = 3;
/// A collection that can be built from a context and kept up to date by
/// replaying a stream of events (two replicas are maintained internally).
pub trait ConcurrentCollection {
    /// The event type applied to the collection.
    type Event;
    /// Construction context; `Clone` because two replicas are created.
    type Context: Clone;
    /// Used to create the collection we are updating.
    fn create(context: Self::Context) -> Self;
    /// Applies an event on the data structure.
    fn apply(&mut self, event: &Self::Event);
}
/// One of the two replicas managed by `MrswCollection`, plus the
/// bookkeeping used for the reader/writer handoff.
struct CollectionContainer<C: ConcurrentCollection> {
    /// The replica itself.
    col: C,
    /// Number of readers currently inside `get` on this replica.
    reader_count: AtomicU32,
    /// One of READER / WRITER_PENDING / WRITER.
    state: AtomicU32,
    /// Events applied to the other replica but not yet replayed here.
    event_stream: VecDeque<C::Event>,
}
impl<C: ConcurrentCollection> CollectionContainer<C> {
    /// Builds a replica in the given initial `state` with a pre-sized
    /// pending-event queue.
    fn new(context: C::Context, state: u32, event_stream_size: usize) -> Self {
        CollectionContainer {
            col: C::create(context),
            reader_count: AtomicU32::new(0),
            state: AtomicU32::new(state),
            event_stream: VecDeque::with_capacity(event_stream_size),
        }
    }
}
// SAFETY NOTE(review): these impls assert thread-safety for ANY
// `C: ConcurrentCollection` without requiring `C: Send + Sync`; soundness
// therefore rests entirely on the reader/writer state protocol — confirm
// that `C` is never accessed unsynchronized across threads.
unsafe impl<C: ConcurrentCollection> Sync for CollectionContainer<C> {}
unsafe impl<C: ConcurrentCollection> Send for CollectionContainer<C> {}
/// Represents a generic collection you can apply an event stream to.
///
/// NOTE(review): the two atomic pointers point into this struct's own
/// `col1`/`col2` fields, making the type self-referential — it must not
/// be moved after the pointers are (re)initialized.
struct MrswCollection<C: ConcurrentCollection> {
    /// The current reader.
    current_reader: AtomicPtr<CollectionContainer<C>>,
    /// The action to run to apply the changes.
    current_writer: AtomicPtr<CollectionContainer<C>>,
    col1: CollectionContainer<C>,
    col2: CollectionContainer<C>,
}
/// Creates the paired reader/writer handles over a two-replica collection.
///
/// NOTE(review): the pointers stored while building `col` reference the
/// stack locals `con1`/`con2`, which are invalidated when those locals are
/// moved into `col` and again when `col` moves into the `Arc`. They are
/// re-stored below through `col_arc` before use, so the stale values are
/// never dereferenced — but this is fragile (relies on the Arc allocation
/// being the final, stable address). Consider `Pin`/`Box` or indices.
pub fn create_mrsw_collection<C: ConcurrentCollection>(
    context: C::Context,
) -> (MrswCollectionReader<C>, MrswCollectionWriter<C>) {
    let mut con1 = CollectionContainer::new(context.clone(), READER, 1024);
    let mut con2 = CollectionContainer::new(context, WRITER, 1024);
    let col = MrswCollection {
        current_reader: AtomicPtr::new(&mut con1),
        current_writer: AtomicPtr::new(&mut con2),
        col1: con1,
        col2: con2,
    };
    let col_arc = Arc::new(UnsafeCell::new(col));
    // Fix up the pointers so they reference the containers at their final
    // heap address inside the Arc.
    let v = unsafe { &mut *col_arc.get() };
    v.current_reader.store(&mut v.col1, Ordering::Release);
    v.current_writer.store(&mut v.col2, Ordering::Release);
    (
        MrswCollectionReader {
            map: col_arc.clone(),
        },
        MrswCollectionWriter { map: col_arc },
    )
}
/// Cloneable-by-construction read handle; many readers may exist
/// (each `create_mrsw_collection` call hands out one).
pub struct MrswCollectionReader<C: ConcurrentCollection> {
    map: Arc<UnsafeCell<MrswCollection<C>>>,
}
impl<C: ConcurrentCollection> MrswCollectionReader<C> {
    /// Runs `act` against the replica currently designated for readers and
    /// returns its result, retrying (with yields) while a commit is in flight.
    pub fn get<F, R>(&self, act: F) -> R
    where
        F: FnOnce(&C) -> R,
    {
        // SAFETY NOTE(review): creates a mutable alias of the shared
        // collection through UnsafeCell; soundness depends on the state
        // protocol enforced inside `MrswCollection::get`.
        let v = unsafe { &mut *self.map.get() };
        v.get(act)
    }
}
// SAFETY NOTE(review): asserts cross-thread use of the reader handle for
// any `C`, without `C: Send + Sync` bounds — verify against the protocol.
unsafe impl<C: ConcurrentCollection> Sync for MrswCollectionReader<C> {}
unsafe impl<C: ConcurrentCollection> Send for MrswCollectionReader<C> {}
/// The single write handle; methods take `&mut self`, so Rust's borrow
/// rules serialize writes through one owner.
pub struct MrswCollectionWriter<C: ConcurrentCollection> {
    map: Arc<UnsafeCell<MrswCollection<C>>>,
}
impl<C: ConcurrentCollection> MrswCollectionWriter<C> {
    /// Applies `event` to the writer replica now and queues it for replay
    /// on the other replica at the next `commit`.
    pub fn add_event(&mut self, event: C::Event) {
        let v = unsafe { &mut *self.map.get() };
        v.add_event(event);
    }
    /// Swaps the replicas and replays queued events so both converge.
    pub fn commit(&mut self) {
        let v = unsafe { &mut *self.map.get() };
        v.commit();
    }
}
// SAFETY NOTE(review): Send only (not Sync) — the writer is meant to be
// owned by one thread at a time; no `C: Send` bound is required, verify.
unsafe impl<C: ConcurrentCollection> Send for MrswCollectionWriter<C> {}
impl<C: ConcurrentCollection> MrswCollection<C> {
    /// Applies `event` to the writer replica immediately and enqueues it
    /// on the reader replica's queue, to be replayed there after the swap.
    fn add_event(&mut self, event: C::Event) {
        unsafe {
            let writer = &mut *self.current_writer.load(Ordering::Acquire);
            writer.col.apply(&event);
        }
        unsafe {
            let reader = &mut *self.current_reader.load(Ordering::Acquire);
            reader.event_stream.push_back(event);
        }
    }
    /// Publishes the writer replica to readers, waits for readers to drain
    /// from the old reader replica, then replays its queued events so the
    /// new writer replica catches up with everything it missed.
    fn commit(&mut self) {
        let reader = unsafe { &mut *self.current_reader.load(Ordering::Acquire) };
        let writer = unsafe { &mut *self.current_writer.load(Ordering::Acquire) };
        // Old writer becomes readable; old reader is marked pending until
        // in-flight readers leave.
        writer.state.store(READER, Ordering::Release);
        reader.state.store(WRITER_PENDING, Ordering::Release);
        self.current_reader.store(writer, Ordering::Release);
        self.current_writer.store(reader, Ordering::Release);
        // Spin (yielding) until the last reader of the old replica exits.
        loop {
            if reader.reader_count.load(Ordering::Acquire) == 0 {
                reader.state.store(WRITER, Ordering::Release);
                break;
            } else {
                thread::yield_now()
            }
        }
        // Replay the events the old replica missed while serving reads.
        loop {
            let event = reader.event_stream.pop_front();
            match event {
                Some(e) => {
                    reader.col.apply(&e);
                }
                None => {
                    break;
                }
            }
        }
    }
    /// Reader path: register on the current reader replica, run `act` only
    /// if the replica is still in the READER state, otherwise back off
    /// (a commit may be mid-swap) and retry.
    fn get<F, R>(&mut self, act: F) -> R
    where
        F: FnOnce(&C) -> R,
    {
        loop {
            let reader = self.current_reader.load(Ordering::Acquire);
            unsafe { (*reader).reader_count.fetch_add(1, Ordering::Relaxed) };
            if unsafe { (*reader).state.load(Ordering::Acquire) } == READER {
                let v = unsafe { &(*reader).col };
                let r = act(v);
                unsafe { (*reader).reader_count.fetch_sub(1, Ordering::Release) };
                break r;
            } else {
                // Not readable right now — undo the registration and retry.
                unsafe { (*reader).reader_count.fetch_sub(1, Ordering::Release) };
                thread::yield_now()
            }
        }
    }
}
// Single-threaded smoke test: one event, one commit, then a read must
// observe the event's effect.
#[cfg(test)]
mod test {
    use super::*;
    #[derive(Clone)]
    struct ValueTest {
        id: u32,
    }
    struct ApplyTest {
        value: ValueTest,
    }
    enum EventTest {
        MyEvent(u32),
    }
    #[test]
    pub fn create_mrsw_collection_test() {
        let value = ValueTest { id: 1 };
        let (reader, mut writer) = create_mrsw_collection(value);
        writer.add_event(EventTest::MyEvent(2));
        writer.commit();
        let r = reader.get(|v: &ApplyTest| v.value.id);
        assert_eq!(2, r);
    }
    impl ConcurrentCollection for ApplyTest {
        type Event = EventTest;
        type Context = ValueTest;
        fn create(context: ValueTest) -> Self {
            Self { value: context }
        }
        fn apply(&mut self, event: &EventTest) {
            match event {
                EventTest::MyEvent(val) => {
                    self.value.id = val.clone();
                }
            }
        }
    }
}
|
#![allow(clippy::many_single_char_names)]
use num_traits::{AsPrimitive, FromPrimitive, PrimInt, ToPrimitive, Unsigned};
use packedvec::PackedVec;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
use vob::Vob;
/// A SparseVec efficiently encodes a two-dimensional matrix of integers. The input matrix must be
/// encoded as a one-dimensional vector of integers with a row-length. Given an "empty" value, the
/// SparseVec uses row displacement to compress that value as described in "Storing a sparse table"
/// by Robert Endre Tarjan and Andrew Chi-Chih Yao. Afterwards it encodes the result further using
/// a PackedVec.
///
/// # Example
///
/// ```
/// extern crate sparsevec;
/// use sparsevec::SparseVec;
///
/// fn main() {
/// let v:Vec<usize> = vec![1,0,0,0,
/// 0,0,7,8,
/// 9,0,0,3];
/// let sv = SparseVec::from(&v, 0, 4);
/// assert_eq!(sv.get(0,0).unwrap(), 1);
/// assert_eq!(sv.get(1,2).unwrap(), 7);
/// assert_eq!(sv.get(2,3).unwrap(), 3);
/// }
/// ```
///
/// # How it works
///
/// Let's take as an example the two-dimensional vector
/// ```text
/// 1 0 0
/// 2 0 0
/// 3 0 0
/// 0 0 4
/// ```
/// represented as a one dimensional vector `v = [1,0,0,2,0,0,3,0,0,0,0,4]` with row-length 3.
/// Storing this vector in memory is wasteful as the majority of its elements is 0. We can compress
/// this vector using row displacement, which merges all rows into a vector such that non-zero
/// entries are never mapped to the same position. For the above example, this would result in the
/// compressed vector `c = [1,2,3,0,4]`:
/// ```text
/// 1 0 0
/// 2 0 0
/// 3 0 0
/// 0 0 4
/// ---------
/// 1 2 3 0 4
/// ```
/// To retrieve values from the compressed vector, we need a displacement vector, which
/// describes how much each row was shifted during the compression. For the above example, the
/// displacement vector would be `d = [0, 1, 2, 2]`. In order to retrieve the value at
/// position (2, 0), we can calculate its compressed position with `pos = d[row] + col`:
/// ```text
/// pos = d[2] + 0 // =2
/// value = c[pos] // =3
/// ```
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Debug)]
pub struct SparseVec<T> {
    displacement: Vec<usize>, // Displacement vector, indexed by original row
    row_length: usize,        // Row length of the input matrix
    empty_val: T,             // Value considered "empty"
    empties: Vob<u64>,        // Mapping of "empty" cells (one bit per input cell)
    data: PackedVec<T, u64>,  // Compressed matrix
}
impl<T: Clone + Copy + PartialEq> SparseVec<T>
where
    T: AsPrimitive<u64> + FromPrimitive + Ord + PrimInt + ToPrimitive + Unsigned,
    u64: AsPrimitive<T>,
{
    /// Constructs a new SparseVec from a `Vec` of unsigned integers where `empty_val` describes
    /// the values to be compressed and `row_length` the element size per row in the original
    /// two-dimensional vector.
    ///
    /// # Examples
    /// ```
    /// use sparsevec::SparseVec;
    /// let v:Vec<usize> = vec![1,2,3,4,5,6,7,8];
    /// let sv = SparseVec::from(&v, 0, 4);
    /// assert_eq!(sv.get(1,2).unwrap(), 7);
    /// ```
    pub fn from(v: &[T], empty_val: T, row_length: usize) -> SparseVec<T> {
        // Degenerate case: nothing to compress.
        if v.is_empty() {
            return SparseVec {
                displacement: Vec::new(),
                row_length: 0,
                empty_val,
                empties: Vob::<u64>::new_with_storage_type(0),
                data: PackedVec::<T, u64>::new_with_storaget(v.to_vec()),
            };
        }
        // Sort rows by amount of empty values as suggested in
        // "Smaller faster table driven parser" by S. F. Zeigler
        let s = sort(v, empty_val, row_length);
        let (c, d) = compress(v, &s, empty_val, row_length);
        // Record which original cells were empty so `get` can answer them
        // without consulting the compressed data.
        let e = calc_empties(v, empty_val);
        let pv = PackedVec::<T, u64>::new_with_storaget(c);
        SparseVec {
            displacement: d,
            row_length,
            empty_val,
            empties: e,
            data: pv,
        }
    }
    /// Returns the value of the element at position `(r,c)`, where `r` is a row and `c` is a
    /// column. Returns `None` if out of bounds.
    ///
    /// # Examples
    /// ```
    /// use sparsevec::SparseVec;
    /// let v:Vec<usize> = vec![1,2,3,4,5,6,7,8];
    /// let sv = SparseVec::from(&v, 0, 4);
    /// assert_eq!(sv.get(1,2).unwrap(), 7);
    /// ```
    pub fn get(&self, r: usize, c: usize) -> Option<T> {
        // Flat index into the original (uncompressed) matrix.
        let k = r * self.row_length + c;
        match self.empties.get(k) {
            None => None,                       // out of bounds
            Some(true) => Some(self.empty_val), // cell was "empty" originally
            // Non-empty: look up through the row's displacement.
            Some(false) => self.data.get(self.displacement[r] + c),
        }
    }
    /// Returns the number of elements of the original input vector.
    /// # Examples
    /// ```
    /// use sparsevec::SparseVec;
    /// let v = vec![1,2,3,4];
    /// let sv = SparseVec::from(&v, 0 as usize, 2);
    /// assert_eq!(sv.len(), 4);
    /// ```
    pub fn len(&self) -> usize {
        self.empties.len()
    }
    /// Returns true if the SparseVec has no elements or false otherwise.
    /// # Examples
    /// ```
    /// use sparsevec::SparseVec;
    /// let v = Vec::new();
    /// let sv = SparseVec::from(&v, 0 as usize, 0);
    /// assert_eq!(sv.is_empty(), true);
    /// ```
    pub fn is_empty(&self) -> bool {
        self.empties.is_empty()
    }
}
/// Builds a bit vector with one bit per input cell, set where the cell
/// equals `empty_val`.
fn calc_empties<T: PartialEq>(vec: &[T], empty_val: T) -> Vob<u64> {
    let mut vob = Vob::<u64>::from_elem_with_storage_type(false, vec.len());
    for (i, v) in vec.iter().enumerate() {
        if *v == empty_val {
            vob.set(i, true);
        }
    }
    vob
}
/// Merges the rows of `vec` (visited in the order given by `sorted`) into
/// one vector using row displacement: each row is shifted right by the
/// smallest offset `d` at which none of its non-empty cells collides with
/// an already-placed, different non-empty cell.
///
/// Returns the compressed vector and the displacement vector, the latter
/// indexed by original row number.
fn compress<T: Clone + Copy + PartialEq>(
    vec: &[T],
    sorted: &[usize],
    empty_val: T,
    row_length: usize,
) -> (Vec<T>, Vec<usize>) {
    // Result vector, initially one row's worth of "empty" cells
    // (vec![..] replaces the non-idiomatic Vec::new + resize pair).
    let mut r = vec![empty_val; row_length];
    // Displacement per original row index.
    let mut dv = vec![0; sorted.len()];
    for s in sorted {
        let slice = &vec[s * row_length..(s + 1) * row_length];
        let mut d = 0; // candidate displacement for this row
        loop {
            if fits(slice, &r, d, empty_val) {
                apply(slice, &mut r, d, empty_val);
                dv[*s] = d;
                break;
            } else {
                d += 1;
                // Grow the result so the next `fits` call can probe up to
                // index d + row_length - 1.
                if d + row_length > r.len() {
                    r.resize(d + row_length, empty_val);
                }
            }
        }
    }
    (r, dv)
}
/// Reports whether row `v`, shifted right by `d`, can be merged into
/// `target` without clobbering a conflicting non-empty entry.
fn fits<T: PartialEq>(v: &[T], target: &[T], d: usize, empty_val: T) -> bool {
    (0..v.len()).all(|i| {
        // A cell conflicts only when both sides are non-empty and differ.
        v[i] == empty_val || target[d + i] == empty_val || target[d + i] == v[i]
    })
}
fn apply<T: Copy + PartialEq>(v: &[T], target: &mut [T], d: usize, empty_val: T) {
for i in 0..v.len() {
if v[i] != empty_val {
target[d + i] = v[i]
}
}
}
/// Returns the row indices of `v` ordered by ascending count of
/// `empty_val` entries per row, so the densest rows are placed first
/// during compression (per Zeigler, "Smaller faster table driven parser").
///
/// The redundant duplicate `where T: PartialEq<T>` clause of the original
/// (already implied by the `T: PartialEq` generic bound) has been removed.
fn sort<T: PartialEq>(v: &[T], empty_val: T, row_length: usize) -> Vec<usize> {
    let mut order: Vec<usize> = (0..v.len() / row_length).collect();
    // Stable sort: rows with equal empty-counts keep their original
    // relative order, so results are deterministic.
    order.sort_by_key(|&r| {
        v[(r * row_length)..((r + 1) * row_length)]
            .iter()
            .filter(|cell| **cell == empty_val)
            .count()
    });
    order
}
#[cfg(test)]
mod test {
    extern crate rand;
    use super::*;
    // Round-trip: every original cell must be recoverable after compression.
    #[test]
    fn test_sparsevec() {
        let v = vec![0, 1, 2, 3, 4, 0, 0, 0, 0, 0, 5, 6, 0, 7, 8, 0];
        let sv = SparseVec::from(&v, 0 as usize, 4);
        assert_eq!(sv.get(0, 0).unwrap(), 0);
        assert_eq!(sv.get(0, 1).unwrap(), 1);
        assert_eq!(sv.get(0, 2).unwrap(), 2);
        assert_eq!(sv.get(0, 3).unwrap(), 3);
        assert_eq!(sv.get(1, 0).unwrap(), 4);
        assert_eq!(sv.get(1, 1).unwrap(), 0);
        assert_eq!(sv.get(2, 2).unwrap(), 5);
        assert_eq!(sv.get(2, 3).unwrap(), 6);
        assert_eq!(sv.get(3, 0).unwrap(), 0);
        assert_eq!(sv.get(3, 1).unwrap(), 7);
        assert_eq!(sv.get(3, 2).unwrap(), 8);
        assert_eq!(sv.get(3, 3).unwrap(), 0);
    }
    // Empty input: zero length, out-of-bounds lookups return None.
    #[test]
    fn test_sparsevec_empty() {
        let v = Vec::new();
        let sv = SparseVec::from(&v, 0 as usize, 0);
        assert_eq!(sv.len(), 0);
        assert_eq!(sv.get(0, 0), None);
        assert_eq!(sv.is_empty(), true);
    }
    // Property-style check: random ~50%-sparse input of 2000 u16 cells
    // must round-trip exactly for the given row length.
    fn random_sparsevec(row_length: usize) {
        const LENGTH: usize = 2000;
        let mut v: Vec<u16> = Vec::with_capacity(LENGTH);
        for _ in 0..LENGTH {
            if rand::random::<u8>() < 128 {
                v.push(0);
            } else {
                v.push(rand::random::<u16>() % 1000);
            }
        }
        let sv = SparseVec::from(&v, 0, row_length);
        let rows = LENGTH / row_length;
        for r in 0..rows {
            for c in 0..row_length {
                assert_eq!(sv.get(r, c).unwrap(), v[r * row_length + c]);
            }
        }
    }
    #[test]
    fn random_vec() {
        random_sparsevec(5);
        random_sparsevec(10);
        random_sparsevec(20);
        random_sparsevec(50);
        random_sparsevec(100);
    }
    // Rows that share identical prefixes should overlap in the compressed
    // vector (identical non-empty values are allowed to collide).
    #[test]
    fn test_sparsevec_compress_same_values() {
        let v = vec![0, 1, 2, 3, 0, 1, 2, 3, 1, 2, 3, 0, 0, 1, 2, 0];
        let s: Vec<usize> = (0..v.len() / 4).collect();
        let (c, d) = compress(&v, &s, 0 as usize, 4);
        assert_eq!(c, vec![0, 1, 2, 3, 0]);
        assert_eq!(d, vec![0, 0, 1, 0]);
        let sv = SparseVec::from(&v, 0 as usize, 4);
        assert_eq!(sv.get(0, 0).unwrap(), 0);
        assert_eq!(sv.get(0, 1).unwrap(), 1);
        assert_eq!(sv.get(0, 2).unwrap(), 2);
        assert_eq!(sv.get(0, 3).unwrap(), 3);
        assert_eq!(sv.get(1, 0).unwrap(), 0);
        assert_eq!(sv.get(1, 1).unwrap(), 1);
        assert_eq!(sv.get(2, 0).unwrap(), 1);
        assert_eq!(sv.get(2, 1).unwrap(), 2);
        assert_eq!(sv.get(2, 2).unwrap(), 3);
        assert_eq!(sv.get(2, 3).unwrap(), 0);
        assert_eq!(sv.get(3, 0).unwrap(), 0);
        assert_eq!(sv.get(3, 1).unwrap(), 1);
        assert_eq!(sv.get(3, 2).unwrap(), 2);
        assert_eq!(sv.get(3, 3).unwrap(), 0);
    }
    // Rows must come back ordered by ascending empty-count, ties stable.
    #[test]
    fn test_sort_function() {
        let v = vec![1, 0, 0, 0, 8, 9, 0, 0, 5, 6, 7, 0, 1, 2, 3, 4];
        let s = sort(&v, 0, 4);
        assert_eq!(s, [3, 2, 1, 0]);
        let v = vec![1, 0, 1, 0, 0, 1, 0, 0, 8, 9, 0, 0, 0, 2, 3, 4];
        let s = sort(&v, 0, 4);
        assert_eq!(s, [3, 0, 2, 1]);
    }
}
|
#[macro_use]
extern crate clap;
use bls12_381::Scalar;
use sapvi::{BlsStringConversion, Decodable, Encodable, ZKContract, ZKProof};
use simplelog::*;
use std::fs;
use std::fs::File;
use std::time::Instant;
//use log::*;
// Crate-local alias: every CLI function reports errors via `failure`.
type Result<T> = std::result::Result<T, failure::Error>;
// Trusted-setup phase: do the setup for a compiled contract (e.g.
// mint.zcd) and save the generated parameters (e.g. mint.setup).
fn trusted_setup(contract_data: String, setup_file: String) -> Result<()> {
    let start = Instant::now();
    let file = File::open(contract_data)?;
    let mut contract = ZKContract::decode(file)?;
    println!(
        "loaded contract '{}': [{:?}]",
        contract.name,
        start.elapsed()
    );
    // Summarize the contract's VM program before the (slow) setup runs.
    println!("Stats:");
    println!(" Constants: {}", contract.vm.constants.len());
    println!(" Alloc: {}", contract.vm.alloc.len());
    println!(" Operations: {}", contract.vm.ops.len());
    println!(
        " Constraint Instructions: {}",
        contract.vm.constraints.len()
    );
    contract.setup(&setup_file)?;
    Ok(())
}
// make the proof
/// Creates a zero-knowledge proof: loads the contract and its setup,
/// reads "<name> <value>" parameter lines from `params`, proves, and
/// writes the encoded proof to `zk_proof`.
fn create_proof(
    contract_data: String,
    setup_file: String,
    params: String,
    zk_proof: String,
) -> Result<()> {
    let start = Instant::now();
    let file = File::open(contract_data)?;
    let mut contract = ZKContract::decode(file)?;
    contract.load_setup(&setup_file)?;
    println!(
        "Loaded contract '{}': [{:?}]",
        contract.name,
        start.elapsed()
    );
    // Propagate read failures through `Result` instead of panicking with
    // `expect`, matching the error handling of the sibling CLI functions.
    let param_content = fs::read_to_string(params)?;
    // Iterate the lines directly — collecting into a Vec first was an
    // unnecessary allocation.
    for line in param_content.lines() {
        // First whitespace-separated token is the parameter name; the
        // remainder (trimmed) is its value.
        let name = line.split_whitespace().next().unwrap_or("");
        let value = line.trim_start_matches(name).trim_start();
        contract.set_param(name, Scalar::from_string(value))?;
        println!("Set parameter: {}", name);
        println!(" Value: {}", value);
    }
    let proof = contract.prove()?;
    let mut file = File::create(zk_proof)?;
    proof.encode(&mut file)?;
    Ok(())
}
//verify the proof
/// Loads the contract and setup, decodes a serialized proof, and reports the
/// verification result on stdout (success) or stderr (failure).
fn verify_proof(contract_data: String, setup_file: String, zk_proof: String) -> Result<()> {
    let contract_file = File::open(contract_data)?;
    let mut contract = ZKContract::decode(contract_file)?;
    contract.load_setup(&setup_file)?;
    let proof_file = File::open(zk_proof)?;
    let proof = ZKProof::decode(proof_file)?;
    // A failed verification is reported, not turned into an `Err`.
    match contract.verify(&proof) {
        true => println!("Zero-knowledge proof verified correctly."),
        false => eprintln!("Verification failed."),
    }
    Ok(())
}
// show public values in proof
/// Prints the public inputs carried by a serialized proof.
fn show_public(zk_proof: String) -> Result<()> {
    let proof_file = File::open(zk_proof)?;
    let proof = ZKProof::decode(proof_file)?;
    println!("Public values: {:?}", proof.public);
    Ok(())
}
/// CLI entry point: installs a terminal logger and dispatches to the
/// setup/prove/verify/show helpers based on the chosen subcommand.
fn main() -> Result<()> {
    // Declarative CLI; every subcommand lists its required positional args.
    let matches = clap_app!(zkvm =>
        (version: "0.1.0")
        (author: "Rose O'Leary <rrose@tuta.io>")
        (about: "Zero Knowledge Virtual Machine Command Line Interface")
        (@subcommand init =>
            (about: "Trusted setup phase")
            (@arg CONTRACT_DATA: +required "Input zero-knowledge contract data (.zcd)")
            (@arg SETUP_FILE: +required "Output setup parameters")
        )
        (@subcommand prove =>
            (about: "Create zero-knowledge proof")
            (@arg CONTRACT_DATA: +required "Input zero-knowledge contract data (.zcd)")
            (@arg SETUP_FILE: +required "Input setup parameters")
            (@arg PARAMS: +required "Input parameters json file")
            (@arg ZK_PROOF: +required "Output zero-knowledge proof")
        )
        (@subcommand verify =>
            (about: "Verify zero-knowledge proof")
            (@arg CONTRACT_DATA: +required "Input zero-knowledge contract data (.zcd)")
            (@arg SETUP_FILE: +required "Input setup parameters")
            (@arg ZK_PROOF: +required "Input zero-knowledge proof")
        )
        (@subcommand show =>
            (about: "Show public values in proof")
            (@arg ZK_PROOF: +required "Input zero-knowledge proof")
        )
    )
    .get_matches();
    // Log everything at Debug and above to the terminal.
    CombinedLogger::init(vec![TermLogger::new(
        LevelFilter::Debug,
        Config::default(),
        TerminalMode::Mixed,
    )
    .unwrap()])
    .unwrap();
    match matches.subcommand() {
        Some(("init", matches)) => {
            let contract_data: String = matches.value_of("CONTRACT_DATA").unwrap().parse()?;
            let setup_file: String = matches.value_of("SETUP_FILE").unwrap().parse()?;
            trusted_setup(contract_data, setup_file)?;
        }
        Some(("prove", matches)) => {
            let contract_data: String = matches.value_of("CONTRACT_DATA").unwrap().parse()?;
            let setup_file: String = matches.value_of("SETUP_FILE").unwrap().parse()?;
            let params: String = matches.value_of("PARAMS").unwrap().parse()?;
            let zk_proof: String = matches.value_of("ZK_PROOF").unwrap().parse()?;
            create_proof(contract_data, setup_file, params, zk_proof)?;
        }
        Some(("verify", matches)) => {
            let contract_data: String = matches.value_of("CONTRACT_DATA").unwrap().parse()?;
            let setup_file: String = matches.value_of("SETUP_FILE").unwrap().parse()?;
            let zk_proof: String = matches.value_of("ZK_PROOF").unwrap().parse()?;
            verify_proof(contract_data, setup_file, zk_proof)?;
        }
        Some(("show", matches)) => {
            let zk_proof: String = matches.value_of("ZK_PROOF").unwrap().parse()?;
            show_public(zk_proof)?;
        }
        // `value_of`+`unwrap` above is safe: every arg is `+required`.
        _ => {
            eprintln!("error: Invalid subcommand invoked");
            std::process::exit(-1);
        }
    }
    Ok(())
}
|
use crate::solutions::Solution;
/// Day 12 solution marker type; the actual logic lives in its `Solution` impl.
pub struct Day12 {}
impl Solution for Day12 {
    /// Part 1: run the program with all registers zeroed; answer is register `a`.
    fn part1(&self, input: String) {
        let program: Vec<_> = input.split('\n').filter_map(Instruction::new).collect();
        let mut machine = VM::new(program);
        machine.run();
        println!("{}", machine.a);
    }

    /// Part 2: identical, except register `c` starts at 1.
    fn part2(&self, input: String) {
        let program: Vec<_> = input.split('\n').filter_map(Instruction::new).collect();
        let mut machine = VM::new(program);
        machine.c = 1;
        machine.run();
        println!("{}", machine.a);
    }
}
/// A tiny register machine with four `i32` registers and a program counter.
#[derive(Debug)]
struct VM {
    a: i32,
    b: i32,
    c: i32,
    d: i32,
    instructions: Vec<Instruction>,
    // Signed so that a backwards `jnz` can move it below zero, which halts.
    pc: isize,
}
impl VM {
fn new(instructions: Vec<Instruction>) -> VM {
VM {
a: 0,
b: 0,
c: 0,
d: 0,
instructions,
pc: 0,
}
}
fn run(&mut self) {
while self.pc >= 0 && self.pc < self.instructions.len() as isize {
let instruction = self.instructions[self.pc as usize];
match instruction {
Instruction::Cpy { source, dest } => self.set_val(dest, self.source_val(source)),
Instruction::Inc { reg } => self.set_val(reg, 1 + self.val_of(reg)),
Instruction::Dec { reg } => self.set_val(reg, self.val_of(reg) - 1),
Instruction::Jnz { cond, offset } => {
if self.source_val(cond) != 0 {
self.pc += offset - 1 // -1 to account for increment below
}
}
}
self.pc += 1;
}
}
fn source_val(&self, source: Source) -> i32 {
match source {
Source::Reg(reg) => self.val_of(reg),
Source::Imm(imm) => imm,
}
}
fn val_of(&self, reg: Register) -> i32 {
use Register::*;
match reg {
A => self.a,
B => self.b,
C => self.c,
D => self.d,
}
}
fn set_val(&mut self, reg: Register, val: i32) {
use Register::*;
match reg {
A => self.a = val,
B => self.b = val,
C => self.c = val,
D => self.d = val,
}
}
}
/// One assembunny instruction (AoC-style: cpy/inc/dec/jnz).
#[derive(Copy, Clone, Debug)]
enum Instruction {
    /// Copy a register or immediate into a register.
    Cpy { source: Source, dest: Register },
    /// Increment a register by one.
    Inc { reg: Register },
    /// Decrement a register by one.
    Dec { reg: Register },
    /// Jump by `offset` when `cond` is non-zero.
    Jnz { cond: Source, offset: isize },
}
impl Instruction {
fn new(line: &str) -> Option<Instruction> {
use Instruction::*;
let mut line = line.split_whitespace();
match line.next()? {
"cpy" => {
let source = Source::new(line.next()?)?;
let dest = Register::new(line.next()?)?;
Some(Cpy { source, dest })
}
"inc" => Some(Inc {
reg: Register::new(line.next()?)?,
}),
"dec" => Some(Dec {
reg: Register::new(line.next()?)?,
}),
"jnz" => {
let cond = Source::new(line.next()?)?;
match line.next()?.parse() {
Ok(offset) => Some(Jnz { cond, offset }),
Err(_) => None,
}
}
_ => None,
}
}
}
/// The machine's four registers.
#[derive(Copy, Clone, Debug)]
enum Register {
    A,
    B,
    C,
    D,
}
impl Register {
fn new(name: &str) -> Option<Register> {
use Register::*;
match name {
"a" => Some(A),
"b" => Some(B),
"c" => Some(C),
"d" => Some(D),
_ => None,
}
}
}
/// An instruction operand: either a register or an immediate integer.
#[derive(Copy, Clone, Debug)]
enum Source {
    Reg(Register),
    Imm(i32),
}
impl Source {
fn new(thing: &str) -> Option<Source> {
use Source::*;
match Register::new(thing) {
Some(reg) => Some(Reg(reg)),
None => match thing.parse() {
Ok(result) => Some(Imm(result)),
Err(_) => None,
},
}
}
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the "hack" directory of this source tree.
use std::path::Path;
use anyhow::bail;
use anyhow::Result;
use crate::decls;
use crate::state::UnitState;
use crate::textual;
/// Comment marker written before each unit so tooling can find unit boundaries.
const UNIT_START_MARKER: &str = "TEXTUAL UNIT START";
/// Comment marker written after each unit.
const UNIT_END_MARKER: &str = "TEXTUAL UNIT END";
/// Lowers one IR `Unit` to Textual, bracketed by start/end markers, followed
/// by declarations for external functions and (unless `no_builtins`) builtins.
pub fn textual_writer(
    w: &mut dyn std::io::Write,
    path: &Path,
    mut unit: ir::Unit<'_>,
    no_builtins: bool,
) -> Result<()> {
    // steal the StringInterner so we can mutate it while reading the Unit.
    let strings = std::mem::take(&mut unit.strings);
    let escaped_path = escaper::escape(path.display().to_string());
    writeln!(w, "// {} {}", UNIT_START_MARKER, escaped_path)?;
    textual::write_attribute(w, textual::Attribute::SourceLanguage("hack".to_string()))?;
    writeln!(w)?;
    let mut state = UnitState::new(strings);
    // Bail out before emitting anything for a unit that carries a Fatal.
    check_fatal(path, unit.fatal.as_ref())?;
    for cls in unit.classes {
        crate::class::write_class(w, &mut state, cls)?;
    }
    for func in unit.functions {
        crate::func::write_function(w, &mut state, func)?;
    }
    // Functions referenced but not defined in this unit get loose declarations.
    writeln!(w, "// ----- EXTERNALS -----")?;
    for name in state.func_declares.external_funcs() {
        writeln!(w, "declare {name}(...): *Mixed")?;
    }
    if !no_builtins {
        decls::write_decls(w)?;
    }
    writeln!(w, "// {} {}", UNIT_END_MARKER, escaped_path)?;
    writeln!(w)?;
    Ok(())
}
/// Converts a unit-level `Fatal` (if present) into an `Err` so the caller
/// aborts instead of emitting Textual for a broken unit.
fn check_fatal(path: &Path, fatal: Option<&ir::Fatal>) -> Result<()> {
    let fatal = match fatal {
        Some(fatal) => fatal,
        None => return Ok(()),
    };
    let err = match fatal.op {
        ir::FatalOp::Parse => "Parse",
        ir::FatalOp::Runtime => "Runtime",
        ir::FatalOp::RuntimeOmitFrame => "Runtime Omit",
        _ => unreachable!(),
    };
    bail!(
        "{err} error in {}[{}]: {}",
        path.display(),
        fatal.loc.line_begin,
        fatal.message
    )
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use super::ffi;
// Function pointers for vDSO entry points. Each one starts out pointing at
// the corresponding `ffi` stub; presumably they are re-pointed at the real
// vDSO symbols during initialization — TODO confirm with the code that
// resolves these (not visible in this file).
#[allow(non_upper_case_globals)]
pub static mut clock_gettime: ffi::vdso_clock_gettime_fn = ffi::vdso_clock_gettime_stub;
#[allow(non_upper_case_globals)]
pub static mut getcpu: ffi::vdso_getcpu_fn = ffi::vdso_getcpu_stub;
#[allow(non_upper_case_globals)]
pub static mut gettimeofday: ffi::vdso_gettimeofday_fn = ffi::vdso_gettimeofday_stub;
#[allow(non_upper_case_globals)]
pub static mut time: ffi::vdso_time_fn = ffi::vdso_time_stub;
|
use crate::{components::DebugInfomationDisplay, traits::DebugDisplayFormat};
use amethyst::{
core::Transform,
ecs::{Entities, Entity, Join, ReadStorage, System, WriteStorage},
renderer::debug_drawing::DebugLinesComponent,
ui::{Anchor, FontHandle, LineMode, UiText, UiTransform},
};
use std::{collections::BTreeMap, marker::PhantomData};
/// System that renders per-entity debug output (a `UiText` label plus debug
/// lines) for every entity tagged with `DebugInfomationDisplay<T>`.
pub struct DebugInfomationDisplaySystem<T> {
    // Ties the system to the `DebugDisplayFormat` implementation it renders.
    marker: PhantomData<T>,
    // Font used for the generated UI labels.
    system_font: FontHandle,
    // Maps each tagged entity to the UI entity created for it.
    displayed: BTreeMap<Entity, Entity>,
}
impl<T> DebugInfomationDisplaySystem<T> {
pub fn new(system_font: FontHandle) -> Self {
DebugInfomationDisplaySystem {
marker: PhantomData,
system_font,
displayed: BTreeMap::new(),
}
}
}
impl<'s, T> System<'s> for DebugInfomationDisplaySystem<T>
where
    T: DebugDisplayFormat<'s>,
{
    type SystemData = (
        WriteStorage<'s, UiText>,
        WriteStorage<'s, UiTransform>,
        WriteStorage<'s, DebugLinesComponent>,
        ReadStorage<'s, DebugInfomationDisplay<T>>,
        ReadStorage<'s, Transform>,
        Entities<'s>,
        T::DisplayData,
    );

    fn run(&mut self, data: Self::SystemData) {
        let (
            mut texts,
            mut ui_transforms,
            mut debug_lines,
            tags,
            transforms,
            entities,
            display_data,
        ) = data;
        // Only entities carrying the debug tag.
        for (_, e, transform) in (&tags, &*entities, &transforms).join() {
            // Create the companion UI entity if one doesn't exist yet.
            // NOTE(review): entries are never removed from `displayed` when a
            // tagged entity despawns — confirm cleanup happens elsewhere.
            let mut ui_entity = self.displayed.get(&e).map(|e| *e);
            if ui_entity.is_none() {
                let entity = entities.create();
                self.displayed.insert(e, entity);
                ui_entity = Some(entity);
            }
            // Failures are logged and skipped rather than aborting the frame.
            match update_ui::<T>(
                e,
                ui_entity.unwrap(),
                transform,
                &mut texts,
                &mut ui_transforms,
                self.system_font.clone(),
                &display_data,
            ) {
                Ok(()) => {}
                Err(err) => log::error!("error: {:?}", err),
            }
            match update_debug_lines::<T>(e, &mut debug_lines, &display_data) {
                Ok(()) => {}
                Err(err) => log::error!("error: {:?}", err),
            }
        }
    }
}
/// Rebuilds the `DebugLinesComponent` for one debugged entity by delegating
/// the actual drawing to the format type `T`.
fn update_debug_lines<'s, T>(
    base_entity: Entity,
    debug_lines: &mut WriteStorage<'s, DebugLinesComponent>,
    display_data: &T::DisplayData,
) -> amethyst::Result<()>
where
    T: DebugDisplayFormat<'s>,
{
    let debug_lines = debug_lines
        .entry(base_entity)?
        .or_insert(DebugLinesComponent::with_capacity(32));
    // Lines are fully redrawn on every call.
    debug_lines.clear();
    // 1023. is passed straight through to `T::debug_lines` — presumably a
    // depth/z value; TODO confirm its meaning against the trait definition.
    T::debug_lines(base_entity, debug_lines, display_data, 1023.);
    Ok(())
}
/// Creates or refreshes the `UiText`/`UiTransform` pair for one debugged
/// entity and moves the label to the entity's current translation.
fn update_ui<'s, T>(
    base_entity: Entity,
    entity: Entity,
    transform: &Transform,
    texts: &mut WriteStorage<'s, UiText>,
    transforms: &mut WriteStorage<'s, UiTransform>,
    system_font: FontHandle,
    display_data: &T::DisplayData,
) -> amethyst::Result<()>
where
    T: DebugDisplayFormat<'s>,
{
    // NOTE(review): `or_insert` constructs the default `UiText` eagerly even
    // when the entry already exists — consider `or_insert_with` if the
    // storage-entry API supports it.
    let text = texts.entry(entity)?.or_insert(UiText::new(
        system_font,
        "".to_string(),
        [0., 0., 0., 1.],
        16.,
    ));
    text.line_mode = LineMode::Wrap;
    text.align = Anchor::TopLeft;
    // One 600x600 wrapping text area, uniquely named from the UI entity's
    // id/generation.
    let ui_transform = transforms.entry(entity)?.or_insert(UiTransform::new(
        format!("debug_{}_{}", entity.id(), entity.gen().id()),
        Anchor::Middle,
        Anchor::TopLeft,
        0.,
        0.,
        0.,
        600.,
        600.,
    ));
    // Pin the label to the debugged entity's translation.
    ui_transform.local_x = transform.translation().x;
    ui_transform.local_y = transform.translation().y;
    ui_transform.local_z = transform.translation().z;
    // An empty string is shown when `T::display` produces nothing.
    text.text = T::display(base_entity, display_data).unwrap_or("".to_string());
    Ok(())
}
|
//! Tapes to which programs are written.
//!
//! `Vec<MaybeUninit<usize>>` implements both `AsClearedWriter` and `Writer`
//! when the `std` feature is enabled.
#[cfg(feature = "alloc")]
use alloc::vec::Vec;
use core::mem::MaybeUninit;
/// Types from which a cleared writer can be obtained.
///
/// # Safety
///
/// Implementations represent a tape — in essence a glorified
/// `[MaybeUninit<usize>]` slice that can be made longer. The writer returned
/// by `as_cleared_writer` must keep its offset/`take` bookkeeping consistent
/// with the underlying storage, since unsafe code relies on the words it
/// hands out actually belonging to that storage.
pub unsafe trait AsClearedWriter: AsRef<[MaybeUninit<usize>]> {
    /// Returns a cleared writer from this value.
    fn as_cleared_writer(&mut self) -> &mut dyn Writer;
}
/// Types that can be written into.
///
/// # Safety
///
/// This trait is unsafe for the same reasons as `AsClearedWriter`: unsafe
/// code trusts that `take` returns exactly `n` valid words positioned at the
/// current offset.
pub unsafe trait Writer {
    /// Returns the current position of the writer, in words.
    fn word_offset(&self) -> usize;
    /// Take `n` words from the writer, starting at the current position.
    fn take(&mut self, n: usize) -> Result<&mut [MaybeUninit<usize>], UnexpectedEndError>;
}
/// An error that signals that the end of the tape was unexpectedly reached.
///
/// Returned by [`Writer::take`] when the requested words are not available.
#[derive(Clone, Copy, Debug)]
pub struct UnexpectedEndError;
#[cfg(feature = "alloc")]
unsafe impl AsClearedWriter for Vec<MaybeUninit<usize>> {
    #[inline(always)]
    fn as_cleared_writer(&mut self) -> &mut dyn Writer {
        // Start from offset zero; the allocation's capacity is kept for reuse.
        self.clear();
        self
    }
}
#[cfg(feature = "alloc")]
// The `'tape` lifetime parameter the impl used to declare was completely
// unused (unconstrained by the trait, self type, or any method) — removed.
unsafe impl Writer for Vec<MaybeUninit<usize>> {
    #[inline(always)]
    fn word_offset(&self) -> usize {
        self.len()
    }

    /// Grows the vector by `words` and returns the freshly added span.
    ///
    /// Never actually returns `UnexpectedEndError`: `reserve` aborts on
    /// allocation failure instead.
    #[inline(always)]
    fn take(&mut self, words: usize) -> Result<&mut [MaybeUninit<usize>], UnexpectedEndError> {
        let len = self.len();
        self.reserve(words);
        unsafe {
            // SAFETY: `reserve(words)` guarantees capacity for at least
            // `len + words` elements, so the pointer arithmetic stays inside
            // the allocation, and `MaybeUninit` words need no initialization
            // before being exposed; `set_len` then covers exactly that span.
            let slice = core::slice::from_raw_parts_mut(self.as_mut_ptr().add(len), words);
            self.set_len(len + words);
            Ok(slice)
        }
    }
}
|
use std::io;
use bytes::BytesMut;
use tokio_util::codec::Decoder;
use persist_core::error::Error;
/// Line-oriented decoder that splits a byte stream on LF, CRLF, or lone CR.
#[derive(Debug, Clone, Default)]
pub struct LogDecoder {
    // Index into the buffer where the next scan should resume, so bytes
    // already inspected on a previous (incomplete) call are not re-scanned.
    next_index: usize,
}
impl LogDecoder {
    /// Creates a decoder that starts scanning at the beginning of the buffer.
    pub fn new() -> Self {
        Self::default()
    }
}
impl Decoder for LogDecoder {
    type Item = String;
    type Error = Error;

    /// Splits the next complete line off `src`.
    ///
    /// A line ends at LF, CRLF, or a lone CR; the terminator is consumed but
    /// not returned. Returns `Ok(None)` when no complete line is buffered.
    fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        // Resume scanning where the previous (incomplete) call left off.
        let mut iter = src[self.next_index..].iter().enumerate().peekable();
        let position = loop {
            let next = iter.next();
            match next {
                Some((index, b'\n')) => break Some(self.next_index + index),
                // Special logic to not split twice when encountering a CRLF ("\r\n").
                // We always wait for an additional character after a CR ('\r') and we make sure it isn't a LF ('\n').
                Some((index, b'\r')) => match iter.peek() {
                    // CRLF: terminate at the LF so both bytes get consumed below.
                    Some((lf_index, b'\n')) => break Some(self.next_index + lf_index),
                    // CR is the last buffered byte: wait for more data.
                    None => break None,
                    // A lone CR also terminates a line.
                    _ => break Some(self.next_index + index),
                },
                Some(_) => {}
                None => {
                    self.next_index = src.len();
                    break None;
                }
            }
        };
        position
            .map(|index| {
                self.next_index = 0;
                let line = src.split_to(index + 1);
                // Drop the terminator byte ('\n' or a lone '\r')...
                let line = &line[..line.len() - 1];
                // ...and, for CRLF, the '\r' preceding the '\n' as well.
                // (Bug fix: previously the '\r' stayed attached to CRLF lines,
                // while `decode_eof` stripped it — inconsistent.)
                let line = match line.split_last() {
                    Some((&b'\r', rest)) => rest,
                    _ => line,
                };
                let line = std::str::from_utf8(line).map_err(|_| {
                    io::Error::new(io::ErrorKind::InvalidData, "Unable to decode input as UTF8")
                })?;
                Ok(line.to_string())
            })
            .transpose()
    }

    /// Flushes whatever remains in the buffer as a final line at EOF,
    /// stripping a single trailing '\r' if present.
    fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
        self.next_index = 0;
        if src.is_empty() {
            Ok(None)
        } else {
            let line = src.split();
            let line = if matches!(line.last(), Some(b'\r')) {
                &line[..line.len() - 1]
            } else {
                &line[..]
            };
            let line = std::str::from_utf8(line).map_err(|_| {
                io::Error::new(io::ErrorKind::InvalidData, "Unable to decode input as UTF8")
            })?;
            Ok(Some(line.to_string()))
        }
    }
}
|
use std::sync::Arc;
use crate::field::aliased_field_name::AliasedFieldName;
use super::{record_index::RecordIndex, record_schema::RecordSchema};
use apllodb_shared_components::{
ApllodbResult, RPos, Schema, SchemaIndex, SqlConvertible, SqlState, SqlValue,
};
use apllodb_storage_engine_interface::Row;
/// Record. Clients, servers, and SQL Processor use this.
#[derive(Clone, PartialEq, Hash, Debug)]
pub struct Record {
    // Schema describing the fields; shared (Arc) across records of a result set.
    pub(crate) schema: Arc<RecordSchema>,
    // Raw values from the storage engine; positions line up with `schema`.
    pub(crate) row: Row,
}
impl Record {
    /// Wraps a storage `Row` together with the schema describing its fields.
    pub(crate) fn new(schema: Arc<RecordSchema>, row: Row) -> Self {
        Self { schema, row }
    }

    /// Get Rust value from rec field.
    ///
    /// Returns `None` for NULL.
    ///
    /// # Failures
    ///
    /// - [NameErrorNotFound](apllodb-shared-components::SqlState::NameErrorNotFound) when:
    ///   - Specified field does not exist in this record.
    pub fn get<T: SqlConvertible>(&self, index: &RecordIndex) -> ApllodbResult<Option<T>> {
        self.row.get(self.pos(index)?)
    }

    /// Get sequence of field name vs SqlValue.
    pub fn into_name_values(self) -> Vec<(String, SqlValue)> {
        self.schema
            .to_aliased_field_names()
            .into_iter()
            // Render each field name the way `SchemaIndex` displays it.
            .map(|afn| format!("{}", SchemaIndex::from(&afn)))
            .zip(self.row.into_values())
            .collect()
    }

    /// Borrows the raw `SqlValue` addressed by `index`.
    pub(crate) fn get_sql_value(&self, index: &SchemaIndex) -> ApllodbResult<&SqlValue> {
        self.row
            .get_sql_value(self.pos(&RecordIndex::Name(index.clone()))?)
    }

    /// Resolves a positional or named index to a position within the row.
    fn pos(&self, index: &RecordIndex) -> ApllodbResult<RPos> {
        match index {
            RecordIndex::Pos(pos) => Ok(*pos),
            RecordIndex::Name(index) => {
                let (pos, _) = self.schema.index(index)?;
                Ok(pos)
            }
        }
    }

    /// Like `get_sql_value`, but maps "field not found" to `None`; other
    /// errors are preserved as `Some(Err(_))`.
    pub(crate) fn helper_get_sql_value(
        &self,
        joined_name: &AliasedFieldName,
    ) -> Option<ApllodbResult<SqlValue>> {
        self.get_sql_value(&SchemaIndex::from(joined_name))
            .map_or_else(
                |e| {
                    if matches!(e.kind(), SqlState::NameErrorNotFound) {
                        None
                    } else {
                        Some(Err(e))
                    }
                },
                |sql_value| Some(Ok(sql_value.clone())),
            )
    }
}
|
use super::Location;
/// A rectangular region of the landscape: origin `(x, y)` plus size
/// `width` × `height`.
#[allow(clippy::module_name_repetitions)]
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "cuda", derive(rust_cuda::rustacuda_core::DeviceCopy))]
#[cfg_attr(feature = "cuda", rustacuda(core = "rust_cuda::rustacuda_core"))]
pub struct LandscapeExtent {
    x: u32,
    y: u32,
    width: u32,
    height: u32,
}
impl LandscapeExtent {
    /// Creates a new extent; the `debug_ensures` contracts check the fields
    /// are stored unchanged (debug builds only).
    #[must_use]
    #[debug_ensures(ret.x() == x, "stores x")]
    #[debug_ensures(ret.y() == y, "stores y")]
    #[debug_ensures(ret.width() == width, "stores width")]
    #[debug_ensures(ret.height() == height, "stores height")]
    pub fn new(x: u32, y: u32, width: u32, height: u32) -> Self {
        Self {
            x,
            y,
            width,
            height,
        }
    }

    /// Origin column.
    #[must_use]
    pub fn x(&self) -> u32 {
        self.x
    }

    /// Origin row.
    #[must_use]
    pub fn y(&self) -> u32 {
        self.y
    }

    /// Extent width in cells.
    #[must_use]
    pub fn width(&self) -> u32 {
        self.width
    }

    /// Extent height in cells.
    #[must_use]
    pub fn height(&self) -> u32 {
        self.height
    }

    /// Whether `location` lies inside the half-open ranges
    /// `[x, x + width)` × `[y, y + height)`.
    // NOTE(review): `self.x + self.width` can overflow u32 for extents near
    // u32::MAX — confirm extents are bounded by callers.
    #[must_use]
    pub fn contains(&self, location: &Location) -> bool {
        location.x() >= self.x
            && location.x() < (self.x + self.width)
            && location.y() >= self.y
            && location.y() < (self.y + self.height)
    }

    /// Row-major iterator over every location in the extent.
    #[must_use]
    pub fn iter(&self) -> LocationIterator {
        LocationIterator {
            x: self.x,
            y: self.y,
            extent: self.clone(),
        }
    }
}
/// Row-major cursor over a `LandscapeExtent`; `(x, y)` is the next location
/// to yield.
#[derive(Debug)]
pub struct LocationIterator {
    x: u32,
    y: u32,
    extent: LandscapeExtent,
}
impl Iterator for LocationIterator {
    type Item = Location;

    /// Yields locations row by row; exhausted once `y` walks past the
    /// extent's bottom edge.
    fn next(&mut self) -> Option<Self::Item> {
        if self.y >= self.extent.y() + self.extent.height() {
            return None;
        }
        let location = Location::new(self.x, self.y);
        // Advance column-first, wrapping to the start of the next row.
        self.x += 1;
        if self.x >= self.extent.x() + self.extent.width() {
            self.x = self.extent.x();
            self.y += 1;
        }
        Some(location)
    }
}
|
//! Simple flat forward drawing pass.
use std::marker::PhantomData;
use cgmath::{Matrix4, One};
use gfx::pso::buffer::ElemStride;
use rayon::iter::ParallelIterator;
use rayon::iter::internal::UnindexedConsumer;
use specs::{Component, Fetch, ParJoin, ReadStorage};
use cam::Camera;
use error::Result;
use mesh::Mesh;
use mtl::Material;
use pipe::pass::{Pass, PassApply, PassData, Supplier};
use pipe::{DepthMode, Effect, NewEffect};
use types::Encoder;
use vertex::{Position, Query, TexCoord};
static VERT_SRC: &[u8] = include_bytes!("shaders/vertex/basic.glsl");
static FRAG_SRC: &[u8] = include_bytes!("shaders/fragment/flat.glsl");
/// Draw mesh without lighting
/// `V` is `VertexFormat`
/// `M` is `Mesh` component
/// `N` is `Material` component
/// `T` is transform matrix component
#[derive(Clone, Debug, PartialEq)]
pub struct DrawFlat<V, M, N, T> {
    // Zero-sized: the type parameters only select component/vertex types.
    _pd: PhantomData<(V, M, N, T)>,
}
impl<V, M, N, T> DrawFlat<V, M, N, T>
where
    V: Query<(Position, TexCoord)>,
    T: Component + AsRef<[[f32; 4]; 4]> + Send + Sync,
    M: Component + AsRef<Mesh> + Send + Sync,
    N: Component + AsRef<Material> + Send + Sync,
    Self: Pass,
{
    /// Create instance of `DrawFlat` pass
    pub fn new() -> Self {
        DrawFlat { _pd: PhantomData }
    }
}
// Contents of the per-draw constant buffer; size/layout must match the
// "VertexArgs" raw constant buffer declared in `compile`.
#[derive(Clone, Copy, Debug)]
struct VertexArgs {
    proj: [[f32; 4]; 4],
    view: [[f32; 4]; 4],
    model: [[f32; 4]; 4],
}
// Resources this pass fetches each frame: the (optional) active camera plus
// the mesh, material, and transform component storages.
impl<'a, V, M, N, T> PassData<'a> for DrawFlat<V, M, N, T>
where
    V: Query<(Position, TexCoord)>,
    T: Component + AsRef<[[f32; 4]; 4]> + Send + Sync,
    M: Component + AsRef<Mesh> + Send + Sync,
    N: Component + AsRef<Material> + Send + Sync,
{
    type Data = (
        Option<Fetch<'a, Camera>>,
        ReadStorage<'a, M>,
        ReadStorage<'a, N>,
        ReadStorage<'a, T>,
    );
}
// The parallel draw-command iterator produced by this pass (see `apply`).
impl<'a, V, M, N, T> PassApply<'a> for DrawFlat<V, M, N, T>
where
    V: Query<(Position, TexCoord)>,
    T: Component + AsRef<[[f32; 4]; 4]> + Send + Sync,
    M: Component + AsRef<Mesh> + Send + Sync,
    N: Component + AsRef<Material> + Send + Sync,
{
    type Apply = DrawFlatApply<'a, V, M, N, T>;
}
impl<V, M, N, T> Pass for DrawFlat<V, M, N, T>
where
    V: Query<(Position, TexCoord)>,
    T: Component + AsRef<[[f32; 4]; 4]> + Send + Sync,
    M: Component + AsRef<Mesh> + Send + Sync,
    N: Component + AsRef<Material> + Send + Sync,
{
    /// Builds the flat-shading effect: basic vertex + flat fragment shader,
    /// one raw constant buffer for the matrices, the queried vertex
    /// attributes, an "albedo" texture slot, and a depth-tested color output.
    fn compile(&self, effect: NewEffect) -> Result<Effect> {
        use std::mem;
        effect
            .simple(VERT_SRC, FRAG_SRC)
            .with_raw_constant_buffer("VertexArgs", mem::size_of::<VertexArgs>(), 1)
            .with_raw_vertex_buffer(V::QUERIED_ATTRIBUTES, V::size() as ElemStride, 0)
            .with_texture("albedo")
            .with_output("color", Some(DepthMode::LessEqualWrite))
            .build()
    }

    /// Bundles the fetched storages into the parallel apply iterator.
    fn apply<'a, 'b: 'a>(
        &'a mut self,
        supplier: Supplier<'a>,
        (camera, mesh, material, global): (
            Option<Fetch<'b, Camera>>,
            ReadStorage<'b, M>,
            ReadStorage<'b, N>,
            ReadStorage<'b, T>,
        ),
    ) -> DrawFlatApply<'a, V, M, N, T> {
        // Field-init shorthand: the bindings already match the field names.
        DrawFlatApply {
            camera,
            mesh,
            material,
            global,
            supplier,
            pd: PhantomData,
        }
    }
}
/// Parallel iterator produced by `DrawFlat::apply`; holds the fetched
/// storages (and the supplier) for the duration of the draw.
pub struct DrawFlatApply<'a, V, M: Component, N: Component, T: Component> {
    camera: Option<Fetch<'a, Camera>>,
    mesh: ReadStorage<'a, M>,
    material: ReadStorage<'a, N>,
    global: ReadStorage<'a, T>,
    supplier: Supplier<'a>,
    pd: PhantomData<V>,
}
impl<'a, V, M, N, T> ParallelIterator for DrawFlatApply<'a, V, M, N, T>
where
    V: Query<(Position, TexCoord)>,
    T: Component + AsRef<[[f32; 4]; 4]> + Send + Sync,
    M: Component + AsRef<Mesh> + Send + Sync,
    N: Component + AsRef<Material> + Send + Sync,
{
    type Item = ();

    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        let DrawFlatApply {
            camera,
            mesh,
            material,
            global,
            supplier,
            ..
        } = self;
        // Borrow once so the parallel closures can share the camera.
        let camera = &camera;
        supplier
            .supply((&mesh, &material, &global).par_join().map(
                move |(mesh, material, global)| {
                    // Inner closure is the actual draw command, executed with
                    // an encoder/effect by the supplier.
                    move |encoder: &mut Encoder, effect: &mut Effect| {
                        let mesh = mesh.as_ref();
                        // Skip entities whose mesh lacks the queried attributes.
                        let vbuf = match mesh.buffer(V::QUERIED_ATTRIBUTES) {
                            Some(vbuf) => vbuf.clone(),
                            None => return,
                        };
                        let material = material.as_ref();
                        // Without an active camera, fall back to identity
                        // projection/view matrices.
                        let vertex_args = camera
                            .as_ref()
                            .map(|cam| {
                                VertexArgs {
                                    proj: cam.proj.into(),
                                    view: cam.to_view_matrix().into(),
                                    model: *global.as_ref(),
                                }
                            })
                            .unwrap_or_else(|| {
                                VertexArgs {
                                    proj: Matrix4::one().into(),
                                    view: Matrix4::one().into(),
                                    model: *global.as_ref(),
                                }
                            });
                        effect.update_constant_buffer("VertexArgs", &vertex_args, encoder);
                        effect.data.textures.push(material.albedo.view().clone());
                        effect.data.samplers.push(material.albedo.sampler().clone());
                        effect.data.vertex_bufs.push(vbuf);
                        effect.draw(mesh.slice(), encoder);
                    }
                },
            ))
            .drive_unindexed(consumer)
    }
}
|
use super::args::PbFind;
/*******************************************************
* Copyright (C) 2019,2020 Jonathan Gerber <jlgerber@gmail.com>
*
* This file is part of packybara.
*
* packybara can not be copied and/or distributed without the express
* permission of Jonathan Gerber
*******************************************************/
use packybara::db::find_all::revisions::{OrderDirection, OrderRevisionBy};
use packybara::db::traits::*;
use packybara::packrat::{Client, PackratDb};
use prettytable::{cell, format, row, table};
use std::ops::Deref;
use std::str::FromStr;
/// Pretty print the set of revisions from the database that match the provided criteria
///
/// # Arguments
/// * `client` - A Client instance used to connect to the database
/// * `cmd` - A PbFind enum instance used to extract the relevant commandline arguments
///
/// # Returns
/// * a Unit if Ok, or a boxed error if Err
pub fn find(client: Client, cmd: PbFind) -> Result<(), Box<dyn std::error::Error>> {
    if let PbFind::Revisions {
        id,
        transaction_id,
        author,
        order_by,
        order_direction,
        limit,
        ..
    } = cmd
    {
        let mut pb = PackratDb::new(client);
        let mut results = pb.find_all_revisions();
        // Unrecognized order-by tokens are silently dropped.
        let order_by_vec = if let Some(ref ob) = order_by {
            ob.split(',')
                .filter_map(|token| OrderRevisionBy::from_str(token).ok())
                .collect::<Vec<OrderRevisionBy>>()
        } else {
            vec![]
        };
        // An unparsable direction falls back to the query's default.
        let order_dir = if let Some(ref dir) = order_direction {
            OrderDirection::from_str(dir).ok()
        } else {
            None
        };
        results
            .id_opt(id)
            .transaction_id_opt(transaction_id)
            .author_opt(author.as_ref().map(Deref::deref))
            .order_by(order_by_vec)
            .order_direction_opt(order_dir)
            .limit_opt(limit);
        let results = results.query()?;
        // For now I do this. I need to add packge handling into the query
        // either by switching functions or handling the sql on this end
        let mut table = table!([bFg => "ID","TRANSACTION ID", "AUTHOR", "DATETIME", "COMMENT"]);
        for result in results {
            table.add_row(row![
                result.id,
                result.transaction_id,
                result.author,
                result.datetime.format("%F %r"),
                result.comment,
            ]);
        }
        table.set_format(*format::consts::FORMAT_CLEAN); //FORMAT_NO_LINESEP_WITH_TITLE FORMAT_NO_BORDER_LINE_SEPARATOR
        table.printstd();
    }
    Ok(())
}
|
use std::io;
// Initial capacity for the input buffer; `read_line` grows it as needed.
const STR_ALLOCATION_SIZE: usize = 5;

/// Reads lines from stdin forever and echoes each one back to stdout.
fn main() {
    loop {
        let mut input = String::with_capacity(STR_ALLOCATION_SIZE);
        io::stdin()
            .read_line(&mut input)
            .expect("Could not read CLI input.");
        println!("Parotting input: {}", input);
    }
}
/// Union-find whose queries (`find`, `is_same_group`, `count_elements`) can
/// be answered for any past version; each `unite` creates a new version.
struct PartiallyPersistentUnionFindTree {
    nodes: Vec<PersistentNode>,
    // Latest version; starts at 0 and is incremented by every `unite`.
    version: Version,
}
/// Monotonically increasing revision counter; bumped on every `unite`.
type Version = usize;
/// Current link of a node in the union-find forest.
#[derive(Clone, Copy)]
enum Node {
    /// A set representative; `node_count` is the current size of its set.
    Root { node_count: usize },
    /// A non-root node pointing at its parent.
    Descendant { parent_index: usize },
}
/// A node plus the history needed to answer queries about past versions.
#[derive(Clone)]
struct PersistentNode {
    // Present-day link (root or parent pointer).
    node: Node,
    // Version at which this node gained a parent; `None` while still a root.
    parent_updated_version: Option<Version>,
    // (version, set size) snapshots recorded each time this node, as a root,
    // absorbed another set; consulted by `count_elements`.
    node_count_history: Vec<(Version, usize)>,
}
impl PartiallyPersistentUnionFindTree {
    /// Creates `size` disjoint singleton sets at version 0.
    pub fn new(size: usize) -> Self {
        PartiallyPersistentUnionFindTree {
            nodes: vec![
                PersistentNode {
                    node: Node::Root { node_count: 1 },
                    parent_updated_version: None,
                    node_count_history: vec![],
                };
                size
            ],
            version: 0,
        }
    }

    /// Total number of elements (not sets).
    pub fn len(&self) -> usize {
        self.nodes.len()
    }

    /// Merges the sets containing `l_index` and `r_index`, creating a new
    /// version. Returns `false` when they were already in the same set
    /// (note: the version is bumped even in that case).
    pub fn unite(&mut self, l_index: usize, r_index: usize) -> bool {
        self.version += 1;
        let (mut l_root_index, mut r_root_index) = {
            let version = self.version;
            (self.find(version, l_index), self.find(version, r_index))
        };
        if l_root_index == r_root_index {
            return false;
        }
        match (self.nodes[l_root_index].node, self.nodes[r_root_index].node) {
            (Node::Root { node_count: l_node_count }, Node::Root { node_count: r_node_count }) => {
                let node_count = l_node_count + r_node_count;
                // Union by size: the larger tree's root stays the root.
                if l_node_count < r_node_count {
                    std::mem::swap(&mut l_root_index, &mut r_root_index);
                }
                self.nodes[l_root_index].node = Node::Root { node_count: node_count };
                self.nodes[l_root_index].node_count_history.push((self.version, node_count));
                self.nodes[r_root_index].node = Node::Descendant { parent_index: l_root_index };
                self.nodes[r_root_index].parent_updated_version = Some(self.version);
            }
            _ => unreachable!("`find` must return root index"),
        }
        true
    }

    /// Returns the representative of `index`'s set as of `version`.
    ///
    /// No path compression: parent links are immutable history, and the walk
    /// only follows edges created at or before `version`.
    pub fn find(&self, version: Version, index: usize) -> usize {
        assert!(version <= self.version());
        debug_assert!(index < self.len());
        match self.nodes[index].parent_updated_version {
            None => index,
            // Parent edge created after `version`: the node was still a root.
            Some(parent_updated_version) if version < parent_updated_version => index,
            _ => match self.nodes[index].node {
                Node::Descendant { parent_index } => self.find(version, parent_index),
                _ => unreachable!("`parent_updated_version` is invalid"),
            },
        }
    }

    /// Whether `l_index` and `r_index` belonged to the same set at `version`.
    pub fn is_same_group(&self, version: Version, l_index: usize, r_index: usize) -> bool {
        assert!(version <= self.version());
        debug_assert!(l_index < self.len());
        debug_assert!(r_index < self.len());
        self.find(version, l_index) == self.find(version, r_index)
    }

    /// Size of the set whose root is `index` at `version`: the last size
    /// snapshot taken at or before `version`, or 1 if there is none.
    // NOTE(review): this reads `index`'s own history, so `index` should be
    // the root at `version` (e.g. the result of `find`) — confirm callers.
    pub fn count_elements(&self, version: Version, index: usize) -> usize {
        assert!(version <= self.version());
        debug_assert!(index < self.len());
        let node_count_history = self.nodes[index].node_count_history.as_slice();
        match search::BinarySearch::binary_search(node_count_history, |&(updated_version, _)| updated_version <= version) {
            Some(i) => node_count_history[i].1,
            None => 1,
        }
    }

    /// Latest version number.
    pub fn version(&self) -> Version {
        self.version
    }
}
}
|
#![feature(proc_macro)]
extern crate hyper;
extern crate maud;
extern crate futures;
extern crate url;
#[macro_use]
extern crate log;
extern crate env_logger;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate diesel;
use std::collections::HashMap;
use std::io;
use std::env;
use std::error::Error;
use hyper::{Chunk, StatusCode};
use hyper::Method::{Get, Post};
use hyper::server::{Request, Response, Service};
use hyper::header::{ContentLength, ContentType};
use futures::Stream;
use futures::future::{Future, FutureResult};
use diesel::prelude::*;
use diesel::pg::PgConnection;
use maud::html;
mod schema;
mod models;
use models::{Message, NewMessage};
/// Stateless hyper `Service`; a fresh DB connection is opened per request.
struct Microservice;
impl Service for Microservice {
    type Request = Request;
    type Response = Response;
    type Error = hyper::Error;
    type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;

    /// Routes `POST /` to message insertion and `GET /` to message listing;
    /// any other method/path is a 404, and a DB connection failure is a 500.
    fn call(&self, request: Request) -> Self::Future {
        let db_connection = match connect_to_db() {
            Some(connection) => connection,
            None => {
                return Box::new(futures::future::ok(
                    Response::new().with_status(StatusCode::InternalServerError),
                ))
            }
        };
        match (request.method(), request.path()) {
            (&Post, "/") => {
                // Body -> form fields -> DB insert -> JSON response.
                let future = request
                    .body()
                    .concat2()
                    .and_then(parse_form)
                    .and_then(move |new_message| write_to_db(new_message, &db_connection))
                    .then(make_post_response);
                Box::new(future)
            }
            (&Get, "/") => {
                // A missing query string means an unbounded time range.
                let time_range = match request.query() {
                    Some(query) => parse_query(query),
                    None => Ok(TimeRange {
                        before: None,
                        after: None,
                    }),
                };
                let response = match time_range {
                    Ok(time_range) => make_get_response(query_db(time_range, &db_connection)),
                    Err(error) => make_error_response(&error),
                };
                Box::new(response)
            }
            _ => Box::new(futures::future::ok(
                Response::new().with_status(StatusCode::NotFound),
            )),
        }
    }
}
/// Fallback used when the `DATABASE_URL` environment variable is not set.
const DEFAULT_DATABASE_URL: &'static str = "postgresql://postgres:postgres@localhost:5432/postgres";

/// Opens a PostgreSQL connection, returning `None` (after logging) on failure.
fn connect_to_db() -> Option<PgConnection> {
    // `unwrap_or_else` avoids allocating the fallback when the env var is set.
    let database_url =
        env::var("DATABASE_URL").unwrap_or_else(|_| String::from(DEFAULT_DATABASE_URL));
    match PgConnection::establish(&database_url) {
        Ok(connection) => Some(connection),
        Err(error) => {
            // NOTE(review): `Error::description` is deprecated; kept here to
            // avoid changing the logged text in this edit.
            error!("Error connecting to database: {}", error.description());
            None
        }
    }
}
/// Extracts a `NewMessage` from an urlencoded POST body.
///
/// `message` is required; `username` defaults to "anonymous". A missing
/// `message` field produces an `InvalidInput` error.
fn parse_form(form_chunk: Chunk) -> FutureResult<NewMessage, hyper::Error> {
    let mut form = url::form_urlencoded::parse(form_chunk.as_ref())
        .into_owned()
        .collect::<HashMap<String, String>>();
    if let Some(message) = form.remove("message") {
        // Lazily build the default so the common case doesn't allocate it.
        let username = form
            .remove("username")
            .unwrap_or_else(|| String::from("anonymous"));
        futures::future::ok(NewMessage { username, message })
    } else {
        futures::future::err(hyper::Error::from(io::Error::new(
            io::ErrorKind::InvalidInput,
            // Bug fix: the closing quote around 'message' was missing.
            "Missing field 'message'",
        )))
    }
}
/// Builds the HTTP response for a message insert: a JSON `{"timestamp": …}`
/// body on success, or a JSON error body (via `make_error_response`) on failure.
fn make_post_response(
    result: Result<i64, hyper::Error>,
) -> FutureResult<hyper::Response, hyper::Error> {
    match result {
        Ok(timestamp) => {
            let payload = json!({"timestamp": timestamp}).to_string();
            let response = Response::new()
                .with_header(ContentLength(payload.len() as u64))
                .with_header(ContentType::json())
                .with_body(payload);
            debug!("{:?}", response);
            futures::future::ok(response)
        }
        Err(error) => make_error_response(error.description()),
    }
}
fn make_error_response(error_message: &str) -> FutureResult<hyper::Response, hyper::Error> {
let payload = json!({"error": error_message}).to_string();
let response = Response::new()
.with_status(StatusCode::InternalServerError)
.with_header(ContentLength(payload.len() as u64))
.with_header(ContentType::json())
.with_body(payload);
debug!("{:?}", response);
futures::future::ok(response)
}
/// Optional exclusive bounds on message timestamps, parsed from the
/// `before`/`after` query parameters. A `None` bound means "unbounded".
struct TimeRange {
    before: Option<i64>,
    after: Option<i64>,
}
/// Parse optional `before`/`after` i64 bounds from an urlencoded query string.
///
/// Returns a human-readable error message when either value is present but
/// not a valid integer.
fn parse_query(query: &str) -> Result<TimeRange, String> {
    // `as_bytes()` already yields `&[u8]`; the extra `&` was redundant.
    let args = url::form_urlencoded::parse(query.as_bytes())
        .into_owned()
        .collect::<HashMap<String, String>>();
    // Parse each bound directly, instead of the original check-then-unwrap
    // dance at the end of the function.
    let before = match args.get("before").map(|value| value.parse::<i64>()) {
        Some(Ok(value)) => Some(value),
        Some(Err(error)) => return Err(format!("Error parsing 'before': {}", error)),
        None => None,
    };
    let after = match args.get("after").map(|value| value.parse::<i64>()) {
        Some(Ok(value)) => Some(value),
        Some(Err(error)) => return Err(format!("Error parsing 'after': {}", error)),
        None => None,
    };
    Ok(TimeRange { before, after })
}
/// Render the fetched messages as an HTML page, or answer with a 500 when
/// the database query failed (`messages` is `None`).
fn make_get_response(
    messages: Option<Vec<Message>>,
) -> FutureResult<hyper::Response, hyper::Error> {
    let response = if let Some(messages) = messages {
        let body = render_page(messages);
        let length = ContentLength(body.len() as u64);
        Response::new().with_header(length).with_body(body)
    } else {
        Response::new().with_status(StatusCode::InternalServerError)
    };
    debug!("{:?}", response);
    futures::future::ok(response)
}
fn write_to_db(
new_message: NewMessage,
db_connection: &PgConnection,
) -> FutureResult<i64, hyper::Error> {
use schema::messages;
let timestamp = diesel::insert_into(messages::table)
.values(&new_message)
.returning(messages::timestamp)
.get_result(db_connection);
match timestamp {
Ok(timestamp) => futures::future::ok(timestamp),
Err(error) => {
error!("Error writing to database: {}", error.description());
futures::future::err(hyper::Error::from(
io::Error::new(io::ErrorKind::Other, "service error"),
))
}
}
}
/// Load messages within the (exclusive) timestamp window given by
/// `time_range`; `None` bounds are unbounded. Returns `None` (after logging)
/// on any database error.
fn query_db(time_range: TimeRange, db_connection: &PgConnection) -> Option<Vec<Message>> {
    use schema::messages;
    let TimeRange { before, after } = time_range;
    // `before`/`after` are already `i64`, so the original `as i64` casts were no-ops.
    let query_result = match (before, after) {
        (Some(before), Some(after)) => messages::table
            .filter(messages::timestamp.lt(before))
            .filter(messages::timestamp.gt(after))
            .load::<Message>(db_connection),
        (Some(before), None) => messages::table
            .filter(messages::timestamp.lt(before))
            .load::<Message>(db_connection),
        (None, Some(after)) => messages::table
            .filter(messages::timestamp.gt(after))
            .load::<Message>(db_connection),
        (None, None) => messages::table.load::<Message>(db_connection),
    };
    match query_result {
        Ok(result) => Some(result),
        Err(error) => {
            error!("Error querying DB: {}", error);
            None
        }
    }
}
/// Render the message list into a complete HTML document using the maud
/// `html!` macro, returning the markup as a `String`.
fn render_page(messages: Vec<Message>) -> String {
    (html! {
        head {
            title "microservice"
            style "body { font-family: monospace }"
        }
        body {
            ul {
                // One list item per message: "username (timestamp): message".
                @for message in &messages {
                    li {
                        (message.username) " (" (message.timestamp) "): " (message.message)
                    }
                }
            }
        }
    }).into_string()
}
/// Entry point: configure logging and serve the microservice on
/// 127.0.0.1:8080 until the process is killed.
fn main() {
    env_logger::init();
    let address = "127.0.0.1:8080".parse().unwrap();
    let http = hyper::server::Http::new();
    let server = http.bind(&address, || Ok(Microservice {})).unwrap();
    info!("Running microservice at {}", address);
    server.run().unwrap();
}
|
use criterion::{BenchmarkGroup, Bencher};
use criterion::measurement::WallTime;
use legion::prelude::*;
/// Complex-plane coordinate of a pixel (the x0/y0 constants of the
/// Mandelbrot iteration).
#[derive(Copy, Clone, Debug, Default)]
pub struct Coordinate(f32, f32);
/// Integer pixel position within the rendered image.
#[derive(Copy, Clone, Debug, Default)]
pub struct XY(u32, u32);
/// Number of escape-time iterations computed for a pixel (0..=100).
#[derive(Copy, Clone, Debug, Default)]
pub struct Rounds(u32);
// The eight zero-sized marker components below are attached according to the
// bits of an entity's index (low byte), scattering entities across up to 256
// archetypes in the sharded benchmark.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardA;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardB;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardC;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardD;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardE;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardF;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardG;
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct ShardH;
/// Register two legion benchmarks in `group`: a single-archetype baseline
/// and a variant that fragments entities across many archetypes.
pub fn legion_mandelbrot(group: &mut BenchmarkGroup<WallTime>, width: u32, height: u32) {
    // Shared benchmark body: for every (Coordinate, Rounds) entity, run the
    // escape-time iteration in parallel and store the iteration count.
    fn run_mandelbrot(bencher: &mut Bencher<WallTime>, mut world: World) {
        let query = <(Read<Coordinate>, Write<Rounds>)>::query();
        bencher.iter(|| {
            query.par_for_each(&mut world, |(coord, mut rounds)| {
                let Coordinate(x0, y0) = *coord;
                let (mut x, mut y) = (0., 0.);
                let mut i = 0;
                // Classic Mandelbrot escape loop: stop after 100 iterations
                // or once |z|^2 exceeds 4.
                while i < 100 && x*x + y*y <= 4. {
                    let x_temp = x*x - y*y + x0;
                    y = 2.*x*y + y0;
                    x = x_temp;
                    i += 1;
                }
                *rounds = Rounds(i);
            });
        });
    }
    // Baseline: all width*height pixel entities share one archetype.
    group.bench_function("legion", |bencher| {
        let mut world = World::new();
        world.insert(
            (),
            (0..width)
                .flat_map(|x| (0..height).map(move |y| (x, y)))
                .map(|(x, y)| {
                    // Map pixel coordinates onto x in [-2.5, 1.0), y in [-1.0, 1.0).
                    let (xf, yf) = (
                        x as f32 / width as f32 * 3.5 - 2.5,
                        y as f32 / height as f32 * 2. - 1.
                    );
                    (XY(x, y), Coordinate(xf, yf), Rounds(0))
                })
        );
        run_mandelbrot(bencher, world);
    });
    // Sharded variant: tag each entity with the Shard* markers matching the
    // set bits of (index % 256), fragmenting storage across up to 256
    // archetypes to measure query/iteration overhead.
    group.bench_function("legions-sharded", |bencher| {
        let mut world = World::new();
        let entities: Vec<_> = world.insert(
            (),
            (0..width)
                .flat_map(|x| (0..height).map(move |y| (x, y)))
                .map(|(x, y)| {
                    let (xf, yf) = (
                        x as f32 / width as f32 * 3.5 - 2.5,
                        y as f32 / height as f32 * 2. - 1.
                    );
                    (XY(x, y), Coordinate(xf, yf), Rounds(0))
                })
        ).into();
        for (i, entity) in entities.into_iter().enumerate() {
            let n = i % 256;
            if n & 1 != 0 { world.add_component(entity, ShardA); }
            if n & 2 != 0 { world.add_component(entity, ShardB); }
            if n & 4 != 0 { world.add_component(entity, ShardC); }
            if n & 8 != 0 { world.add_component(entity, ShardD); }
            if n & 16 != 0 { world.add_component(entity, ShardE); }
            if n & 32 != 0 { world.add_component(entity, ShardF); }
            if n & 64 != 0 { world.add_component(entity, ShardG); }
            if n & 128 != 0 { world.add_component(entity, ShardH); }
        }
        run_mandelbrot(bencher, world);
    });
} |
// NOTE(review): this looks like svd2rust-generated register access code —
// changes are normally made in the SVD description, not by hand.
#[doc = "Reader of register ENC_INTR_EN"]
pub type R = crate::R<u32, super::ENC_INTR_EN>;
#[doc = "Writer for register ENC_INTR_EN"]
pub type W = crate::W<u32, super::ENC_INTR_EN>;
#[doc = "Register ENC_INTR_EN `reset()`'s with value 0"]
impl crate::ResetValue for super::ENC_INTR_EN {
    type Type = u32;
    // All interrupt enables default to disabled: the register resets to 0.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `AUTH_PASS_INTR_EN`"]
pub type AUTH_PASS_INTR_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `AUTH_PASS_INTR_EN`"]
// Write proxy for bit 0 of ENC_INTR_EN (authentication interrupt enable).
pub struct AUTH_PASS_INTR_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> AUTH_PASS_INTR_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write: clear bit 0, then OR in the new value.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `ECB_PROC_INTR_EN`"]
pub type ECB_PROC_INTR_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ECB_PROC_INTR_EN`"]
// Write proxy for bit 1 of ENC_INTR_EN (ECB-processed interrupt enable).
pub struct ECB_PROC_INTR_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> ECB_PROC_INTR_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write on bit 1 (mask 0x01 << 1).
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `CCM_PROC_INTR_EN`"]
pub type CCM_PROC_INTR_EN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CCM_PROC_INTR_EN`"]
// Write proxy for bit 2 of ENC_INTR_EN (CCM-processed interrupt enable).
pub struct CCM_PROC_INTR_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> CCM_PROC_INTR_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write on bit 2 (mask 0x01 << 2).
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// Field readers: each extracts one enable bit from the cached register value.
impl R {
    #[doc = "Bit 0 - Authentication interrupt enable 0 - Disable 1 - Enable"]
    #[inline(always)]
    pub fn auth_pass_intr_en(&self) -> AUTH_PASS_INTR_EN_R {
        AUTH_PASS_INTR_EN_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - ECB processed interrupt enable 0 - Disable 1 - Enable"]
    #[inline(always)]
    pub fn ecb_proc_intr_en(&self) -> ECB_PROC_INTR_EN_R {
        ECB_PROC_INTR_EN_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - CCM processed interupt enable 0 - Disable 1 - Enable"]
    #[inline(always)]
    pub fn ccm_proc_intr_en(&self) -> CCM_PROC_INTR_EN_R {
        CCM_PROC_INTR_EN_R::new(((self.bits >> 2) & 0x01) != 0)
    }
}
// Field write proxies: each returns a proxy that mutates one bit of the
// pending register write.
impl W {
    #[doc = "Bit 0 - Authentication interrupt enable 0 - Disable 1 - Enable"]
    #[inline(always)]
    pub fn auth_pass_intr_en(&mut self) -> AUTH_PASS_INTR_EN_W {
        AUTH_PASS_INTR_EN_W { w: self }
    }
    #[doc = "Bit 1 - ECB processed interrupt enable 0 - Disable 1 - Enable"]
    #[inline(always)]
    pub fn ecb_proc_intr_en(&mut self) -> ECB_PROC_INTR_EN_W {
        ECB_PROC_INTR_EN_W { w: self }
    }
    #[doc = "Bit 2 - CCM processed interupt enable 0 - Disable 1 - Enable"]
    #[inline(always)]
    pub fn ccm_proc_intr_en(&mut self) -> CCM_PROC_INTR_EN_W {
        CCM_PROC_INTR_EN_W { w: self }
    }
}
|
use crate::errors::Errcode;
use std::path::PathBuf;
use structopt::StructOpt;
// Command-line arguments for the crabcan container runtime.
// NOTE: plain `//` comments are used deliberately — structopt turns `///`
// doc comments into --help text, which would change runtime behavior.
#[derive(Debug, StructOpt)]
#[structopt(name = "crabcan", about = "A simple container in Rust.")]
pub struct Args {
    // Enable debug-level logging (see parse_args).
    #[structopt(short, long)]
    debug: bool,
    // Command to execute inside the container; must be non-empty.
    #[structopt(short, long)]
    pub command: String,
    // User ID to run the contained process as.
    #[structopt(short, long)]
    pub uid: u32,
    // Directory used as the container's mount point; must exist.
    #[structopt(parse(from_os_str), short = "m", long = "mount")]
    pub mount_dir: PathBuf,
    // Additional paths to expose — presumably mounted inside the container;
    // TODO confirm against the mount code.
    #[structopt(parse(from_os_str), short = "a", long = "add")]
    pub addpaths: Vec<PathBuf>,
}
/// Parse CLI arguments, configure logging, and validate the inputs.
///
/// # Errors
/// Returns `Errcode::ArgumentInvalid` when the mount directory is not an
/// existing directory, or when the command string is empty.
pub fn parse_args() -> Result<Args, Errcode> {
    let args = Args::from_args();
    // Configure logging first so later failures can be logged too.
    let level = if args.debug {
        log::LevelFilter::Debug
    } else {
        log::LevelFilter::Info
    };
    setup_log(level);
    // `is_dir()` already returns false for nonexistent paths, so the
    // original separate `exists()` check was redundant.
    if !args.mount_dir.is_dir() {
        return Err(Errcode::ArgumentInvalid("mount"));
    }
    if args.command.is_empty() {
        return Err(Errcode::ArgumentInvalid("command"));
    }
    Ok(args)
}
/// Initialise env_logger with second-resolution timestamps, filtering all
/// modules at `level`. Note: `init()` installs the global logger and must
/// only be called once per process.
pub fn setup_log(level: log::LevelFilter) {
    env_logger::Builder::from_default_env()
        .format_timestamp_secs()
        .filter(None, level)
        .init();
}
// Heavily inspired by https://rust-unofficial.github.io/too-many-lists/first-final.html
/// Link to the next node; `None` marks the bottom of the stack.
type More<T> = Option<Box<Node<T>>>;
/// One stack element plus the link to the element below it.
struct Node<T> {
    next: More<T>,
    val: T,
}
/// A LIFO stack backed by a singly linked list.
pub struct Stack<T> {
    head: More<T>,
    // NOTE: `usize` would be more idiomatic, but `i32` is kept so the
    // field's type (observed by the tests below) stays unchanged.
    len: i32,
}
impl<T> Stack<T> {
    /// Create an empty stack.
    pub fn new() -> Stack<T> {
        Stack { head: None, len: 0 }
    }
    /// Push `val` onto the top of the stack (borrows the stack mutably).
    pub fn push(&mut self, val: T) {
        // `take()` moves the old head out (leaving `None`) so the new node
        // can own it without cloning.
        self.head = Some(Box::new(Node {
            next: self.head.take(),
            val,
        }));
        self.len += 1;
    }
    /// Pop the top element, or `None` if the stack is empty.
    pub fn pop(&mut self) -> Option<T> {
        // `Option::map` replaces the explicit match: on `Some(node)` we
        // relink the head and return the value; `None` passes through.
        self.head.take().map(|node| {
            self.head = node.next;
            self.len -= 1;
            node.val
        })
    }
}
#[cfg(test)]
mod test {
    // This is run from the root of the library and we import this as the stack module
    use crate::stack::Stack;
    /// Push/pop ordering (LIFO), length bookkeeping, and empty-stack behaviour.
    #[test]
    fn basic_functionality() {
        // The stack needs to be mutable as its methods mutate it!
        let mut s = Stack::new();
        // Both push and pop just borrow the stack. So it is still owned in this function.
        // They mutate it though.
        assert_eq!(s.len, 0);
        assert_eq!(s.pop(), None);
        assert_eq!(s.len, 0);
        s.push(1);
        assert_eq!(s.len, 1);
        assert_eq!(s.pop(), Some(1));
        assert_eq!(s.len, 0);
        assert_eq!(s.pop(), None);
        s.push(1);
        s.push(2);
        assert_eq!(s.len, 2);
        assert_eq!(s.pop(), Some(2));
        assert_eq!(s.pop(), Some(1));
        assert_eq!(s.pop(), None);
        // And now s will be freed as we get to the end of its scope
    }
    /// The stack is generic: it works for non-numeric element types too.
    #[test]
    fn is_generic() {
        let mut s = Stack::new();
        assert_eq!(s.pop(), None);
        s.push("blah");
        assert_eq!(s.pop(), Some("blah"));
        assert_eq!(s.pop(), None);
    }
}
|
use crate::aoc_utils::read_input;
use std::collections::HashMap;
/// Entry point for this puzzle day: read the input file and run both parts.
pub fn run(input_filename: &str) {
    let input = read_input(input_filename);
    part1(&input);
    part2(&input);
}
/// Apply a part-1 bitmask to `value`: '0'/'1' in the mask override the
/// corresponding bit, 'X' keeps the value's original bit.
///
/// The original used `chars().nth(index)` inside the loop (O(n^2)); the bit
/// characters are collected once for O(1) indexed access instead.
fn value_with_bitmask(value: u64, bitmask: &str) -> u64 {
    // 36-bit, zero-padded binary representation of the value.
    let bits: Vec<char> = format!("{:036b}", value).chars().collect();
    let result_bits: String = bitmask
        .chars()
        .enumerate()
        .map(|(index, mask_char)| {
            if mask_char == 'X' {
                // 'X': keep the original bit ('0' when out of range).
                *bits.get(index).unwrap_or(&'0')
            } else {
                // '0' or '1': the mask overrides the bit.
                mask_char
            }
        })
        .collect();
    u64::from_str_radix(&result_bits, 2).unwrap_or(0)
}
/// Part 1: apply the current mask to each written *value*, then print the
/// sum of all memory cells.
///
/// Takes `&str` instead of `&String`; existing `part1(&input)` call sites
/// keep working through deref coercion.
fn part1(input: &str) {
    let mut memory: HashMap<u64, u64> = HashMap::new();
    let mut bitmask = "";
    for line in input.lines() {
        // Every line is either "mask = ..." or "mem[addr] = value".
        let line_vec: Vec<&str> = line.split(" = ").collect();
        assert_eq!(line_vec.len(), 2);
        let operation = line_vec[0];
        let value = line_vec[1];
        if operation == "mask" {
            bitmask = value;
        } else {
            // "mem[123]" -> 123
            let addr = operation
                .replace("mem[", "")
                .replace("]", "")
                .parse::<u64>()
                .unwrap_or(0);
            let mem = memory.entry(addr).or_insert(0);
            *mem = value_with_bitmask(value.parse::<u64>().unwrap_or(0), bitmask);
        }
    }
    // Sum the stored values directly instead of a manual accumulation loop.
    let sum: u64 = memory.values().sum();
    println!("Part 1: {}", sum);
}
/// Expand every floating bit ('X') in `address_binary` into both 0 and 1,
/// returning a map from each concrete decoded address to `value`.
fn set_memory_recursive(address_binary: String, value: u64) -> HashMap<u64, u64> {
    let mut result: HashMap<u64, u64> = HashMap::new();
    if address_binary.contains('X') {
        // Branch on the first 'X': recurse once with it forced to 0 and once
        // forced to 1. (`replacen` already returns a String, so the original
        // `String::from(...)` wrappers were redundant.)
        result.extend(set_memory_recursive(
            address_binary.replacen('X', "0", 1),
            value,
        ));
        result.extend(set_memory_recursive(
            address_binary.replacen('X', "1", 1),
            value,
        ));
    } else {
        // Fully concrete address: record the write.
        let mem = u64::from_str_radix(address_binary.as_str(), 2).unwrap_or(0);
        result.insert(mem, value);
    }
    result
}
/// Part 2 decoding: apply `bitmask` to the *address*. '1' forces the bit to
/// 1, 'X' marks it floating (expanded by `set_memory_recursive`), anything
/// else keeps the address's original bit.
///
/// As in part 1, the address bits are collected once instead of calling
/// `chars().nth(index)` per position (O(n^2) -> O(n)).
fn value_with_bitmask_part2(addr: u64, value: u64, bitmask: &str) -> HashMap<u64, u64> {
    let bits: Vec<char> = format!("{:036b}", addr).chars().collect();
    let result_bits: String = bitmask
        .chars()
        .enumerate()
        .map(|(index, mask_char)| match mask_char {
            '1' => '1',
            'X' => 'X',
            // '0' (or any other char, matching the original): keep the bit.
            _ => *bits.get(index).unwrap_or(&'0'),
        })
        .collect();
    set_memory_recursive(result_bits, value)
}
/// Part 2: apply the current mask to each *address* (with floating bits),
/// then print the sum of all memory cells.
///
/// Takes `&str` instead of `&String`; existing `part2(&input)` call sites
/// keep working through deref coercion.
fn part2(input: &str) {
    let mut memory: HashMap<u64, u64> = HashMap::new();
    let mut bitmask = "";
    for line in input.lines() {
        let line_vec: Vec<&str> = line.split(" = ").collect();
        assert_eq!(line_vec.len(), 2);
        let operation = line_vec[0];
        let value = line_vec[1];
        if operation == "mask" {
            bitmask = value;
        } else {
            // "mem[123]" -> 123
            let addr = operation
                .replace("mem[", "")
                .replace("]", "")
                .parse::<u64>()
                .unwrap_or(0);
            // Later writes to the same decoded address overwrite earlier ones.
            memory.extend(value_with_bitmask_part2(
                addr,
                value.parse::<u64>().unwrap_or(0),
                bitmask,
            ))
        }
    }
    let sum: u64 = memory.values().sum();
    println!("Part 2: {}", sum);
}
|
use std::io::{Error, Read, Write};
use std::net::{TcpListener, TcpStream};
use std::thread;
/// Print the first `size` bytes of `source` as space-separated lowercase
/// hex pairs, followed by a newline.
fn hex_dump(size: usize, source: &[u8]) {
    for byte in source.iter().take(size) {
        print!("{:02x} ", byte);
    }
    // `println!()` is the idiomatic form of `println!("")` (clippy: println_empty_string).
    println!();
}
/// Echo loop for one client connection.
///
/// Reads up to 1024 bytes at a time and writes back exactly the bytes that
/// were received. Returns `Ok(())` once the peer closes the connection.
fn handler(mut stream: TcpStream) -> Result<(), Error> {
    println!("[*] Connection from {}", stream.peer_addr().unwrap());
    let mut buf = [0u8; 1024];
    loop {
        let nbytes = stream.read(&mut buf)?;
        if nbytes == 0 {
            // Peer closed the connection.
            return Ok(());
        }
        // Only look at the bytes actually read. `from_utf8_lossy` fixes the
        // old `from_utf8(&buf).unwrap()`, which panicked on any non-UTF-8
        // (binary) input and printed the whole 1024-byte buffer.
        let received = &buf[..nbytes];
        println!(
            "[*] Size: {} Message: {}",
            nbytes,
            String::from_utf8_lossy(received)
        );
        hex_dump(nbytes, received);
        // `write_all` echoes exactly the received bytes; the old `write(&buf)`
        // sent the full buffer (stale/zero padding) and ignored short writes.
        stream.write_all(received)?;
        stream.flush()?;
    }
}
fn main() {
let listener = TcpListener::bind("127.0.0.1:4444").expect("Cannot bind to port 4444.");
println!("[*] Listening on port 4444...");
for stream in listener.incoming() {
match stream {
Ok(stream) => {
thread::spawn(|| {
handler(stream).unwrap_or_else(|error| eprintln!("[!] Error: {:?}", error));
});
},
Err(e) => { println!("[!] Error: {}", e) }
}
}
}
|
use super::state::LSystem;
/// Enumerates all the commands needed for a Turtle-type rendering of an
/// L-System's state.
/// Only 2D is supported for now.
#[derive(Clone, Debug, PartialEq)]
pub enum TurtleCommand {
    /// Advance the turtle by a certain amount (forward if this amount is
    /// positive, backward otherwise), in pixels by default.
    AdvanceBy(f32),
    /// Rotate the turtle by a certain angle, in degrees by default.
    RotateBy(f32),
    /// Push (save) the current turtle state (position and angle) to the stack.
    PushState,
    /// Pop (restore) the last turtle state from the stack.
    PopState,
    /// Do nothing command (useful for text-only tests).
    None,
}
// The previous `unsafe impl Send for TurtleCommand {}` was removed: every
// variant holds only `f32` payloads, so `Send` is already derived
// automatically by the compiler and the unsafe impl was redundant.
/// L-System interpreters translate the state of an L-System into a sequence
/// of drawing instructions in order to represent it (think Turtle graphics
/// from Logo).
pub trait LInterpreter<S: Clone + Eq> {
    /// Convert the L-System's current state into turtle commands, or an
    /// error message (`String`) describing why interpretation failed.
    fn interpret(&mut self, lsystem: &LSystem<S>) -> Result<Vec<TurtleCommand>, String>;
}
/// Simple, linear L-System interpreter.
/// NB: can rapidly freeze its container thread.
pub struct SimpleInterpreter;
impl<S> LInterpreter<S> for SimpleInterpreter
    where S: Clone + Eq
{
    /// Walk the L-System state once, mapping each symbol to its registered
    /// turtle command. Symbols without an interpretation are skipped; `None`
    /// commands are dropped to save memory.
    fn interpret(&mut self, lsystem: &LSystem<S>) -> Result<Vec<TurtleCommand>, String> {
        let rules = lsystem.rules();
        // Upper bound: at most one command per symbol in the state.
        let mut commands = Vec::with_capacity(lsystem.state().len());
        for s in lsystem.state() {
            // `if let` replaces the original nested match with an empty
            // `None => ()` arm.
            if let Some(command) = rules.interpretation(&s) {
                if *command != TurtleCommand::None {
                    commands.push(command.clone());
                }
            }
        }
        commands.shrink_to_fit();
        Ok(commands)
    }
}
#[cfg(test)]
mod test {
    use rules::HashMapRules;
    use state::{LSystem, new_rules_value};
    use process::{LProcessor, SimpleProcessor};
    use super::{LInterpreter, SimpleInterpreter};
    use super::TurtleCommand::*;
    /// One iteration from axiom "A", then interpretation of the new state.
    #[test]
    fn simple_interpreter() {
        let mut rules: HashMapRules<char> = HashMapRules::new();
        rules.set_str('A', "+B-A-B+", AdvanceBy(10f32));
        // NOTE(review): the rule string below uses U+2212 '−' (minus sign),
        // not ASCII '-'. No rule or interpretation is registered for '−', so
        // those characters are silently ignored from iteration 2 onward —
        // confirm this is intentional.
        rules.set_str('B', "−A+B+A−", AdvanceBy(15f32));
        rules.set_str('+', "+", RotateBy(60f32));
        rules.set_str('-', "-", RotateBy(-60f32));
        let mut lsystem = LSystem::new_with_char("A", new_rules_value(rules));
        let at_iteration = 1;
        // Expected interpretation of "+B-A-B+" after one iteration.
        let expected_commands = [RotateBy(60.0),
                                 AdvanceBy(15.0),
                                 RotateBy(-60.0),
                                 AdvanceBy(10.0),
                                 RotateBy(-60.0),
                                 AdvanceBy(15.0),
                                 RotateBy(60.0)];
        for _ in 0..at_iteration {
            lsystem = SimpleProcessor.iterate(&lsystem).ok().unwrap();
        }
        assert_eq!(lsystem.iteration(), at_iteration);
        let commands = SimpleInterpreter.interpret(&lsystem).ok().unwrap();
        assert_eq!(commands.len(), expected_commands.len());
        for i in 0..commands.len() {
            assert_eq!(commands[i], expected_commands[i]);
        }
    }
}
|
extern crate regex;
extern crate reqwest;
use fuzzy_matcher::skim::fuzzy_match;
use regex::Regex;
use rusqlite::types::ToSql;
use rusqlite::{Connection, NO_PARAMS};
use serde::{Deserialize};
use std::cmp::max;
use std::collections::{HashMap, HashSet};
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use std::{io, thread, time};
/// Tail `output_log.txt`, watching for Arena `Draft.MakePick` entries, and
/// print the current draft pack whenever one appears. Runs forever (the
/// function only returns early on an I/O or regex error).
pub fn run() -> Result<(), Box<dyn Error>> {
    println!("Running magic_drafter, start a draft run in arena.");
    let f = File::open("output_log.txt")?;
    let mut reader = BufReader::new(f);
    // Captures the JSON object that follows a MakePick log marker.
    let re = Regex::new(r"<== Draft\.MakePick(?s).*?(\{.*?\})")?;
    loop {
        let mut line = String::new();
        // `read_to_string` resumes from the previous EOF, so this behaves
        // like `tail -f`: only newly appended log text is scanned.
        if reader.read_to_string(&mut line)? > 0 {
            // Only the most recent pick in the new text matters.
            if let Some(ref m) = re.captures_iter(&line).last() {
                let pick: DraftPick = serde_json::from_str(&m[1]).unwrap();
                println!("{:?}", pick.draftPack)
            }
        } else {
            // Nothing new yet; poll again in five seconds.
            println!("waiting...");
            thread::sleep(time::Duration::from_millis(5000));
        }
    }
    // The unreachable `Ok(())` after the infinite loop was removed: the
    // `loop` itself diverges, which satisfies the `Result` return type.
}
/// Local card record: Scryfall identity fields plus the community rank.
// NOTE(review): this struct appears unused within this file (it mirrors
// `ScryfallCard` + `card_rank`) — confirm it is read elsewhere before removal.
#[derive(Deserialize, Debug)]
struct Card {
    id: String,
    name: String,
    arena_id: u32,
    set_name: String,
    card_rank: String,
}
/// Subset of the Arena `Draft.MakePick` JSON payload that we deserialize.
/// Field names intentionally keep the log's camelCase JSON keys.
#[derive(Deserialize, Debug)]
struct DraftPick {
    // playerId: String,
    // eventName: String,
    // draftId: String,
    // draftStatus: String,
    // packNumber: u32,
    // pickNumber: u32,
    // Identifiers still available in the current pack — presumably card IDs;
    // TODO confirm against the Arena log format.
    draftPack: Vec<String>,
    // Identifiers already picked in this draft.
    pickedCards: Vec<String>,
    // requestUnits: f32
}
/// Fields deserialized from the Scryfall `cards/arena/{id}` API response
/// (see `fetch_card_details`).
#[derive(Deserialize, Debug)]
struct ScryfallCard {
    id: String,
    name: String,
    arena_id: u32,
    set_name: String,
}
/// Create the `card` table if needed and insert any Arena cards not already
/// present, attaching the closest-matching rank scraped from the sheet.
pub fn init_db() -> Result<(), Box<dyn Error>> {
    println!("Initialising collection...");
    let conn = Connection::open("test.db")?;
    conn.execute(
        "CREATE TABLE IF NOT EXISTS card (
                  id INTEGER PRIMARY KEY,
                  name TEXT NOT NULL,
                  scryfallId TEXT NOT NULL,
                  cardSet TEXT NOT NULL,
                  cardRank TEXT
                  )",
        NO_PARAMS,
    )?;
    let mut stmt = conn.prepare("SELECT id FROM card")?;
    // Collect into `Result` and use `?` so a row-decoding failure is
    // propagated instead of panicking via the old `.map(|x| x.unwrap())`.
    let existing_cards: HashSet<u32> = stmt
        .query_map::<u32, _, _>(NO_PARAMS, |row| row.get(0))?
        .collect::<Result<_, _>>()?;
    let ranks = pull_latest_card_definitions()?;
    let cards = fetch_arena_cards()?;
    // Only fetch/insert the cards we do not already have.
    insert_card_defs(
        &conn,
        cards.difference(&existing_cards).cloned().collect(),
        &ranks,
    )?;
    println!("done.");
    Ok(())
}
/// Scrape the community rankings Google Sheet (gviz endpoint) and return a
/// map from card name to rank string.
pub fn pull_latest_card_definitions() -> Result<HashMap<String, String>, Box<dyn Error>> {
    let res = reqwest::get("https://docs.google.com/spreadsheets/d/1BAPtQv4U9KUAtVzkccJlPS8cb0s_uOcGEDORip5uaQg/gviz/tq?headers=0&sheet=Staging%20Sheet&tq=select+A,D")?.text()?;
    // Extract (name, rank) pairs from the gviz response; `\u0027` escapes in
    // card names are turned back into apostrophes.
    let re = Regex::new(r#"c[^v]+v.{3}([^"]+)".{8}([^"]+)"#)?;
    let card_ranks = re
        .captures_iter(&res)
        .map(|x| {
            (
                str::replace(&x[1].to_owned(), "\\u0027", "'"),
                x[2].to_owned(),
            )
        })
        .collect::<HashMap<String, String>>(); // turbo-fish syntax >::() - very fishy
    // (Answering the original question below: no — the success value must be
    // wrapped in `Ok` explicitly when the return type is `Result`.)
    Ok(card_ranks) // Is there a way to automatically return Result without needing Ok(item)?
}
/// Scrape the mtgatracker M20 card list and return the set of Arena IDs.
///
/// IDs that fail to parse (e.g. a digit run that overflows `u32`) are now
/// skipped instead of panicking — this is scraped, untrusted data.
fn fetch_arena_cards() -> Result<HashSet<u32>, Box<dyn Error>> {
    let res =
        reqwest::get("https://raw.githubusercontent.com/mtgatracker/node-mtga/master/mtga/m20.js")?
            .text()?;
    let re = Regex::new(r"mtgaID: (\d+), ")?;
    let records: HashSet<u32> = re
        .captures_iter(&res)
        .filter_map(|x| x[1].parse::<u32>().ok())
        .collect();
    Ok(records)
}
/// Fetch a single card's details from the Scryfall API by its Arena ID.
fn fetch_card_details(id: u32) -> Result<ScryfallCard, reqwest::Error> {
    reqwest::get(&format!("https://api.scryfall.com/cards/arena/{}", id))?.json()
}
// This should return a vector of cards but haven't figured a way to do multi-insert with rusqlite
/// Fetch details for each card ID from Scryfall and insert them one by one,
/// attaching the closest-matching rank. IDs that cannot be resolved are
/// skipped with a message; insert failures abort with an error.
fn insert_card_defs(
    conn: &Connection,
    card_ids: HashSet<u32>,
    card_ranks: &HashMap<String, String>,
) -> Result<(), Box<dyn Error>> {
    for id in card_ids {
        let card = match fetch_card_details(id) {
            Ok(x) => x,
            _ => {
                println!(
                    "Unable to retrieve card details with id {}, skipping...",
                    id
                );
                continue;
            }
        };
        let card_rank = get_closest_match(&card.name, &card_ranks);
        // Propagate insert failures with `?` instead of the old `.unwrap()`:
        // the function already returns a `Result`.
        conn.execute(
            "INSERT INTO card (id, name, scryfallId, cardSet, cardRank)
                  VALUES (?1, ?2, ?3, ?4, ?5)",
            &[
                &card.arena_id as &dyn ToSql,
                &card.name,
                &card.id,
                &card.set_name,
                &card_rank,
            ],
        )?;
        println!("{:?}{:?}", card, card_rank);
        thread::sleep(time::Duration::from_secs(1)); // be a good citizen
    }
    Ok(())
}
fn get_closest_match<'a>(card_name: &str, card_ranks: &'a HashMap<String, String>) -> &'a str {
let result = card_ranks.into_iter().fold((0, ""), |acc, x| {
let max = max(
acc.0,
match fuzzy_match(&card_name, x.0) {
Some(y) => y,
None => 0,
},
);
if max > acc.0 {
(max, x.1)
} else {
acc
}
});
result.1
}
|
extern crate x11_input_mirror;
use x11_input_mirror::*;
use config::{GrabberConfig, SecurityConfig};
use connector::Connector;
use utils::{encode_u16, need_dep};
use std::thread::sleep;
use std::time::Duration;
/// Grabber entry point: capture local mouse/keyboard events and stream them
/// to the configured servers as fixed-size binary messages.
fn main() {
    pretty_panic::set();
    // External tools required at runtime.
    need_dep("xdotool");
    need_dep("xinput");
    let config = GrabberConfig::load();
    let max_ping = Duration::from_millis(config.max_ping_ms);
    let sec_config = SecurityConfig::load();
    let password = sec_config.password;
    if password.len() < 12 {
        panic!("Password must have at least 12 characters")
    }
    let mut conns = Connector::connect(config.servers, max_ping, &password);
    // Background capture threads feed events through these channels.
    let mouse_rx = mouse::spawn_thread(config.mouse_interval_ms);
    let keyboard_and_clicks_rx =
        keyboard_and_clicks::spawn_thread(config.keyboard_and_clicks_interval_ms);
    println!("Started successfully");
    // Fixed 16-byte wire message; buf[0] carries the opcode.
    let mut buf = [0u8; 16];
    loop {
        for msg in mouse_rx.try_iter() {
            // Opcode 100: mouse move; x/y encoded via `encode_u16` at bytes 1..5.
            buf[0] = 100;
            buf[1..3].copy_from_slice(&encode_u16(msg.x));
            buf[3..5].copy_from_slice(&encode_u16(msg.y));
            conns.write(buf);
        }
        for msg in keyboard_and_clicks_rx.try_iter() {
            use keyboard_and_clicks::EventKind::*;
            // Opcode 101: key event, 102: mouse button event.
            buf[0] = match msg.kind {
                KeyDown | KeyUp => 101,
                MouseDown | MouseUp => 102,
            };
            // Byte 1: 1 = press, 2 = release.
            buf[1] = match msg.kind {
                KeyDown | MouseDown => 1,
                KeyUp | MouseUp => 2,
            };
            buf[2] = msg.code;
            buf[3..5].copy_from_slice(&encode_u16(msg.x));
            buf[5..7].copy_from_slice(&encode_u16(msg.y));
            conns.write(buf);
        }
        sleep(MAIN_LOOP_INTERVAL);
    }
}
|
use crate::{
lisp_object::{
EvalError,
LispObject,
NativeDef,
},
lisp_object_util::{
as_numbers,
},
};
/// Native `+`: sum every number in the `terms` rest-argument list.
fn add(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let terms = args[0].as_list()?;
    match as_numbers(&terms) {
        Ok(numbers) => {
            let total = numbers.iter().fold(0.0, |acc, n| acc + n);
            Ok(LispObject::Number(total))
        }
        // Report the 1-based position of the offending argument.
        Err((err, index)) => Err(err.trace(index + 1)),
    }
}
pub const ADD: NativeDef = NativeDef {
    name: "+",
    positional: &[],
    rest: Some("terms"),
    func: add,
};
/// Native `*`: multiply every number in the `factors` rest-argument list.
fn multiply(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let factors = args[0].as_list()?;
    match as_numbers(&factors) {
        Ok(numbers) => {
            // Accumulator renamed from the copy-pasted `sum` to `product`.
            let product = numbers.iter().fold(1.0, |acc, n| acc * n);
            Ok(LispObject::Number(product))
        }
        Err((err, index)) => Err(err.trace(index + 1)),
    }
}
pub const MULTIPLY: NativeDef = NativeDef {
    name: "*",
    positional: &[],
    rest: Some("factors"),
    func: multiply,
};
/// Native `-`: subtract the sum of the `subs` list from the minuend `min`.
fn subtract(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let minuend = args[0].as_number()
        .map_err(|err| err.trace(1))?;
    let subs = args[1].as_list()?;
    // Offending positions in `subs` are reported starting at argument 2.
    let subtrahend = match as_numbers(&subs) {
        Ok(numbers) => numbers.iter().fold(0.0, |acc, n| acc + n),
        Err((err, index)) => return Err(err.trace(index + 2)),
    };
    Ok(LispObject::Number(minuend - subtrahend))
}
pub const SUBTRACT: NativeDef = NativeDef {
    name: "-",
    positional: &["min"],
    rest: Some("subs"),
    func: subtract,
};
/// Native `=`: compare two numbers or two symbols for equality; any other
/// first-argument type is an error.
fn equal(args: &[LispObject]) -> Result<LispObject, EvalError> {
    // NOTE(review): the match binds payloads by value out of a slice element,
    // which only compiles if the Number/Symbol payloads are Copy — confirm.
    match args[0] {
        LispObject::Number(op0) => {
            let op1 = args[1].as_number()
                .map_err(|e| e.trace(2))?;
            Ok(LispObject::Bool(op0 == op1))
        }
        LispObject::Symbol(op0) => {
            let op1 = args[1].as_symbol()
                .map_err(|e| e.trace(2))?;
            Ok(LispObject::Bool(op0 == op1))
        }
        // Unsupported type: the error is attributed to the first argument.
        _ => Err(EvalError::new("equal not implemented for type".to_string()).trace(1)),
    }
}
pub const EQUAL: NativeDef = NativeDef {
    name: "=",
    positional: &["o1", "o2"],
    rest: None,
    func: equal,
};
fn first(args: &[LispObject]) -> Result<LispObject, EvalError> {
let lst = args[0].as_list()?;
Ok(lst[0].clone())
}
pub const FIRST: NativeDef = NativeDef {
name: "first",
positional: &["lst"],
rest: None,
func: first,
};
/// Native `rest`: everything after the first element of `lst` (the empty
/// list when `lst` has at most one element).
fn rest(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let lst = args[0].as_list()?;
    // `get(1..)` is `None` only for an empty list, matching the original
    // explicit length check.
    let tail = lst.get(1..).map(|slice| slice.to_vec()).unwrap_or_default();
    Ok(LispObject::List(tail))
}
pub const REST: NativeDef = NativeDef {
    name: "rest",
    positional: &["lst"],
    rest: None,
    func: rest,
};
/// Native `list`: collect the rest-arguments into a list object.
fn list(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let elems = args[0].as_list()?;
    Ok(LispObject::List(elems))
}
pub const LIST: NativeDef = NativeDef {
    name: "list",
    positional: &[],
    rest: Some("elems"),
    func: list,
};
/// Native `concat`: append the argument lists into a single list.
/// Stops at the first argument that is not itself a list, reporting its
/// 1-based position (same short-circuiting as the original collect).
fn concat(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let mut combined: Vec<LispObject> = Vec::new();
    for (index, elem) in args[0].as_list()?.into_iter().enumerate() {
        let lst = elem.into_list().map_err(|e| e.trace(index + 1))?;
        combined.extend(lst);
    }
    Ok(LispObject::List(combined))
}
pub const CONCAT: NativeDef = NativeDef {
    name: "concat",
    positional: &[],
    rest: Some("lsts"),
    func: concat,
};
/// Native `is-list`: true when the argument is a list object.
fn is_list(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let result = match args[0] {
        LispObject::List(_) => true,
        _ => false,
    };
    Ok(LispObject::Bool(result))
}
pub const IS_LIST: NativeDef = NativeDef {
    name: "is-list",
    positional: &["lst"],
    rest: None,
    func: is_list,
};
/// Native `length`: the number of elements in `lst`, as a Lisp number.
fn length(args: &[LispObject]) -> Result<LispObject, EvalError> {
    let lst = args[0].as_list()?;
    let count = lst.len() as f64;
    Ok(LispObject::Number(count))
}
pub const LENGTH: NativeDef = NativeDef {
    name: "length",
    positional: &["lst"],
    rest: None,
    func: length,
};
|
pub mod blackbox;
pub mod deterministic;
pub mod externalities;
use crate::node::{InternalNode, TestRuntimeRequirements};
/// Black-box testing against an already-running external node at `url`.
pub fn blackbox_external<R: TestRuntimeRequirements>(url: &str) -> blackbox::BlackBox<R> {
    blackbox::BlackBox::new(blackbox::BlackBoxNode::External(url.into()))
}
/// Black-box testing against an in-process internal node.
pub fn blackbox_internal<R: TestRuntimeRequirements>(node: InternalNode<R>) -> blackbox::BlackBox<R> {
    blackbox::BlackBox::new(blackbox::BlackBoxNode::Internal(node))
}
/// Wrap an internal node in the deterministic test facade.
pub fn deterministic<R: TestRuntimeRequirements + 'static>(node: InternalNode<R>) -> deterministic::Deterministic<R> {
    deterministic::Deterministic::new(node)
}
|
extern crate failure;
extern crate regex;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader};
use failure::Error;
/// A point parsed from the puzzle input ("x, y" per line).
#[derive(Copy, Clone, Debug, PartialEq)]
struct Coordinate {
    x: i32,
    y: i32,
}
/// Per-grid-cell result: the index of the unique closest location and its
/// Manhattan distance to this cell.
#[derive(Copy, Clone, Debug)]
struct Cell {
    closest: usize,
    distance: i32,
}
/// Manhattan (taxicab) distance between two grid points.
fn distance((x1, y1): (i32, i32), (x2, y2): (i32, i32)) -> i32 {
    let dx = x1 - x2;
    let dy = y1 - y2;
    dx.abs() + dy.abs()
}
/// AoC 2018 day 6: part 1 finds the largest finite "closest location" area;
/// part 2 counts cells whose total distance to every location is < 10000.
/// The input file path is taken from the first CLI argument.
fn main() -> Result<(), Error> {
    let args: Vec<String> = env::args().collect();
    let file = File::open(&args[1])?;
    // Each input line has the form "x, y".
    let locations: Vec<Coordinate> = BufReader::new(file)
        .lines()
        .map(|l| l.expect("file read failed"))
        .map(|l| {
            l.split(", ")
                .map(|s| s.to_string())
                .collect::<Vec<String>>()
        })
        .map(|parts| Coordinate {
            x: parts[0].parse().unwrap(),
            y: parts[1].parse().unwrap(),
        })
        .collect();
    // Let's make a square grid large enough to hold all the locations.
    // Fixed off-by-one: coordinates run up to and including max_x/max_y, so
    // the grid needs max+1 cells per side. The old `side = max` silently
    // dropped the outermost row/column (and any location sitting on it).
    let max_x = locations.iter().map(|l| l.x).max().unwrap();
    let max_y = locations.iter().map(|l| l.y).max().unwrap();
    let side = max_x.max(max_y) as usize + 1;
    let mut grid: Vec<Vec<Option<Cell>>> = vec![vec![None; side]; side];
    let mut areas = HashMap::new();
    // Naively blast over the grid and brute force the nearest point.
    for x in 0..side {
        for y in 0..side {
            let cell = (x as i32, y as i32);
            let distances: Vec<_> = locations
                .iter()
                .enumerate()
                .map(|(index, coord)| (index, distance(cell, (coord.x, coord.y))))
                .collect();
            let (min_index, min_distance) = distances
                .iter()
                .min_by_key(|(_, dist)| dist)
                .unwrap()
                .clone();
            // Cells equidistant from two or more locations belong to nobody.
            if distances
                .iter()
                .filter(|(_, dist)| *dist == min_distance)
                .count()
                == 1
            {
                grid[x][y] = Some(Cell {
                    closest: min_index,
                    distance: min_distance,
                });
                *areas.entry(min_index).or_insert(0) += 1;
            }
        }
    }
    // Eliminate zones touching the edges: those areas extend to infinity.
    for x in 0..side {
        if let Some(Cell { closest, .. }) = grid[x][0] {
            areas.remove(&closest);
        }
        if let Some(Cell { closest, .. }) = grid[x][side - 1] {
            areas.remove(&closest);
        }
    }
    for y in 0..side {
        if let Some(Cell { closest, .. }) = grid[0][y] {
            areas.remove(&closest);
        }
        if let Some(Cell { closest, .. }) = grid[side - 1][y] {
            areas.remove(&closest);
        }
    }
    let answer = areas.iter().max_by_key(|(_, &area)| area).unwrap();
    println!("first answer: {}", answer.1);
    // Do it again, but with total distance this time.
    let mut area = 0;
    for x in 0..side {
        for y in 0..side {
            let cell = (x as i32, y as i32);
            let total_distance: i32 = locations
                .iter()
                .map(|coord| distance(cell, (coord.x, coord.y)))
                .sum();
            if total_distance < 10000 {
                area += 1;
            }
        }
    }
    println!("second answer: {}", area);
    Ok(())
}
|
//! A library for parsing debuginfo.
//!
//! ## Example usage
//!
//! ```rust,no_run
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! # let a_file_path = String::new();
//! let ctx = ddbug_parser::File::parse(a_file_path)?;
//! let file = ctx.file();
//! for unit in file.units() {
//! for function in unit.functions() {
//! if let Some(name) = function.name() {
//! println!("{}", name);
//! }
//! }
//! }
//! Ok(())
//! }
//! ```
#![deny(missing_docs)]
// Enable some rust 2018 idioms.
#![warn(bare_trait_objects)]
#![warn(unused_extern_crates)]
// Calm down clippy.
#![allow(clippy::single_match)]
#![allow(clippy::match_single_binding)]
#![allow(clippy::too_many_arguments)]
#[macro_use]
extern crate log;
mod cfi;
mod file;
mod function;
mod location;
mod namespace;
mod range;
mod source;
mod types;
mod unit;
mod variable;
pub use crate::cfi::*;
pub use crate::file::*;
pub use crate::function::*;
pub use crate::location::*;
pub use crate::namespace::*;
pub use crate::range::*;
pub use crate::source::*;
pub use crate::types::*;
pub use crate::unit::*;
pub use crate::variable::*;
use std::borrow::{Borrow, Cow};
use std::error;
use std::fmt;
use std::io;
use std::result;
use std::sync::atomic::{AtomicUsize, Ordering};
/// A parsing error.
///
/// Wraps a static or formatted message; `Cow` avoids allocating for the
/// common literal-message case.
#[derive(Debug)]
pub struct Error(pub Cow<'static, str>);
impl error::Error for Error {
    // NOTE(review): `Error::description` has been deprecated since Rust 1.42
    // in favor of `Display`; kept here for backward compatibility.
    fn description(&self) -> &str {
        self.0.borrow()
    }
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
// Allow `?` / `.into()` on plain string-literal messages without allocating.
impl From<&'static str> for Error {
    fn from(s: &'static str) -> Error {
        Error(Cow::Borrowed(s))
    }
}
// Allow `?` / `.into()` on formatted (owned) messages.
impl From<String> for Error {
    fn from(s: String) -> Error {
        Error(Cow::Owned(s))
    }
}
// The remaining conversions wrap lower-level library errors, prefixing the
// message with the error's source so the origin is visible to callers.
impl From<io::Error> for Error {
    fn from(e: io::Error) -> Error {
        Error(Cow::Owned(format!("IO error: {}", e)))
    }
}
impl From<gimli::Error> for Error {
    fn from(e: gimli::Error) -> Error {
        Error(Cow::Owned(format!("DWARF error: {}", e)))
    }
}
impl From<object::Error> for Error {
    fn from(e: object::Error) -> Error {
        Error(Cow::Owned(format!("object error: {}", e)))
    }
}
/*
impl From<crate_pdb::Error> for Error {
fn from(e: crate_pdb::Error) -> Error {
Error(Cow::Owned(format!("PDB error: {}", e)))
}
}
*/
/// A parsing result.
pub type Result<T> = result::Result<T, Error>;
mod address {
    use std::u64;

    /// An optional address.
    ///
    /// This is similar to `Option<u64>`, but uses `u64::MAX` to encode the `None` case.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct Address(u64);

    impl Address {
        /// Create a known address value.
        ///
        /// Debug-asserts that `address` is not the reserved `u64::MAX`
        /// sentinel used by [`Address::none`].
        #[inline]
        pub fn new(address: u64) -> Address {
            debug_assert!(Address(address) != Address::none());
            Address(address)
        }

        /// Create an unknown or absent address value.
        #[inline]
        pub fn none() -> Address {
            // Spell the sentinel as `u64::MAX` (instead of `!0`) to match
            // the sibling `Size` implementation.
            Address(u64::MAX)
        }

        /// Return true if the address is unknown or absent.
        #[inline]
        pub fn is_none(self) -> bool {
            self == Self::none()
        }

        /// Return true if the address is known.
        #[inline]
        pub fn is_some(self) -> bool {
            self != Self::none()
        }

        /// Return the address, or `None` if it is unknown or absent.
        #[inline]
        pub fn get(self) -> Option<u64> {
            if self.is_none() {
                None
            } else {
                Some(self.0)
            }
        }
    }

    impl Default for Address {
        #[inline]
        fn default() -> Self {
            Address::none()
        }
    }

    // Mirror `Size`'s `From<Option<u64>>` so both optional wrappers expose
    // the same conversion API.
    impl From<Option<u64>> for Address {
        fn from(address: Option<u64>) -> Address {
            match address {
                Some(address) => Address::new(address),
                None => Address::none(),
            }
        }
    }
}
pub use crate::address::Address;
mod size {
    use std::u64;

    /// An optional size.
    ///
    /// This is similar to `Option<u64>`, but uses `u64::MAX` to encode the `None` case.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    pub struct Size(u64);

    impl Size {
        /// Create a known size value.
        ///
        /// Debug-asserts that `size` is not the reserved `u64::MAX` sentinel.
        #[inline]
        pub fn new(size: u64) -> Size {
            let value = Size(size);
            debug_assert!(value != Size::none());
            value
        }

        /// Create an unknown or absent size value.
        #[inline]
        pub fn none() -> Size {
            Size(u64::MAX)
        }

        /// Return true if the size is unknown or absent.
        #[inline]
        pub fn is_none(self) -> bool {
            self == Size::none()
        }

        /// Return true if the size is known.
        #[inline]
        pub fn is_some(self) -> bool {
            !self.is_none()
        }

        /// Return the size, or `None` if it is unknown or absent.
        #[inline]
        pub fn get(self) -> Option<u64> {
            if self.is_some() {
                Some(self.0)
            } else {
                None
            }
        }
    }

    impl Default for Size {
        #[inline]
        fn default() -> Self {
            Size::none()
        }
    }

    impl From<Option<u64>> for Size {
        fn from(size: Option<u64>) -> Size {
            // `None` maps to the sentinel; `Some` goes through the checked
            // constructor.
            size.map_or_else(Size::none, Size::new)
        }
    }
}
pub use crate::size::Size;
/// An identifier with interior mutability, so ids can be renumbered while
/// shared behind `&` references.
#[derive(Debug, Default)]
struct Id(AtomicUsize);

impl Clone for Id {
    fn clone(&self) -> Self {
        // A clone gets its own atomic, seeded with the current value.
        Id::new(self.get())
    }
}

impl Id {
    /// Wraps an initial id value.
    fn new(id: usize) -> Self {
        Id(AtomicUsize::new(id))
    }

    /// Reads the current id.
    fn get(&self) -> usize {
        self.0.load(Ordering::Acquire)
    }

    /// Replaces the id.
    fn set(&self, id: usize) {
        self.0.store(id, Ordering::Release)
    }
}
|
use gl;
/// Three tightly-packed `f32` components, matching a `vec3` vertex
/// attribute as laid out in a GPU vertex buffer.
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone)]
#[repr(C, packed)]
pub struct f32_f32_f32 {
    pub d0: f32,
    pub d1: f32,
    pub d2: f32,
}
impl f32_f32_f32 {
    /// Convenience constructor from the three component values.
    pub fn new(d0: f32, d1: f32, d2: f32) -> Self {
        f32_f32_f32 { d0, d1, d2 }
    }
    /// Enables the vertex attribute array at `location` and describes it to
    /// OpenGL as 3 floats (not normalized) found at byte `offset` within a
    /// vertex of byte `stride`.
    ///
    /// # Safety
    ///
    /// Requires a current GL context with the relevant VAO/VBO bound, and
    /// `stride`/`offset` must describe valid locations in that buffer.
    pub unsafe fn vertex_attrib_pointer(
        gl: &gl::Gl,
        location: usize,
        stride: usize,
        offset: usize,
    ) {
        gl.EnableVertexAttribArray(location as gl::types::GLuint);
        gl.VertexAttribPointer(
            location as gl::types::GLuint,
            3,
            gl::FLOAT,
            gl::FALSE,
            stride as gl::types::GLint,
            offset as *const gl::types::GLvoid,
        );
    }
}
impl From<(f32, f32, f32)> for f32_f32_f32 {
    fn from(other: (f32, f32, f32)) -> Self {
        f32_f32_f32::new(other.0, other.1, other.2)
    }
}
// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/// A packed 2+10+10+10-bit reversed vector attribute
/// (`GL_UNSIGNED_INT_2_10_10_10_REV`), stored via the `vec_2_10_10_10`
/// helper crate.
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone)]
#[repr(C, packed)]
pub struct u2_u10_u10_u10_rev_float {
    pub inner: vec_2_10_10_10::Vector,
}
impl From<(f32, f32, f32, f32)> for u2_u10_u10_u10_rev_float {
    fn from(other: (f32, f32, f32, f32)) -> Self {
        let (x, y, z, w) = other;
        u2_u10_u10_u10_rev_float {
            inner: vec_2_10_10_10::Vector::new(x, y, z, w),
        }
    }
}
impl u2_u10_u10_u10_rev_float {
    /// Enables the vertex attribute array at `location` and describes it as
    /// a 4-component packed `UNSIGNED_INT_2_10_10_10_REV` value; `gl::TRUE`
    /// requests normalization to floating point on the GPU.
    ///
    /// # Safety
    ///
    /// Requires a current GL context with the relevant VAO/VBO bound, and
    /// `stride`/`offset` must describe valid locations in that buffer.
    pub unsafe fn vertex_attrib_pointer(
        gl: &gl::Gl,
        location: usize,
        stride: usize,
        offset: usize,
    ) {
        gl.EnableVertexAttribArray(location as gl::types::GLuint);
        gl.VertexAttribPointer(
            location as gl::types::GLuint,
            4,
            gl::UNSIGNED_INT_2_10_10_10_REV,
            gl::TRUE,
            stride as gl::types::GLint,
            offset as *const gl::types::GLvoid,
        );
    }
}
// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/// A single `i8` vertex attribute, passed to the shader un-normalized
/// (contrast with [`gl_i8_float`], which normalizes).
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone)]
#[repr(C, packed)]
pub struct gl_i8 {
    pub d0: i8,
}
impl From<i8> for gl_i8 {
    fn from(d0: i8) -> Self {
        gl_i8 { d0 }
    }
}
impl gl_i8 {
    /// Enables the vertex attribute array at `location` and describes it as
    /// 1 signed byte, not normalized (`gl::FALSE`).
    ///
    /// # Safety
    ///
    /// Requires a current GL context with the relevant VAO/VBO bound, and
    /// `stride`/`offset` must describe valid locations in that buffer.
    pub unsafe fn vertex_attrib_pointer(
        gl: &gl::Gl,
        location: usize,
        stride: usize,
        offset: usize,
    ) {
        gl.EnableVertexAttribArray(location as gl::types::GLuint);
        gl.VertexAttribPointer(
            location as gl::types::GLuint,
            1,
            gl::BYTE,
            gl::FALSE,
            stride as gl::types::GLint,
            offset as *const gl::types::GLvoid,
        );
    }
}
// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
/// A single `i8` vertex attribute that OpenGL normalizes to a float in
/// [-1.0, 1.0] (the `gl::TRUE` normalization flag below is the only
/// difference from [`gl_i8`]).
#[allow(non_camel_case_types)]
#[derive(Debug, Copy, Clone)]
#[repr(C, packed)]
pub struct gl_i8_float {
    pub d0: i8,
}
impl From<i8> for gl_i8_float {
    fn from(d0: i8) -> Self {
        gl_i8_float { d0 }
    }
}
impl gl_i8_float {
    /// Enables the vertex attribute array at `location` and describes it as
    /// 1 signed byte, normalized (`gl::TRUE`).
    ///
    /// # Safety
    ///
    /// Requires a current GL context with the relevant VAO/VBO bound, and
    /// `stride`/`offset` must describe valid locations in that buffer.
    pub unsafe fn vertex_attrib_pointer(
        gl: &gl::Gl,
        location: usize,
        stride: usize,
        offset: usize,
    ) {
        gl.EnableVertexAttribArray(location as gl::types::GLuint);
        gl.VertexAttribPointer(
            location as gl::types::GLuint,
            1,
            gl::BYTE,
            gl::TRUE,
            stride as gl::types::GLint,
            offset as *const gl::types::GLvoid,
        );
    }
}
|
// NOTE(review): svd2rust-generated register accessors. Kept byte-identical
// (comments only) -- regenerate from the SVD rather than hand-editing.
#[doc = "Register `QUADSPI_LPTR` reader"]
pub type R = crate::R<QUADSPI_LPTR_SPEC>;
#[doc = "Register `QUADSPI_LPTR` writer"]
pub type W = crate::W<QUADSPI_LPTR_SPEC>;
#[doc = "Field `TIMEOUT` reader - TIMEOUT"]
pub type TIMEOUT_R = crate::FieldReader<u16>;
#[doc = "Field `TIMEOUT` writer - TIMEOUT"]
pub type TIMEOUT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:15 - TIMEOUT"]
    #[inline(always)]
    pub fn timeout(&self) -> TIMEOUT_R {
        // The timeout value occupies the low 16 bits of the register.
        TIMEOUT_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - TIMEOUT"]
    #[inline(always)]
    #[must_use]
    pub fn timeout(&mut self) -> TIMEOUT_W<QUADSPI_LPTR_SPEC, 0> {
        TIMEOUT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "QUADSPI low-power timeout register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`quadspi_lptr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`quadspi_lptr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct QUADSPI_LPTR_SPEC;
impl crate::RegisterSpec for QUADSPI_LPTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`quadspi_lptr::R`](R) reader structure"]
impl crate::Readable for QUADSPI_LPTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`quadspi_lptr::W`](W) writer structure"]
impl crate::Writable for QUADSPI_LPTR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets QUADSPI_LPTR to value 0"]
impl crate::Resettable for QUADSPI_LPTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//! Test implementation of custom link functions
use anyhow::Result;
use approx::assert_abs_diff_eq;
use ndarray::{array, Array1, Axis};
use ndarray_glm::{
link::{Link, Transform},
num::Float,
Linear, ModelBuilder,
};
#[test]
// Fits a linear model through a custom affine link function and checks that
// the true coefficients are recovered to near machine precision.
fn linear_with_lin_transform() -> Result<()> {
    // A linear transformation for simplicity.
    struct LinTran {}
    impl Link<Linear<LinTran>> for LinTran {
        // Link: eta = 2.5 * y - 3.4
        fn func<F: Float>(y: F) -> F {
            F::from(2.5).unwrap() * y - F::from(3.4).unwrap()
        }
        // Inverse: y = (eta + 3.4) / 2.5 = (eta + 3.4) * 0.4
        fn func_inv<F: Float>(lin_pred: F) -> F {
            (lin_pred + F::from(3.4).unwrap()) * F::from(0.4).unwrap()
        }
    }
    // Sanity check: func and func_inv must actually be inverses.
    assert_abs_diff_eq!(
        LinTran::func(LinTran::func_inv(0.45)),
        0.45,
        epsilon = 4. * f64::EPSILON
    );
    impl Transform for LinTran {
        fn nat_param<F: Float>(lin_pred: Array1<F>) -> Array1<F> {
            lin_pred.mapv_into(Self::func_inv)
        }
        // Derivative of func_inv is the constant slope 0.4.
        fn d_nat_param<F: Float>(lin_pred: &Array1<F>) -> Array1<F> {
            Array1::<F>::from_elem(lin_pred.len(), F::from(0.4).unwrap())
        }
    }
    // True coefficients the fit should recover: intercept -0.2, slope 0.7.
    let beta = array![-0.2, 0.7];
    let data_x = array![-1.5, -1.2, -0.8, -0.8, -0.5, -0.2, -0.2, 0.3, 0.3, 0.7, 0.9, 1.2, 1.2];
    let mut data_y = data_x.mapv(|x| LinTran::func_inv(beta[0] + beta[1] * x));
    // some x points are redundant, and Gaussian errors are symmetric, so some
    // pairs of points can be moved off of the exact fit without affecting the
    // result.
    data_y[2] += 0.3;
    data_y[3] -= 0.3;
    data_y[5] -= 0.2;
    data_y[6] += 0.2;
    data_y[7] += 0.4;
    data_y[8] -= 0.4;
    data_y[11] -= 0.3;
    data_y[12] += 0.3;
    // Change X data into a 2D array
    let data_x = data_x.insert_axis(Axis(1));
    let model = ModelBuilder::<Linear<LinTran>>::data(data_y.view(), data_x.view()).build()?;
    let fit = model.fit()?;
    dbg!(fit.n_iter);
    dbg!(&fit.result);
    dbg!(&beta);
    assert_abs_diff_eq!(fit.result, beta, epsilon = 16.0 * f64::EPSILON);
    Ok(())
}
#[test]
// Fits a linear model through a cube-root link (i.e. y ~ (a + b*x)^3) and
// checks that the true coefficients are recovered.
fn linear_with_cubic() -> Result<()> {
    // An adjusted cube root link function to test on Linear regression. This
    // fits to y ~ (a + b*x)^3. If the starting guess is zero this fails to
    // converge because the derivative of the link function is zero at the
    // origin.
    struct Cbrt {}
    impl Link<Linear<Cbrt>> for Cbrt {
        fn func<F: Float>(y: F) -> F {
            y.cbrt()
        }
        fn func_inv<F: Float>(lin_pred: F) -> F {
            lin_pred.powi(3)
        }
    }
    // Sanity check: func and func_inv must actually be inverses.
    assert_abs_diff_eq!(
        Cbrt::func(Cbrt::func_inv(0.45)),
        0.45,
        epsilon = 4. * f64::EPSILON
    );
    impl Transform for Cbrt {
        fn nat_param<F: Float>(lin_pred: Array1<F>) -> Array1<F> {
            lin_pred.mapv_into(|w| w.powi(3))
        }
        // Derivative of w^3 is 3*w^2 (zero at the origin, hence the note
        // above about starting guesses).
        fn d_nat_param<F: Float>(lin_pred: &Array1<F>) -> Array1<F> {
            let three = F::from(3.).unwrap();
            lin_pred.mapv(|w| three * w.powi(2))
        }
    }
    type TestLink = Cbrt;
    // True coefficients the fit should recover: intercept -0.2, slope 0.7.
    let beta = array![-0.2, 0.7];
    let data_x = array![-1.5, -1.2, -0.8, -0.8, -0.5, -0.2, -0.2, 0.3, 0.3, 0.7, 0.9, 1.2, 1.2];
    let mut data_y = data_x.mapv(|x| TestLink::func_inv(beta[0] + beta[1] * x));
    // some x points are redundant, and Gaussian errors are symmetric, so some
    // pairs of points can be moved off of the exact fit without affecting the
    // result.
    data_y[2] += 0.3;
    data_y[3] -= 0.3;
    data_y[5] -= 0.2;
    data_y[6] += 0.2;
    data_y[7] += 0.4;
    data_y[8] -= 0.4;
    data_y[11] -= 0.3;
    data_y[12] += 0.3;
    // Change X data into a 2D array
    let data_x = data_x.insert_axis(Axis(1));
    let model = ModelBuilder::<Linear<TestLink>>::data(data_y.view(), data_x.view()).build()?;
    eprintln!("Built model");
    let fit = model.fit()?;
    dbg!(fit.n_iter);
    dbg!(&fit.result);
    dbg!(&beta);
    // Looser tolerance than the affine-link test: the nonlinear link leaves
    // more numerical slack in the solution.
    assert_abs_diff_eq!(fit.result, beta, epsilon = f32::EPSILON as f64);
    Ok(())
}
|
mod alert;
mod attack;
mod attack_result;
mod board;
mod coordinates;
mod cursor;
mod faction;
mod game;
mod heading;
mod label;
mod scores;
mod ship;
pub use self::{
alert::Alert, alert::Level, attack::Attack, attack_result::AttackResult, board::Board,
coordinates::Coordinates, cursor::Cursor, faction::Faction, game::Game, heading::Heading,
label::Label, scores::Scores, ship::Ship,
};
|
#![cfg(all(test, feature = "moz_central"))]
use ress::*;
use std::fs::read_to_string;
use std::path::{Path, PathBuf};
#[test]
/// Scans every JS file from the Firefox `moz_central` JIT-test corpus and
/// fails with the list of scanner errors if any file cannot be tokenized.
fn moz_central() {
    let _ = pretty_env_logger::try_init();
    let moz_central_path = Path::new("moz_central");
    if !moz_central_path.exists() {
        panic!("please download the JIT tests from the firefox repository. see CONTRIBUTING.md for more info");
    }
    let paths = get_paths(&moz_central_path);
    let (failures, total) = walk(&paths);
    eprintln!("completed {:?} tests", total);
    if !failures.is_empty() {
        // Use `{}` for the joined failure list: `{:?}` would debug-escape
        // the embedded newlines and collapse every failure onto one line,
        // defeating the `join("\n")`.
        panic!(
            "{:?} tests failed\n{}",
            failures.len(),
            failures.join("\n")
        );
    }
}
/// Recursively collects the path of every `*.js` file beneath `root`.
///
/// Panics on unreadable directory entries, since a broken corpus checkout
/// should abort the test run.
fn get_paths(root: &Path) -> Vec<PathBuf> {
    walkdir::WalkDir::new(root)
        .min_depth(1)
        .into_iter()
        .filter_map(|entry| {
            let path = entry.expect("bad entry").into_path();
            let is_js_file = path.is_file()
                && path.extension().map_or(false, |ext| ext == "js");
            if is_js_file {
                Some(path)
            } else {
                None
            }
        })
        .collect()
}
/// Scans every file in `paths`, collecting a description of each scanner
/// error encountered.
///
/// Returns the error descriptions and the number of files processed (which
/// is simply `paths.len()` -- the original hand-rolled counter was
/// redundant).
fn walk(paths: &[PathBuf]) -> (Vec<String>, usize) {
    let mut ret = Vec::new();
    for path in paths {
        // The corpus is expected to be readable; a missing file is a setup
        // problem, so panicking here is acceptable in a test helper.
        let js = read_to_string(&path).unwrap();
        let s = Scanner::new(js.as_str());
        for item in s {
            if let Err(e) = item {
                ret.push(format!("{:?}, path: {:?}", e, path.display()));
            }
        }
    }
    (ret, paths.len())
}
|
use ray::{ Ray, HitInfo };
use std::fmt::Debug;
/// Anything that can be placed in the scene and intersected by a ray.
pub trait SceneObject: Debug {
    /// Returns hit information for an intersection of `ray` with this
    /// object whose ray parameter lies within `[min, max]`, or `None` when
    /// there is no such intersection.
    // NOTE(review): presumably implementations return the *closest* hit in
    // range -- confirm against the concrete shapes.
    fn intersect(&self, ray: &Ray, min: f64, max: f64) -> Option<HitInfo>;
}
|
pub(crate) mod comment;
mod file_entry;
mod line_entry;
pub(crate) mod output;
mod warning;
use colored::*;
pub use file_entry::FileEntry;
pub use line_entry::LineEntry;
pub use output::Output;
pub use warning::Warning;
/// Line feed used when joining or splitting file content.
pub const LF: &str = "\n";

/// Drops characters from the front of `string` until the first character
/// that may legally start a key (a letter or an underscore), returning the
/// remainder as an owned `String`.
pub fn remove_invalid_leading_chars(string: &str) -> String {
    let is_valid_start = |c: char| c.is_alphabetic() || c == '_';
    string.chars().skip_while(|&c| !is_valid_start(c)).collect()
}
pub(crate) mod tests {
    use super::*;
    use std::path::PathBuf;

    /// Builds a test `LineEntry` whose raw content is a single newline,
    /// i.e. a blank line.
    #[allow(dead_code)]
    pub fn blank_line_entry(number: usize, total_lines: usize) -> LineEntry {
        // Delegate to `line_entry` instead of duplicating the construction.
        line_entry(number, total_lines, "\n")
    }

    /// Builds a test `LineEntry` at position `number` of a fictional `.env`
    /// file with `total_lines` lines and the given raw content.
    #[allow(dead_code)]
    pub fn line_entry(number: usize, total_lines: usize, raw_string: &str) -> LineEntry {
        LineEntry {
            number,
            file: FileEntry {
                path: PathBuf::from(".env"),
                file_name: ".env".to_string(),
                total_lines,
            },
            raw_string: String::from(raw_string),
            is_deleted: false,
        }
    }

    #[test]
    fn remove_invalid_leading_chars_test() {
        let string = String::from("-1&*FOO");
        assert_eq!("FOO", remove_invalid_leading_chars(&string));

        let string = String::from("***FOO-BAR");
        assert_eq!("FOO-BAR", remove_invalid_leading_chars(&string));
    }
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
// signal handling related tests.
use nix::sys::signal::Signal;
use reverie::syscalls::AddrMut;
use reverie::syscalls::ExitGroup;
use reverie::syscalls::MemoryAccess;
use reverie::syscalls::RtSigpending;
use reverie::syscalls::Syscall;
use reverie::syscalls::SyscallInfo;
use reverie::syscalls::Sysno;
use reverie::Error;
use reverie::Guest;
use reverie::Tool;
/// Reverie tool that intercepts `exit_group`: it injects an `rt_sigpending`
/// syscall into the guest to read the guest's pending-signal set, then
/// tail-injects either the original exit (success) or `exit_group(1)` when
/// the pending set is not exactly the SIGVTALRM bit.
#[derive(Debug, Default, Clone)]
struct LocalState;
#[reverie::tool]
impl Tool for LocalState {
    type GlobalState = ();
    type ThreadState = ();
    async fn handle_syscall_event<T: Guest<Self>>(
        &self,
        guest: &mut T,
        syscall: Syscall,
    ) -> Result<i64, Error> {
        if syscall.number() == Sysno::exit_group {
            // Fixed guest address used as scratch space for the sigset that
            // rt_sigpending writes. NOTE(review): assumes this address is
            // mapped and writable in the tracee -- confirm the test setup.
            let sigset_rptr = 0x7000_0100usize;
            let sigset: AddrMut<libc::sigset_t> = AddrMut::from_raw(sigset_rptr as _).unwrap();
            let exit_failure = ExitGroup::new().with_status(1);
            let exit_success = syscall;
            if guest
                .inject(
                    // sigsetsize of 8 bytes matches the kernel's u64 sigset.
                    RtSigpending::new()
                        .with_set(Some(sigset))
                        .with_sigsetsize(8usize),
                )
                .await
                .is_ok()
            {
                let memory = guest.memory();
                let pending: u64 = memory.read_value(sigset.cast())?;
                // Signal numbers are 1-based; bit positions are 0-based.
                if pending != 1u64 << (Signal::SIGVTALRM as i32 - 1) {
                    guest.tail_inject(exit_failure).await
                } else {
                    guest.tail_inject(exit_success).await
                }
            } else {
                // Injection failed: fall through to the original exit so the
                // guest still terminates normally.
                guest.tail_inject(exit_success).await
            }
        } else {
            // Every other syscall passes through untouched.
            guest.tail_inject(syscall).await
        }
    }
}
#[cfg(all(not(sanitized), test))]
mod tests {
    use std::io;
    use std::mem::MaybeUninit;
    use nix::sys::signal;
    use reverie_ptrace::testing::check_fn;
    use super::*;
    // kernel_sigset_t used by naked syscall
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct KernelSigset(u64);
    impl From<&[Signal]> for KernelSigset {
        fn from(signals: &[Signal]) -> Self {
            let mut set: u64 = 0;
            for &sig in signals {
                // Signal numbers are 1-based; bit positions are 0-based.
                set |= 1u64 << (sig as usize - 1);
            }
            KernelSigset(set)
        }
    }
    /// Blocks `signals` via a raw `rt_sigprocmask` syscall, returning the
    /// previous mask on success.
    ///
    /// # Safety
    ///
    /// Performs a naked libc syscall; `oldset` must remain valid for the
    /// duration of the call (it does -- it is a local).
    #[allow(dead_code)]
    unsafe fn block_signals(signals: &[Signal]) -> io::Result<KernelSigset> {
        let set = KernelSigset::from(signals);
        let mut oldset: MaybeUninit<u64> = MaybeUninit::uninit();
        // 8 == size_of::<u64>(), the kernel sigset size.
        if libc::syscall(
            libc::SYS_rt_sigprocmask,
            libc::SIG_BLOCK,
            &set as *const _,
            oldset.as_mut_ptr(),
            8,
        ) != 0
        {
            Err(io::Error::last_os_error())
        } else {
            Ok(KernelSigset(oldset.assume_init()))
        }
    }
    #[test]
    // The actual test is in `handle_syscall_event`. To test we can get
    // pending signals from tracee, by injecting rt_sigpending.
    fn can_get_pending_signals() {
        check_fn::<LocalState, _>(|| {
            // Block then raise SIGVTALRM so it stays pending; the tool's
            // exit_group interception verifies the pending set.
            assert!(unsafe { block_signals(&[Signal::SIGVTALRM]) }.is_ok());
            assert!(signal::raise(Signal::SIGVTALRM).is_ok());
            unsafe { libc::syscall(libc::SYS_exit_group, 0) };
        });
    }
}
|
// NOTE(review): svd2rust-generated register accessors for the USART request
// register (RQR). Kept byte-identical (comments only) -- regenerate from the
// SVD rather than hand-editing. Each field follows the same generated
// pattern: write-only request bits whose enum has only the "1" variant.
#[doc = "Register `RQR` reader"]
pub type R = crate::R<RQR_SPEC>;
#[doc = "Register `RQR` writer"]
pub type W = crate::W<RQR_SPEC>;
// --- Bit 0: ABRRQ (auto baud rate request) ---
#[doc = "Field `ABRRQ` reader - Auto baud rate request"]
pub type ABRRQ_R = crate::BitReader<ABRRQ_A>;
#[doc = "Auto baud rate request\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ABRRQ_A {
    #[doc = "1: resets the ABRF flag in the USART_ISR and request an automatic baud rate measurement on the next received data frame"]
    Request = 1,
}
impl From<ABRRQ_A> for bool {
    #[inline(always)]
    fn from(variant: ABRRQ_A) -> Self {
        variant as u8 != 0
    }
}
impl ABRRQ_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<ABRRQ_A> {
        // `None` corresponds to the bit being 0, which has no enum variant.
        match self.bits {
            true => Some(ABRRQ_A::Request),
            _ => None,
        }
    }
    #[doc = "resets the ABRF flag in the USART_ISR and request an automatic baud rate measurement on the next received data frame"]
    #[inline(always)]
    pub fn is_request(&self) -> bool {
        *self == ABRRQ_A::Request
    }
}
#[doc = "Field `ABRRQ` writer - Auto baud rate request"]
pub type ABRRQ_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, ABRRQ_A>;
impl<'a, REG, const O: u8> ABRRQ_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "resets the ABRF flag in the USART_ISR and request an automatic baud rate measurement on the next received data frame"]
    #[inline(always)]
    pub fn request(self) -> &'a mut crate::W<REG> {
        self.variant(ABRRQ_A::Request)
    }
}
// --- Bit 1: SBKRQ (send break request) ---
#[doc = "Field `SBKRQ` reader - Send break request"]
pub type SBKRQ_R = crate::BitReader<SBKRQ_A>;
#[doc = "Send break request\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SBKRQ_A {
    #[doc = "1: sets the SBKF flag and request to send a BREAK on the line, as soon as the transmit machine is available"]
    Break = 1,
}
impl From<SBKRQ_A> for bool {
    #[inline(always)]
    fn from(variant: SBKRQ_A) -> Self {
        variant as u8 != 0
    }
}
impl SBKRQ_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<SBKRQ_A> {
        match self.bits {
            true => Some(SBKRQ_A::Break),
            _ => None,
        }
    }
    #[doc = "sets the SBKF flag and request to send a BREAK on the line, as soon as the transmit machine is available"]
    #[inline(always)]
    pub fn is_break(&self) -> bool {
        *self == SBKRQ_A::Break
    }
}
#[doc = "Field `SBKRQ` writer - Send break request"]
pub type SBKRQ_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, SBKRQ_A>;
impl<'a, REG, const O: u8> SBKRQ_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "sets the SBKF flag and request to send a BREAK on the line, as soon as the transmit machine is available"]
    #[inline(always)]
    pub fn break_(self) -> &'a mut crate::W<REG> {
        self.variant(SBKRQ_A::Break)
    }
}
// --- Bit 2: MMRQ (mute mode request) ---
#[doc = "Field `MMRQ` reader - Mute mode request"]
pub type MMRQ_R = crate::BitReader<MMRQ_A>;
#[doc = "Mute mode request\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MMRQ_A {
    #[doc = "1: Puts the USART in mute mode and sets the RWU flag"]
    Mute = 1,
}
impl From<MMRQ_A> for bool {
    #[inline(always)]
    fn from(variant: MMRQ_A) -> Self {
        variant as u8 != 0
    }
}
impl MMRQ_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<MMRQ_A> {
        match self.bits {
            true => Some(MMRQ_A::Mute),
            _ => None,
        }
    }
    #[doc = "Puts the USART in mute mode and sets the RWU flag"]
    #[inline(always)]
    pub fn is_mute(&self) -> bool {
        *self == MMRQ_A::Mute
    }
}
#[doc = "Field `MMRQ` writer - Mute mode request"]
pub type MMRQ_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MMRQ_A>;
impl<'a, REG, const O: u8> MMRQ_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Puts the USART in mute mode and sets the RWU flag"]
    #[inline(always)]
    pub fn mute(self) -> &'a mut crate::W<REG> {
        self.variant(MMRQ_A::Mute)
    }
}
// --- Bit 3: RXFRQ (receive data flush request) ---
#[doc = "Field `RXFRQ` reader - Receive data flush request"]
pub type RXFRQ_R = crate::BitReader<RXFRQ_A>;
#[doc = "Receive data flush request\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RXFRQ_A {
    #[doc = "1: clears the RXNE flag. This allows to discard the received data without reading it, and avoid an overrun condition"]
    Discard = 1,
}
impl From<RXFRQ_A> for bool {
    #[inline(always)]
    fn from(variant: RXFRQ_A) -> Self {
        variant as u8 != 0
    }
}
impl RXFRQ_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<RXFRQ_A> {
        match self.bits {
            true => Some(RXFRQ_A::Discard),
            _ => None,
        }
    }
    #[doc = "clears the RXNE flag. This allows to discard the received data without reading it, and avoid an overrun condition"]
    #[inline(always)]
    pub fn is_discard(&self) -> bool {
        *self == RXFRQ_A::Discard
    }
}
#[doc = "Field `RXFRQ` writer - Receive data flush request"]
pub type RXFRQ_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, RXFRQ_A>;
impl<'a, REG, const O: u8> RXFRQ_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "clears the RXNE flag. This allows to discard the received data without reading it, and avoid an overrun condition"]
    #[inline(always)]
    pub fn discard(self) -> &'a mut crate::W<REG> {
        self.variant(RXFRQ_A::Discard)
    }
}
// --- Bit 4: TXFRQ (transmit data flush request) ---
#[doc = "Field `TXFRQ` reader - Transmit data flush request"]
pub type TXFRQ_R = crate::BitReader<TXFRQ_A>;
#[doc = "Transmit data flush request\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TXFRQ_A {
    #[doc = "1: Set the TXE flags. This allows to discard the transmit data"]
    Discard = 1,
}
impl From<TXFRQ_A> for bool {
    #[inline(always)]
    fn from(variant: TXFRQ_A) -> Self {
        variant as u8 != 0
    }
}
impl TXFRQ_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> Option<TXFRQ_A> {
        match self.bits {
            true => Some(TXFRQ_A::Discard),
            _ => None,
        }
    }
    #[doc = "Set the TXE flags. This allows to discard the transmit data"]
    #[inline(always)]
    pub fn is_discard(&self) -> bool {
        *self == TXFRQ_A::Discard
    }
}
#[doc = "Field `TXFRQ` writer - Transmit data flush request"]
pub type TXFRQ_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, TXFRQ_A>;
impl<'a, REG, const O: u8> TXFRQ_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Set the TXE flags. This allows to discard the transmit data"]
    #[inline(always)]
    pub fn discard(self) -> &'a mut crate::W<REG> {
        self.variant(TXFRQ_A::Discard)
    }
}
// Reader methods: extract each request bit from the raw register value.
impl R {
    #[doc = "Bit 0 - Auto baud rate request"]
    #[inline(always)]
    pub fn abrrq(&self) -> ABRRQ_R {
        ABRRQ_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Send break request"]
    #[inline(always)]
    pub fn sbkrq(&self) -> SBKRQ_R {
        SBKRQ_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Mute mode request"]
    #[inline(always)]
    pub fn mmrq(&self) -> MMRQ_R {
        MMRQ_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Receive data flush request"]
    #[inline(always)]
    pub fn rxfrq(&self) -> RXFRQ_R {
        RXFRQ_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Transmit data flush request"]
    #[inline(always)]
    pub fn txfrq(&self) -> TXFRQ_R {
        TXFRQ_R::new(((self.bits >> 4) & 1) != 0)
    }
}
// Writer methods: each returns a field-writer proxy at the field's offset.
impl W {
    #[doc = "Bit 0 - Auto baud rate request"]
    #[inline(always)]
    #[must_use]
    pub fn abrrq(&mut self) -> ABRRQ_W<RQR_SPEC, 0> {
        ABRRQ_W::new(self)
    }
    #[doc = "Bit 1 - Send break request"]
    #[inline(always)]
    #[must_use]
    pub fn sbkrq(&mut self) -> SBKRQ_W<RQR_SPEC, 1> {
        SBKRQ_W::new(self)
    }
    #[doc = "Bit 2 - Mute mode request"]
    #[inline(always)]
    #[must_use]
    pub fn mmrq(&mut self) -> MMRQ_W<RQR_SPEC, 2> {
        MMRQ_W::new(self)
    }
    #[doc = "Bit 3 - Receive data flush request"]
    #[inline(always)]
    #[must_use]
    pub fn rxfrq(&mut self) -> RXFRQ_W<RQR_SPEC, 3> {
        RXFRQ_W::new(self)
    }
    #[doc = "Bit 4 - Transmit data flush request"]
    #[inline(always)]
    #[must_use]
    pub fn txfrq(&mut self) -> TXFRQ_W<RQR_SPEC, 4> {
        TXFRQ_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Request register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rqr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rqr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RQR_SPEC;
impl crate::RegisterSpec for RQR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rqr::R`](R) reader structure"]
impl crate::Readable for RQR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rqr::W`](W) writer structure"]
impl crate::Writable for RQR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RQR to value 0"]
impl crate::Resettable for RQR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// NOTE(review): svd2rust-generated register accessors for the RCC AHB2
// peripheral reset register. Kept byte-identical (comments only) --
// regenerate from the SVD rather than hand-editing. The GPIOB/C/H fields
// re-export the GPIOA reader/writer types since all share the same shape.
#[doc = "Register `AHB2RSTR` reader"]
pub type R = crate::R<AHB2RSTR_SPEC>;
#[doc = "Register `AHB2RSTR` writer"]
pub type W = crate::W<AHB2RSTR_SPEC>;
#[doc = "Field `GPIOARST` reader - IO port A reset"]
pub type GPIOARST_R = crate::BitReader<GPIOARST_A>;
#[doc = "IO port A reset\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GPIOARST_A {
    #[doc = "0: No effect"]
    NoReset = 0,
    #[doc = "1: Reset peripheral"]
    Reset = 1,
}
impl From<GPIOARST_A> for bool {
    #[inline(always)]
    fn from(variant: GPIOARST_A) -> Self {
        variant as u8 != 0
    }
}
impl GPIOARST_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> GPIOARST_A {
        match self.bits {
            false => GPIOARST_A::NoReset,
            true => GPIOARST_A::Reset,
        }
    }
    #[doc = "No effect"]
    #[inline(always)]
    pub fn is_no_reset(&self) -> bool {
        *self == GPIOARST_A::NoReset
    }
    #[doc = "Reset peripheral"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == GPIOARST_A::Reset
    }
}
#[doc = "Field `GPIOARST` writer - IO port A reset"]
pub type GPIOARST_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, GPIOARST_A>;
impl<'a, REG, const O: u8> GPIOARST_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "No effect"]
    #[inline(always)]
    pub fn no_reset(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOARST_A::NoReset)
    }
    #[doc = "Reset peripheral"]
    #[inline(always)]
    pub fn reset(self) -> &'a mut crate::W<REG> {
        self.variant(GPIOARST_A::Reset)
    }
}
// The remaining GPIO ports reuse GPIOA's reader/writer types verbatim.
#[doc = "Field `GPIOBRST` reader - IO port B reset"]
pub use GPIOARST_R as GPIOBRST_R;
#[doc = "Field `GPIOCRST` reader - IO port C reset"]
pub use GPIOARST_R as GPIOCRST_R;
#[doc = "Field `GPIOHRST` reader - IO port H reset"]
pub use GPIOARST_R as GPIOHRST_R;
#[doc = "Field `GPIOBRST` writer - IO port B reset"]
pub use GPIOARST_W as GPIOBRST_W;
#[doc = "Field `GPIOCRST` writer - IO port C reset"]
pub use GPIOARST_W as GPIOCRST_W;
#[doc = "Field `GPIOHRST` writer - IO port H reset"]
pub use GPIOARST_W as GPIOHRST_W;
// Reader methods: extract each port's reset bit (note port H is bit 7).
impl R {
    #[doc = "Bit 0 - IO port A reset"]
    #[inline(always)]
    pub fn gpioarst(&self) -> GPIOARST_R {
        GPIOARST_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - IO port B reset"]
    #[inline(always)]
    pub fn gpiobrst(&self) -> GPIOBRST_R {
        GPIOBRST_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - IO port C reset"]
    #[inline(always)]
    pub fn gpiocrst(&self) -> GPIOCRST_R {
        GPIOCRST_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 7 - IO port H reset"]
    #[inline(always)]
    pub fn gpiohrst(&self) -> GPIOHRST_R {
        GPIOHRST_R::new(((self.bits >> 7) & 1) != 0)
    }
}
// Writer methods: each returns a field-writer proxy at the field's offset.
impl W {
    #[doc = "Bit 0 - IO port A reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpioarst(&mut self) -> GPIOARST_W<AHB2RSTR_SPEC, 0> {
        GPIOARST_W::new(self)
    }
    #[doc = "Bit 1 - IO port B reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiobrst(&mut self) -> GPIOBRST_W<AHB2RSTR_SPEC, 1> {
        GPIOBRST_W::new(self)
    }
    #[doc = "Bit 2 - IO port C reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiocrst(&mut self) -> GPIOCRST_W<AHB2RSTR_SPEC, 2> {
        GPIOCRST_W::new(self)
    }
    #[doc = "Bit 7 - IO port H reset"]
    #[inline(always)]
    #[must_use]
    pub fn gpiohrst(&mut self) -> GPIOHRST_W<AHB2RSTR_SPEC, 7> {
        GPIOHRST_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "AHB2 peripheral reset register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ahb2rstr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ahb2rstr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct AHB2RSTR_SPEC;
impl crate::RegisterSpec for AHB2RSTR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ahb2rstr::R`](R) reader structure"]
impl crate::Readable for AHB2RSTR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ahb2rstr::W`](W) writer structure"]
impl crate::Writable for AHB2RSTR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets AHB2RSTR to value 0"]
impl crate::Resettable for AHB2RSTR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#![feature(libc)]
#![feature(concat_idents)]
pub mod secure_buffer;
pub mod hash; |
use std::{
collections::HashMap,
io::{stdin, stdout, Write},
};
/// Entry point: runs the HashMap number-statistics demo, then the
/// interactive employee demo (which blocks reading stdin until `quit`).
fn main() {
    num_demo();
    emp_demo();
}
/// Exercises `HashMap` basics (insert vs. `entry`), word counting, and the
/// small statistics / pig-latin helpers, printing every result.
fn num_demo() {
    let mut scores = HashMap::new();
    scores.insert(String::from("Blue"), 10);
    // `entry(...).or_insert` does not overwrite: "Blue" keeps its 10.
    scores.entry(String::from("Blue")).or_insert(25);
    scores.insert(String::from("Yellow"), 50);
    for (team, score) in &scores {
        println!("{}: {}", team, score);
    }
    let text = "hello world wonderful world";
    let mut word_counts = HashMap::new();
    for word in text.split_whitespace() {
        *word_counts.entry(word).or_insert(0) += 1;
    }
    println!("{:?}", word_counts);
    let intlist = vec![2, 2, 1, 5, 4, 5, 6, 3, 7];
    println!("avg: {}", avg(&intlist));
    println!("median: {}", median(&intlist));
    println!("mode: {}", mode(&intlist));
    println!("pig: {}", pig_latin("first"));
}
/// Arithmetic mean of `data` as an `f64`.
///
/// Takes a slice (`&[u32]`) instead of `&Vec<u32>`; existing `&Vec` call
/// sites still work via deref coercion. Returns NaN (0.0/0.0) for empty
/// input, preserving the original behavior.
fn avg(data: &[u32]) -> f64 {
    // Accumulate in u64 so large inputs cannot overflow the sum.
    let sum: u64 = data.iter().map(|&x| u64::from(x)).sum();
    sum as f64 / data.len() as f64
}
/// Median of `data`, taking the upper-middle element for even-length input
/// (same selection rule as the original).
///
/// Takes a slice instead of `&Vec<u32>`, and sorts an owned copy of the
/// values directly instead of building and sorting a `Vec<&u32>`.
///
/// # Panics
///
/// Panics if `data` is empty.
fn median(data: &[u32]) -> u32 {
    let mut sorted = data.to_vec();
    sorted.sort_unstable();
    sorted[sorted.len() / 2]
}
/// Most frequent value in `data`.
///
/// Fix: the original picked an arbitrary winner on count ties because the
/// result depended on `HashMap` iteration order; ties are now broken by
/// choosing the smallest value, so the result is deterministic. Returns 0
/// for empty input (preserving the original sentinel behavior).
fn mode(data: &[u32]) -> u32 {
    let mut counts: HashMap<u32, usize> = HashMap::new();
    for &value in data {
        *counts.entry(value).or_insert(0) += 1;
    }
    counts
        .into_iter()
        // Higher count wins; on equal counts, the smaller value wins.
        .max_by(|a, b| a.1.cmp(&b.1).then(b.0.cmp(&a.0)))
        .map(|(value, _)| value)
        .unwrap_or(0)
}
/// Converts a single word to pig latin: vowel-initial words get "hay"
/// appended; consonant-initial words have their first letter moved to the
/// end, followed by "ay".
///
/// Fix: the original panicked on an empty string (`nth(0).unwrap()`); an
/// empty input now returns an empty string.
fn pig_latin(data: &str) -> String {
    let mut word = String::from(data);
    let first = match word.chars().next() {
        Some(c) => c,
        None => return word,
    };
    match first {
        // NOTE(review): only lowercase vowels are recognized, matching the
        // original behavior -- uppercase input is treated as
        // consonant-initial.
        'a' | 'i' | 'u' | 'e' | 'o' => {
            word.push_str("hay");
        }
        _ => {
            word.push(first);
            word.remove(0);
            word.push_str("ay");
        }
    }
    word
}
/// Interactive employee-directory demo: reads commands from stdin in a
/// loop until `quit` is entered.
fn emp_demo() {
    let mut employees: HashMap<String, Vec<String>> = HashMap::new();
    loop {
        let mut input = String::new();
        // Prompt lists the supported commands ("部署" = department,
        // "名前" = name).
        println!("Command: quit, add 部署 名前, list 部署");
        print!("> ");
        // Flush so the "> " prompt appears before blocking on stdin.
        stdout().flush().unwrap();
        stdin().read_line(&mut input).ok();
        let commands: Vec<&str> = input.trim().split_whitespace().collect();
        if commands.len() > 0 {
            // `run_command` returns false only for `quit`.
            if !run_command(commands, &mut employees) {
                break;
            }
        }
    }
}
/// Dispatches one tokenized command line. Assumes `commands` is non-empty
/// (the caller checks before dispatching).
///
/// Supported commands:
/// * `quit` — returns `false` to stop the command loop.
/// * `add <department> <name>` — records an employee (exactly 3 tokens).
/// * `list [department]` — debug-prints one department's roster, or the
///   whole directory when no department is given.
///
/// Unknown or malformed commands are ignored; returns `true` to keep the
/// loop running.
fn run_command(commands: Vec<&str>, employees: &mut HashMap<String, Vec<String>>) -> bool {
    match commands[0] {
        "quit" => false,
        "add" if commands.len() == 3 => {
            add_employee(employees, commands[1], commands[2]);
            true
        }
        "list" => {
            match commands.get(1) {
                // Destructuring `Some(&department)` yields a plain `&str`,
                // which looks up the `String`-keyed map via `Borrow` — no
                // `get::<str>` turbofish needed.
                Some(&department) => println!("{:?}", employees.get(department)),
                None => println!("{:?}", employees),
            }
            true
        }
        _ => true,
    }
}
/// Appends `employee` to `department`'s roster, creating the department's
/// vector on first use.
fn add_employee(employees: &mut HashMap<String, Vec<String>>, department: &str, employee: &str) {
    // Single `entry` lookup replaces the original entry-then-get_mut pair,
    // which hashed and searched the map twice.
    employees
        .entry(department.to_string())
        .or_default()
        .push(employee.to_string());
}
|
// extern crate syntax;
//
// use syntax::ext::quote::rt::ToTokens;
// use syntax::ast::TokenTree;
// use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
#![allow(unused_imports)]
#![allow(dead_code)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
use std::fmt;
/// A node's child: either raw text or a nested element.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Child {
Text(String),
Node(Node)
}
/// Conversion into a [`Child`], so heterogeneous values (strings, nodes,
/// existing children) can be used wherever a child is expected.
pub trait ToChild {
fn to_child(&self) -> Child;
}
impl Child {
/// Builds a `Child` from anything convertible via [`ToChild`].
pub fn from<T: ToChild>(input: T) -> Child {
input.to_child()
}
}
impl ToChild for Child {
// A `Child` converts to itself by cloning.
fn to_child(&self) -> Child {
self.to_owned()
}
}
// Blanket impl: anything printable becomes a text child. This coexists
// with the `Child`/`Node` impls only because neither type implements
// `ToString` (their `ToString` impl below is commented out).
impl<T: ToString> ToChild for T {
fn to_child(&self) -> Child {
Child::Text(self.to_string())
}
}
impl ToChild for Node {
// A `Node` becomes an element child by cloning.
fn to_child(&self) -> Child {
Child::Node(self.to_owned())
}
}
// impl ToString for Child {
// fn to_string(&self) -> String {
// match self {
// &Child::Text(ref s) => s.to_owned(),
// &Child::Node(ref t) => t.to_string()
// }
// }
// }
/// An element in the tree: a selector (e.g. a tag name) plus its children.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Node {
pub selector: String,
// pub props: Vec<String>,
pub children: Vec<Child>
}
impl Node {
/// Creates an empty node: blank selector, no children.
fn new() -> Node {
Node { selector: String::new(), children: Vec::new() }
}
}
/// Something that can render itself into a [`Node`] tree.
pub trait Component {
fn render(&self) -> Node;
}
/// Lifecycle hook: called after the component has been mounted.
pub trait Mounted {
fn did_mount(&self);
}
/// Lifecycle hook: called after the component has been unmounted.
pub trait Unmounted {
fn did_unmount(&self);
}
/// Lifecycle hook: called after the component has been updated.
pub trait Updated {
fn did_update(&self);
}
/// Lifecycle hook: called before the component is mounted.
pub trait Mount {
fn will_mount(&self);
}
/// Lifecycle hook: called before the component is unmounted.
pub trait Unmount {
fn will_unmount(&self);
}
/// Lifecycle hook: called before the component is updated.
pub trait Update {
fn will_update(&self);
}
/// Lifecycle hook: consulted to decide whether the component re-renders.
pub trait ShouldUpdate {
fn should_update(&self);
}
// pub fn render<'a, T: Component>(selector: &'a str, node: T) {
// println!("<{}>{}</{}>", selector, node.render(), selector);
// }
// pub fn render<'a>(selector: &'a str, node: Node) {
// println!("<{}>{}</{}>", selector, node, selector);
// }
// macro_rules! render {
// ($a:expr, $b:expr) => {
// $b
// }
// }
|
use super::*;
use graph::{EdgeT, NodeT};
use numpy::{PyArray, PyArray1};
use std::collections::HashMap;
#[pymethods]
impl EnsmallenGraph {
    #[text_signature = "($self)"]
    /// Returns mean node degree of the graph.
    pub fn degrees_mean(&self) -> f64 {
        self.graph.degrees_mean()
    }

    #[text_signature = "($self, verbose)"]
    /// Returns number of connected components in graph.
    ///
    /// Returns
    /// ------------------------
    /// Number of connected components.
    pub fn connected_components_number(&self, verbose: bool) -> (NodeT, NodeT, NodeT) {
        self.graph.connected_components_number(verbose)
    }

    #[text_signature = "($self)"]
    /// Returns number of self-loops.
    pub fn get_selfloops_number(&self) -> EdgeT {
        self.graph.get_self_loop_number()
    }

    #[text_signature = "($self)"]
    /// Returns ratio of self-loops.
    pub fn get_selfloops_rate(&self) -> f64 {
        self.graph.get_self_loop_rate()
    }

    #[text_signature = "($self)"]
    /// Returns median node degree of the graph.
    pub fn degrees_median(&self) -> NodeT {
        self.graph.degrees_median()
    }

    #[text_signature = "($self)"]
    /// Returns mode node degree of the graph.
    pub fn degrees_mode(&self) -> NodeT {
        self.graph.degrees_mode()
    }

    #[text_signature = "($self)"]
    /// Returns report relative to the graph metrics.
    ///
    /// The report includes a few useful metrics like:
    ///
    /// * degrees_median: the median degree of the nodes.
    /// * degrees_mean: the mean degree of the nodes.
    /// * degrees_mode: the mode degree of the nodes.
    /// * degrees_max: the max degree of the nodes.
    /// * degrees_min: the min degree of the nodes.
    /// * nodes_number: the number of nodes in the graph.
    /// * edges_number: the number of edges in the graph.
    /// * unique_node_types_number: the number of different node types in the graph.
    /// * unique_edge_types_number: the number of different edge types in the graph.
    /// * traps_rate: probability to end up in a trap when starting into any given node.
    /// * selfloops_rate: percentage of edges that are selfloops.
    ///
    fn report(&self) -> HashMap<&str, String> {
        self.graph.report()
    }

    #[text_signature = "($self, other, verbose)"]
    /// Return report on overlaps of the two graphs.
    ///
    /// Parameters
    /// -------------------
    /// other: &EnsmallenGraph,
    ///     Graph to compute the overlaps with.
    /// verbose: bool = True,
    ///     Whether to show loading bars.
    ///
    /// Returns
    /// -------------------
    /// Textual report.
    fn overlap_textual_report(&self, other: &EnsmallenGraph, verbose: Option<bool>) -> PyResult<String> {
        pyex!(self.graph.overlap_textual_report(&other.graph, verbose.unwrap_or(true)))
    }

    #[text_signature = "($self, node)"]
    /// Return the degree for the given node.
    ///
    /// Parameters
    /// ---------------------
    /// node: int,
    ///     Node ID whose degree is to be returned.
    ///
    /// Returns
    /// ----------------------------
    /// Degree of the given node.
    ///
    fn degree(&self, node: NodeT) -> NodeT {
        self.graph.get_node_degree(node)
    }

    #[text_signature = "($self)"]
    /// Return all the degrees of the nodes in the graph.
    ///
    /// Returns
    /// ----------------------------
    /// Numpy array with all the degrees of the graph.
    ///
    fn degrees(&self) -> PyResult<Py<PyArray1<NodeT>>> {
        let degrees = self.graph.get_node_degrees();
        let gil = pyo3::Python::acquire_gil();
        Ok(to_nparray_1d!(gil, degrees, NodeT))
    }

    #[text_signature = "($self, one, two)"]
    /// Return the Jaccard Index for the two given nodes.
    ///
    /// Parameters
    /// ---------------------
    /// one: int,
    ///     First node ID to use to compute Jaccard Index.
    /// two: int,
    ///     Second node ID to use to compute Jaccard Index.
    ///
    /// Returns
    /// ----------------------------
    /// Jaccard Index for the two given nodes.
    ///
    fn jaccard_index(&self, one: NodeT, two: NodeT) -> PyResult<f64> {
        pyex!(self.graph.jaccard_index(one, two))
    }

    #[text_signature = "($self, one, two)"]
    /// Return the Adamic/Adar for the two given nodes.
    ///
    /// Parameters
    /// ---------------------
    /// one: int,
    ///     First node ID to use to compute Adamic/Adar.
    /// two: int,
    ///     Second node ID to use to compute Adamic/Adar.
    ///
    /// Returns
    /// ----------------------------
    /// Adamic/Adar for the two given nodes.
    ///
    fn adamic_adar_index(&self, one: NodeT, two: NodeT) -> PyResult<f64> {
        pyex!(self.graph.adamic_adar_index(one, two))
    }

    #[text_signature = "($self, one, two)"]
    /// Return the Resource Allocation Index for the two given nodes.
    ///
    /// Parameters
    /// ---------------------
    /// one: int,
    ///     First node ID to use to compute Resource Allocation Index.
    /// two: int,
    ///     Second node ID to use to compute Resource Allocation Index.
    ///
    /// Returns
    /// ----------------------------
    /// Resource Allocation Index for the two given nodes.
    ///
    fn resource_allocation_index(&self, one: NodeT, two: NodeT) -> PyResult<f64> {
        pyex!(self.graph.resource_allocation_index(one, two))
    }

    #[text_signature = "($self, one, two)"]
    /// Return the degrees product for the two given nodes.
    ///
    /// Parameters
    /// ---------------------
    /// one: int,
    ///     First node ID to use to compute degrees product.
    /// two: int,
    ///     Second node ID to use to compute degrees product.
    ///
    /// Returns
    /// ----------------------------
    /// degrees product for the two given nodes.
    ///
    fn degrees_product(&self, one: NodeT, two: NodeT) -> PyResult<usize> {
        pyex!(self.graph.degrees_product(one, two))
    }

    // Fixed: the receiver in text_signature must be spelled `$self`
    // (plain "(self)" is not recognized by pyo3's signature handling).
    #[text_signature = "($self)"]
    /// Return the traps rate of the graph.
    ///
    /// This feature is EXPERIMENTAL and still required proving.
    ///
    fn traps_rate(&self) -> f64 {
        self.graph.traps_rate()
    }
}
|
#[doc = "Register `FLTINR2` reader"]
pub type R = crate::R<FLTINR2_SPEC>;
#[doc = "Register `FLTINR2` writer"]
pub type W = crate::W<FLTINR2_SPEC>;
#[doc = "Field `FLT5E` reader - FLT5E"]
pub type FLT5E_R = crate::BitReader;
#[doc = "Field `FLT5E` writer - FLT5E"]
pub type FLT5E_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT5P` reader - FLT5P"]
pub type FLT5P_R = crate::BitReader;
#[doc = "Field `FLT5P` writer - FLT5P"]
pub type FLT5P_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT5SRC` reader - FLT5SRC"]
pub type FLT5SRC_R = crate::BitReader;
#[doc = "Field `FLT5SRC` writer - FLT5SRC"]
pub type FLT5SRC_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT5F` reader - FLT5F"]
pub type FLT5F_R = crate::FieldReader;
#[doc = "Field `FLT5F` writer - FLT5F"]
pub type FLT5F_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `FLT5LCK` reader - FLT5LCK"]
pub type FLT5LCK_R = crate::BitReader;
#[doc = "Field `FLT5LCK` writer - FLT5LCK"]
pub type FLT5LCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT6E` reader - FLT6E"]
pub type FLT6E_R = crate::BitReader;
#[doc = "Field `FLT6E` writer - FLT6E"]
pub type FLT6E_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT6P` reader - FLT6P"]
pub type FLT6P_R = crate::BitReader;
#[doc = "Field `FLT6P` writer - FLT6P"]
pub type FLT6P_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT6SRC_0` reader - FLT6F"]
pub type FLT6SRC_0_R = crate::BitReader;
#[doc = "Field `FLT6SRC_0` writer - FLT6F"]
pub type FLT6SRC_0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT6F` reader - FLT6F"]
pub type FLT6F_R = crate::FieldReader;
#[doc = "Field `FLT6F` writer - FLT6F"]
pub type FLT6F_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `FLT6LCK` reader - FLT6LCK"]
pub type FLT6LCK_R = crate::BitReader;
#[doc = "Field `FLT6LCK` writer - FLT6LCK"]
pub type FLT6LCK_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT1SRC_1` reader - FLT1SRC_1"]
pub type FLT1SRC_1_R = crate::BitReader;
#[doc = "Field `FLT1SRC_1` writer - FLT1SRC_1"]
pub type FLT1SRC_1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT2SRC_1` reader - FLT2SRC_1"]
pub type FLT2SRC_1_R = crate::BitReader;
#[doc = "Field `FLT2SRC_1` writer - FLT2SRC_1"]
pub type FLT2SRC_1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT3SRC_1` reader - FLT3SRC_1"]
pub type FLT3SRC_1_R = crate::BitReader;
#[doc = "Field `FLT3SRC_1` writer - FLT3SRC_1"]
pub type FLT3SRC_1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT4SRC_1` reader - FLT4SRC_1"]
pub type FLT4SRC_1_R = crate::BitReader;
#[doc = "Field `FLT4SRC_1` writer - FLT4SRC_1"]
pub type FLT4SRC_1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT5SRC_1` reader - FLT5SRC_1"]
pub type FLT5SRC_1_R = crate::BitReader;
#[doc = "Field `FLT5SRC_1` writer - FLT5SRC_1"]
pub type FLT5SRC_1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLT6SRC_1` reader - FLT6SRC"]
pub type FLT6SRC_1_R = crate::BitReader;
#[doc = "Field `FLT6SRC_1` writer - FLT6SRC"]
pub type FLT6SRC_1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `FLTSD` reader - FLTSD"]
pub type FLTSD_R = crate::FieldReader;
#[doc = "Field `FLTSD` writer - FLTSD"]
pub type FLTSD_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
impl R {
#[doc = "Bit 0 - FLT5E"]
#[inline(always)]
pub fn flt5e(&self) -> FLT5E_R {
FLT5E_R::new((self.bits & 1) != 0)
}
#[doc = "Bit 1 - FLT5P"]
#[inline(always)]
pub fn flt5p(&self) -> FLT5P_R {
FLT5P_R::new(((self.bits >> 1) & 1) != 0)
}
#[doc = "Bit 2 - FLT5SRC"]
#[inline(always)]
pub fn flt5src(&self) -> FLT5SRC_R {
FLT5SRC_R::new(((self.bits >> 2) & 1) != 0)
}
#[doc = "Bits 3:6 - FLT5F"]
#[inline(always)]
pub fn flt5f(&self) -> FLT5F_R {
FLT5F_R::new(((self.bits >> 3) & 0x0f) as u8)
}
#[doc = "Bit 7 - FLT5LCK"]
#[inline(always)]
pub fn flt5lck(&self) -> FLT5LCK_R {
FLT5LCK_R::new(((self.bits >> 7) & 1) != 0)
}
#[doc = "Bit 8 - FLT6E"]
#[inline(always)]
pub fn flt6e(&self) -> FLT6E_R {
FLT6E_R::new(((self.bits >> 8) & 1) != 0)
}
#[doc = "Bit 9 - FLT6P"]
#[inline(always)]
pub fn flt6p(&self) -> FLT6P_R {
FLT6P_R::new(((self.bits >> 9) & 1) != 0)
}
#[doc = "Bit 10 - FLT6SRC_0"]
#[inline(always)]
pub fn flt6src_0(&self) -> FLT6SRC_0_R {
FLT6SRC_0_R::new(((self.bits >> 10) & 1) != 0)
}
#[doc = "Bits 11:14 - FLT6F"]
#[inline(always)]
pub fn flt6f(&self) -> FLT6F_R {
FLT6F_R::new(((self.bits >> 11) & 0x0f) as u8)
}
#[doc = "Bit 15 - FLT6LCK"]
#[inline(always)]
pub fn flt6lck(&self) -> FLT6LCK_R {
FLT6LCK_R::new(((self.bits >> 15) & 1) != 0)
}
#[doc = "Bit 16 - FLT1SRC_1"]
#[inline(always)]
pub fn flt1src_1(&self) -> FLT1SRC_1_R {
FLT1SRC_1_R::new(((self.bits >> 16) & 1) != 0)
}
#[doc = "Bit 17 - FLT2SRC_1"]
#[inline(always)]
pub fn flt2src_1(&self) -> FLT2SRC_1_R {
FLT2SRC_1_R::new(((self.bits >> 17) & 1) != 0)
}
#[doc = "Bit 18 - FLT3SRC_1"]
#[inline(always)]
pub fn flt3src_1(&self) -> FLT3SRC_1_R {
FLT3SRC_1_R::new(((self.bits >> 18) & 1) != 0)
}
#[doc = "Bit 19 - FLT4SRC_1"]
#[inline(always)]
pub fn flt4src_1(&self) -> FLT4SRC_1_R {
FLT4SRC_1_R::new(((self.bits >> 19) & 1) != 0)
}
#[doc = "Bit 20 - FLT5SRC_1"]
#[inline(always)]
pub fn flt5src_1(&self) -> FLT5SRC_1_R {
FLT5SRC_1_R::new(((self.bits >> 20) & 1) != 0)
}
#[doc = "Bit 21 - FLT6SRC_1"]
#[inline(always)]
pub fn flt6src_1(&self) -> FLT6SRC_1_R {
FLT6SRC_1_R::new(((self.bits >> 21) & 1) != 0)
}
#[doc = "Bits 24:25 - FLTSD"]
#[inline(always)]
pub fn fltsd(&self) -> FLTSD_R {
FLTSD_R::new(((self.bits >> 24) & 3) as u8)
}
}
impl W {
#[doc = "Bit 0 - FLT5E"]
#[inline(always)]
#[must_use]
pub fn flt5e(&mut self) -> FLT5E_W<FLTINR2_SPEC, 0> {
FLT5E_W::new(self)
}
#[doc = "Bit 1 - FLT5P"]
#[inline(always)]
#[must_use]
pub fn flt5p(&mut self) -> FLT5P_W<FLTINR2_SPEC, 1> {
FLT5P_W::new(self)
}
#[doc = "Bit 2 - FLT5SRC"]
#[inline(always)]
#[must_use]
pub fn flt5src(&mut self) -> FLT5SRC_W<FLTINR2_SPEC, 2> {
FLT5SRC_W::new(self)
}
#[doc = "Bits 3:6 - FLT5F"]
#[inline(always)]
#[must_use]
pub fn flt5f(&mut self) -> FLT5F_W<FLTINR2_SPEC, 3> {
FLT5F_W::new(self)
}
#[doc = "Bit 7 - FLT5LCK"]
#[inline(always)]
#[must_use]
pub fn flt5lck(&mut self) -> FLT5LCK_W<FLTINR2_SPEC, 7> {
FLT5LCK_W::new(self)
}
#[doc = "Bit 8 - FLT6E"]
#[inline(always)]
#[must_use]
pub fn flt6e(&mut self) -> FLT6E_W<FLTINR2_SPEC, 8> {
FLT6E_W::new(self)
}
#[doc = "Bit 9 - FLT6P"]
#[inline(always)]
#[must_use]
pub fn flt6p(&mut self) -> FLT6P_W<FLTINR2_SPEC, 9> {
FLT6P_W::new(self)
}
#[doc = "Bit 10 - FLT6SRC_0"]
#[inline(always)]
#[must_use]
pub fn flt6src_0(&mut self) -> FLT6SRC_0_W<FLTINR2_SPEC, 10> {
FLT6SRC_0_W::new(self)
}
#[doc = "Bits 11:14 - FLT6F"]
#[inline(always)]
#[must_use]
pub fn flt6f(&mut self) -> FLT6F_W<FLTINR2_SPEC, 11> {
FLT6F_W::new(self)
}
#[doc = "Bit 15 - FLT6LCK"]
#[inline(always)]
#[must_use]
pub fn flt6lck(&mut self) -> FLT6LCK_W<FLTINR2_SPEC, 15> {
FLT6LCK_W::new(self)
}
#[doc = "Bit 16 - FLT1SRC_1"]
#[inline(always)]
#[must_use]
pub fn flt1src_1(&mut self) -> FLT1SRC_1_W<FLTINR2_SPEC, 16> {
FLT1SRC_1_W::new(self)
}
#[doc = "Bit 17 - FLT2SRC_1"]
#[inline(always)]
#[must_use]
pub fn flt2src_1(&mut self) -> FLT2SRC_1_W<FLTINR2_SPEC, 17> {
FLT2SRC_1_W::new(self)
}
#[doc = "Bit 18 - FLT3SRC_1"]
#[inline(always)]
#[must_use]
pub fn flt3src_1(&mut self) -> FLT3SRC_1_W<FLTINR2_SPEC, 18> {
FLT3SRC_1_W::new(self)
}
#[doc = "Bit 19 - FLT4SRC_1"]
#[inline(always)]
#[must_use]
pub fn flt4src_1(&mut self) -> FLT4SRC_1_W<FLTINR2_SPEC, 19> {
FLT4SRC_1_W::new(self)
}
#[doc = "Bit 20 - FLT5SRC_1"]
#[inline(always)]
#[must_use]
pub fn flt5src_1(&mut self) -> FLT5SRC_1_W<FLTINR2_SPEC, 20> {
FLT5SRC_1_W::new(self)
}
#[doc = "Bit 21 - FLT6SRC_1"]
#[inline(always)]
#[must_use]
pub fn flt6src_1(&mut self) -> FLT6SRC_1_W<FLTINR2_SPEC, 21> {
FLT6SRC_1_W::new(self)
}
#[doc = "Bits 24:25 - FLTSD"]
#[inline(always)]
#[must_use]
pub fn fltsd(&mut self) -> FLTSD_W<FLTINR2_SPEC, 24> {
FLTSD_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "HRTIM Fault Input Register 2\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fltinr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fltinr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FLTINR2_SPEC;
impl crate::RegisterSpec for FLTINR2_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`fltinr2::R`](R) reader structure"]
impl crate::Readable for FLTINR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`fltinr2::W`](W) writer structure"]
impl crate::Writable for FLTINR2_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FLTINR2 to value 0"]
impl crate::Resettable for FLTINR2_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
use serde;
use serde::de::value::Error;
mod complex_value;
mod field_value;
mod map_value;
mod seq_value;
mod struct_value;
mod value;
pub(crate) use self::field_value::FieldValueDeserializer;
pub(crate) use self::value::ValueDeserializer;
/// Converts a low-level gob decoding error into a serde `Error` by
/// formatting its `Debug` representation as a custom error message.
impl From<::internal::gob::Error> for Error {
fn from(err: ::internal::gob::Error) -> Error {
serde::de::Error::custom(format!("{:?}", err))
}
}
|
use crate::library::StoryHandle;
use crate::display;
use crate::input::{number, InputResult};
/// Shows the story list and prompts until the user picks a valid index
/// or quits.
///
/// Returns `Some(handle)` for the chosen story, or `None` if the user
/// quit. Invalid and out-of-range input re-prompt.
pub fn title_screen(handles: &[StoryHandle]) -> Option<&StoryHandle> {
println!("CYOA\n");
// List each story's name alongside its selectable index.
display::list(handles
.iter()
.map(|handle| &handle.metadata.name)
.enumerate());
loop {
display::prompt(true, false);
match number() {
InputResult::Quit => return None,
InputResult::Invalid(_) => println!("Unknown index"),
InputResult::Item(i) => {
// The `i < 0` test implies `number()` yields a signed value; both
// negative input and past-the-end indices re-prompt.
if i < 0 || i as usize >= handles.len() {
println!("Index out of range");
} else {
return Some(&handles[i as usize])
}
}
}
}
}
|
use std::error::Error;
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::path::Path;
#[macro_use]
extern crate simple_error;
/// Counts trees (`#`) hit on several fixed slopes while descending a
/// horizontally repeating map read from the file `input`, printing each
/// slope's count and the product of all counts (AoC 2020 day 3 style).
fn main() -> Result<(), Box<dyn Error>> {
    let path = Path::new("input");
    let file = File::open(&path)?;
    // Each row is kept as raw bytes; the pattern repeats to the right.
    let mut treemap = Vec::new();
    for line in BufReader::new(file).lines() {
        treemap.push(line?.into_bytes());
    }
    let mut product: u32 = 1;
    // (dc, dr) = (columns right, rows down) per step. Borrowing a slice
    // instead of allocating a `vec!` also works on pre-2021 editions.
    for &(dc, dr) in &[(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)] {
        let mut row: usize = 0;
        let mut col: usize = 0;
        let mut trees: u32 = 0;
        while row < treemap.len() {
            let rowdata = &treemap[row];
            // The column wraps because the map tiles horizontally.
            match rowdata[col % rowdata.len()] {
                b'#' => trees = trees.checked_add(1).unwrap(),
                b'.' => (),
                x => bail!("unknown char {}", x),
            }
            row = row.checked_add(dr).unwrap();
            col = col.checked_add(dc).unwrap();
        }
        // BUG FIX: the original printed the deltas swapped
        // ("right {dr} down {dc}"); dc is the rightward step and dr the
        // downward step.
        println!("right {} down {} -> {}", dc, dr, trees);
        product = product.checked_mul(trees).unwrap();
    }
    println!("{}", product);
    Ok(())
}
|
use model;
type Attribute = model::Attribute;
type Param = model::Parameter;
#[test]
// `is_param_present` should match on parameter name alone, regardless of
// the stored value.
fn test_is_param_present() {
let mut t = Attribute {
name:String::new(),
attribute_type:String::new(),
is_array:false,
params:Vec::new(),
};
let p = Param {
name:"param1".to_string(),
value:"param1val".to_string(),
};
t.params.push(Box::new(p));
assert!(t.is_param_present("param1"));
assert!(!t.is_param_present("param2"));
}
#[test]
// `is_param_value_present` should require BOTH the name and the exact
// value to match; a correct name with a different value must not match.
fn test_is_param_value_present() {
let mut t = Attribute {
name:String::new(),
attribute_type:String::new(),
is_array:false,
params:Vec::new(),
};
let p = Param {
name:"param1".to_string(),
value:"param1val".to_string(),
};
t.params.push(Box::new(p));
assert!(t.is_param_value_present("param1", "param1val"));
assert!(!t.is_param_value_present("param1", "param1value"));
assert!(!t.is_param_value_present("param2", "param1val"));
}
|
#![warn(missing_docs)]
#![feature(proc_macro_hygiene, decl_macro)]
extern crate rocket;
extern crate redis;
extern crate rocket_contrib;
extern crate serde_derive;
use std::convert::From;
use std::io::Cursor;
use redis::{Client, Commands, RedisError};
use rocket::{Outcome, Request, Response, Rocket, State, routes, get, post, delete};
use rocket::fairing::AdHoc;
use rocket::http::{Header, ContentType, Method, Status};
use rocket::request::{self, FromRequest};
use rocket_contrib::json;
use rocket_contrib::json::{Json, JsonValue};
use serde_derive::{Serialize, Deserialize};
#[allow(missing_docs)]
fn main() {
// Panics at startup if the redis URL cannot be parsed.
let client = Client::open("redis://redis:6379").unwrap();
rocket(client).launch();
}
/// creates the main Rocket object
///
/// Mounts the task routes, attaches a response fairing that adds
/// permissive CORS headers to JSON responses and answers OPTIONS
/// preflights with an empty 200, and stores the redis `Client` as
/// managed state for the handlers.
fn rocket(client: Client) -> Rocket {
rocket::ignite()
.mount("/", routes![set, get, del])
.attach(AdHoc::on_response("cors", |request: &Request, response: &mut Response| {
// https://github.com/SergioBenitez/Rocket/issues/25#issuecomment-313895086
if request.method() == Method::Options || response.content_type() == Some(ContentType::JSON) {
response.set_header(Header::new("Access-Control-Allow-Origin", "*"));
response.set_header(Header::new("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE"));
response.set_header(Header::new("Access-Control-Allow-Headers", "Content-Type, Accept, X-API-Key"));
response.set_header(Header::new("Access-Control-Allow-Credentials", "true"));
}
// Turn any OPTIONS request into an empty 200 so preflights succeed.
if request.method() == Method::Options {
response.set_header(ContentType::Plain);
response.set_sized_body(Cursor::new(""));
response.set_status(Status::Ok);
}
}))
.manage(client)
}
/// A single task to be stored.
///
/// Persisted in redis as a hash holding `text` and `checked`, plus a
/// separate `<id>_children` list for the child task ids.
#[allow(missing_docs)]
#[derive(Serialize, Deserialize)]
struct Task {
id: String,
text: String,
checked: bool,
children: Vec<String>
}
/// set will create or update a new task
///
/// Writes `text` and `checked` into the task's redis hash, then rebuilds
/// the `<id>_children` list from scratch (delete, then push each child).
#[post("/set", format = "json", data = "<task>")]
fn set(task: Json<Task>, _key: ApiKey, client: State<Client>) -> Result<JsonValue, RedisError> {
let conn = client.get_connection()?;
conn.hset(task.0.id.clone(), "text", task.0.text)?;
conn.hset(task.0.id.clone(), "checked", task.0.checked)?;
let children_id = task.0.id.clone() + "_children";
conn.del(children_id.clone())?;
// lpush reverses insertion order relative to the input vector.
for child in task.0.children {
conn.lpush(children_id.clone(), child)?;
}
Ok(json!({
"ok": true,
}))
}
/// get will get a task if it exists, or it will 500 out
///
/// Reads the hash fields and the `<id>_children` list back into a `Task`;
/// `checked` is stored as a string and compared against "true".
#[get("/<id>", format = "json")]
fn get(id: String, _key: ApiKey, client: State<Client>) -> Result<Json<Task>, RedisError> {
let conn = client.get_connection()?;
let checked: String = conn.hget(id.clone(), "checked")?;
Ok(Json(Task{
id: id.clone(),
text: conn.hget(id.clone(), "text")?,
checked: checked == "true",
children: conn.lrange(id + "_children", 0, -1)?,
}))
}
/// delete will remove a task
///
/// NOTE(review): only the task hash is deleted; the `<id>_children` list
/// appears to be left behind — confirm whether that is intentional.
#[delete("/<id>", format = "json")]
fn del(id: String, _key: ApiKey, client: State<Client>) -> Result<JsonValue, RedisError> {
let conn = client.get_connection()?;
conn.del(id)?;
Ok(json!({
"ok": true,
}))
}
/// API_KEY from the environment, stored at compile time.
const API_KEY: &'static str = env!("API_KEY");
/// https://api.rocket.rs/rocket/request/trait.FromRequest.html#example-1
///
/// Request guard that extracts and validates the `X-API-Key` header.
struct ApiKey(String);
impl<'a, 'r> FromRequest<'a, 'r> for ApiKey {
type Error = ();
/// Requires exactly one `X-API-Key` header; zero or multiple headers
/// fail with 400 Bad Request. A present-but-wrong key *forwards* to the
/// next matching route rather than failing — NOTE(review): confirm the
/// forward (vs. an auth failure) is intentional.
fn from_request(request: &'a Request<'r>) -> request::Outcome<ApiKey, ()> {
let keys: Vec<&str> = request.headers().get("X-API-Key").collect();
if keys.len() != 1 {
return Outcome::Failure((Status::BadRequest, ()));
}
let key = keys[0];
if key != API_KEY {
Outcome::Forward(())
} else {
Outcome::Success(ApiKey(key.to_owned()))
}
}
}
#[cfg(test)]
mod test {
extern crate serde_json;
use rocket::local::Client;
use rocket::http::{ContentType, Status, Header};
use super::{rocket, Task, API_KEY};
// End-to-end set → get → delete round-trip through the local Rocket
// client; requires a redis server listening on 127.0.0.1:6379.
#[test]
fn works() {
let client = redis::Client::open("redis://127.0.0.1:6379").unwrap();
let client = Client::new(rocket(client)).unwrap();
let api_key_header = Header::new("X-API-Key", API_KEY);
let task = Task{
id: "test".to_owned(),
text: "testtest".to_owned(),
checked: true,
children: vec![],
};
let res = client
.post("/set")
.header(ContentType::JSON)
.header(api_key_header.clone())
.body(serde_json::to_string(&task).unwrap())
.dispatch();
assert_eq!(res.status(), Status::Ok);
let mut res = client
.get(format!("/{}", task.id))
.header(ContentType::JSON)
.header(api_key_header.clone())
.dispatch();
assert_eq!(res.status(), Status::Ok);
let s = res.body().unwrap().into_string().unwrap();
let target: Task = serde_json::from_str(&s).unwrap();
assert_eq!(target.id, task.id);
assert_eq!(target.text, task.text);
assert_eq!(target.checked, task.checked);
assert_eq!(target.children, task.children);
let res = client
.delete(format!("/{}", task.id))
.header(ContentType::JSON)
.header(api_key_header)
.dispatch();
assert_eq!(res.status(), Status::Ok);
}
}
|
// BucketSizer trait
#![forbid(unsafe_code)]
#![deny(missing_docs)]
use anyhow::Result;
use async_trait::async_trait;
use super::{
Bucket,
Buckets,
};
/// `BucketSizer` represents the required methods to list S3 buckets and find
/// their sizes.
///
/// This trait should be implemented by all `Client`s performing these tasks.
#[async_trait]
pub trait BucketSizer {
/// Returns a list of bucket names.
///
/// # Errors
///
/// Returns an error if the bucket list cannot be retrieved.
async fn buckets(&self) -> Result<Buckets>;
/// Returns the size of the given `bucket` in bytes.
///
/// # Errors
///
/// Returns an error if the size lookup for `bucket` fails.
async fn bucket_size(&self, bucket: &Bucket) -> Result<u64>;
}
|
mod instructions;
mod registers;
|
#[doc = "Register `HWCFGR` reader"]
pub type R = crate::R<HWCFGR_SPEC>;
#[doc = "Field `AXI` reader - AXI interface"]
pub type AXI_R = crate::FieldReader;
#[doc = "Field `FIFO` reader - FIFO depth"]
pub type FIFO_R = crate::FieldReader;
#[doc = "Field `PRES` reader - Prescaler"]
pub type PRES_R = crate::FieldReader;
#[doc = "Field `IDL` reader - ID Length"]
pub type IDL_R = crate::FieldReader;
#[doc = "Field `MMW` reader - Memory map write"]
pub type MMW_R = crate::FieldReader;
#[doc = "Field `MST` reader - Master"]
pub type MST_R = crate::FieldReader;
impl R {
#[doc = "Bits 0:3 - AXI interface"]
#[inline(always)]
pub fn axi(&self) -> AXI_R {
AXI_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bits 4:11 - FIFO depth"]
#[inline(always)]
pub fn fifo(&self) -> FIFO_R {
FIFO_R::new(((self.bits >> 4) & 0xff) as u8)
}
#[doc = "Bits 12:19 - Prescaler"]
#[inline(always)]
pub fn pres(&self) -> PRES_R {
PRES_R::new(((self.bits >> 12) & 0xff) as u8)
}
#[doc = "Bits 20:23 - ID Length"]
#[inline(always)]
pub fn idl(&self) -> IDL_R {
IDL_R::new(((self.bits >> 20) & 0x0f) as u8)
}
#[doc = "Bits 24:27 - Memory map write"]
#[inline(always)]
pub fn mmw(&self) -> MMW_R {
MMW_R::new(((self.bits >> 24) & 0x0f) as u8)
}
#[doc = "Bits 28:31 - Master"]
#[inline(always)]
pub fn mst(&self) -> MST_R {
MST_R::new(((self.bits >> 28) & 0x0f) as u8)
}
}
#[doc = "HW configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`hwcfgr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct HWCFGR_SPEC;
impl crate::RegisterSpec for HWCFGR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`hwcfgr::R`](R) reader structure"]
impl crate::Readable for HWCFGR_SPEC {}
#[doc = "`reset()` method sets HWCFGR to value 0x1130_0080"]
impl crate::Resettable for HWCFGR_SPEC {
const RESET_VALUE: Self::Ux = 0x1130_0080;
}
|
#![deny(clippy::all, clippy::pedantic)]
use std::collections::{BTreeMap, BTreeSet};
/// A roster mapping grade number to the set of student names in it.
///
/// `BTreeMap`/`BTreeSet` keep both grades and names in ascending order,
/// so `grades` and `grade` return sorted data with no extra work.
#[derive(Default)]
pub struct School(BTreeMap<u32, BTreeSet<String>>);
impl School {
    /// Creates an empty school.
    pub fn new() -> Self {
        Self(BTreeMap::new())
    }
    /// Adds `student` to `grade`, creating the grade on first use.
    /// Duplicate names within a grade are ignored (set semantics).
    pub fn add(&mut self, grade: u32, student: &str) {
        // `or_default` replaces `or_insert_with(BTreeSet::new)` (clippy).
        self.0.entry(grade).or_default().insert(student.to_string());
    }
    /// Returns all grades that have at least one student, ascending.
    pub fn grades(&self) -> Vec<u32> {
        // `copied` instead of `cloned`: u32 keys are `Copy`.
        self.0.keys().copied().collect()
    }
    /// Returns the students enrolled in `grade`, alphabetically sorted,
    /// or `None` if nobody is enrolled in that grade.
    pub fn grade(&self, grade: u32) -> Option<Vec<String>> {
        self.0.get(&grade).map(|set| set.iter().cloned().collect())
    }
}
|
//! Assembler directive handling.
use crate::asm::DbrState;
use crate::int::u24;
use crate::int::Int;
use crate::syn::atom::Directive;
use crate::syn::int::IntLit;
use crate::syn::operand::Operand;
use crate::syn::operand::Symbol;
/// A directive type, indicating a well-known assembler directive.
pub(in crate::asm) enum DirectiveType<'asm> {
/// The `.origin` directive, indicating to the assembler that the program
/// counter should unconditionally jump to the given argument.
Origin(u24),
/// The `.extern` directive, which indicates that a name is defined in another
/// file. If the bank the symbol is allocated in is not given, it is assumed
/// to be in the current bank.
Extern {
/// The external symbol name.
sym: Symbol<'asm>,
/// The bank, if different from the current one.
bank: Option<u8>,
},
/// The `.global` directive, which marks a symbol as exported from the current
/// file, usable in `.extern` directives elsewhere. It must appear after the
/// label is defined.
Global(Symbol<'asm>),
/// The `.bank auto` directive, which changes the DBR state known to the
/// assembler.
Bank(DbrState),
/// A generic directive for emitting straight literal data. `.data`, `.fill`,
/// `.skip`, `.space`, and `.zero` are sugar for this directive.
Data {
/// The number of bytes to fill with.
count: usize,
/// The value to fill the region with. If empty, this is treated as if it
/// were a single, zero byte.
values: Vec<Int>,
},
}
impl<'asm> DirectiveType<'asm> {
    /// Parses a well-known directive from the given syntax.
    ///
    /// This function also handles directive synonyms, reducing them down to
    /// something simple that the assembler can understand. Returns `None`
    /// when the directive name is unknown or its arguments have the wrong
    /// shape.
    // TODO: A better error type?
    pub fn from_syn(dir: &Directive<'asm>) -> Option<Self> {
        let name = dir.sym.name.to_lowercase();
        let dir = match name.as_str() {
            ".origin" | ".org" => match &dir.args[..] {
                [Operand::Int(int)] => {
                    DirectiveType::Origin(u24::from_u32(int.value.to_u32()))
                }
                _ => return None,
            },
            ".extern" => match &dir.args[..] {
                // Bare symbol: bank defaults to the current one.
                [Operand::Symbol(sym)] => DirectiveType::Extern {
                    sym: *sym,
                    bank: None,
                },
                // Symbol plus an explicit 8-bit bank literal.
                [Operand::Symbol(sym), Operand::Int(IntLit {
                    value: Int::I8(bank),
                    ..
                })] => DirectiveType::Extern {
                    sym: *sym,
                    bank: Some(*bank),
                },
                _ => return None,
            },
            ".bank" => match &dir.args[..] {
                [Operand::Symbol(Symbol { name, .. })] => match *name {
                    "auto" | "pc" => DirectiveType::Bank(DbrState::Pc),
                    "no_assume" | "else" | "unknown" => {
                        DirectiveType::Bank(DbrState::Else)
                    }
                    _ => return None,
                },
                [Operand::Int(IntLit {
                    value: Int::I8(bank),
                    ..
                })] => DirectiveType::Bank(DbrState::Fixed(*bank)),
                _ => return None,
            },
            ".global" | ".globl" => match &dir.args[..] {
                [Operand::Symbol(sym)] => DirectiveType::Global(*sym),
                _ => return None,
            },
            ".data" | ".fill" | ".skip" | ".space" | ".zero" => {
                let mut args = &dir.args[..];
                if args.is_empty() {
                    return None;
                }
                // `.data` always emits its values once; the other spellings
                // take a leading repeat count. (`else if let` replaces the
                // original nested `else { if let .. }`.)
                let count = if name == ".data" {
                    1
                } else if let Operand::Int(int) = &args[0] {
                    args = &args[1..];
                    int.value.to_u32() as usize
                } else {
                    return None;
                };
                // `.zero` takes no fill values: it always fills with zeroes.
                if name == ".zero" && !args.is_empty() {
                    return None;
                }
                let mut values = Vec::new();
                for arg in args {
                    match arg {
                        Operand::Int(int) => values.push(int.value),
                        _ => return None,
                    }
                }
                DirectiveType::Data { count, values }
            }
            _ => return None,
        };
        Some(dir)
    }
}
|
/// Expands to a generated transaction script that creates a child VASP
/// account for the given coin type.
///
/// `$coint` is the coin-type module name spliced into both the import
/// list and the `create_child_vasp_account` type argument; the trailing
/// arguments are forwarded to the inner `fmt!` call (presumably the
/// `child_address`, `auth_key_prefix`, `all_currencies`, and
/// `initial_bal` placeholders — confirm against `TEMPLATE_SCRIPT_MAIN`).
#[macro_export]
macro_rules! TEMPLATE_CHILD_ACC_CREATE {
($coint:tt, $($args:tt)*) => {
fmt!(
TEMPLATE_SCRIPT_MAIN,
imports = format!(r#"
use 0x1::{coin_type};
use 0x1::AccountCreationScripts;
"#, coin_type = $coint),
main_args = "s: signer",
main_body = fmt!("AccountCreationScripts::create_child_vasp_account<{coin_type}>(s, {child_address}, {auth_key_prefix}, {all_currencies}, {initial_bal});", coin_type = $coint, $($args)*)
)
}
}
use std::os::raw::c_int;
// Protocol return codes (RC_*), kept as raw C ints for FFI compatibility.
// The values are dictated by the wire protocol and must not be renumbered.
#[allow(dead_code)] pub const RC_SUCCESS: c_int = 0;
#[allow(dead_code)] pub const RC_CITIZENSHIP_EXPIRED: c_int = 1;
#[allow(dead_code)] pub const RC_LAND_LIMIT_EXCEEDED: c_int = 2;
#[allow(dead_code)] pub const RC_NO_SUCH_CITIZEN: c_int = 3;
#[allow(dead_code)] pub const RC_LICENSE_PASSWORD_CONTAINS_SPACE: c_int = 5;
#[allow(dead_code)] pub const RC_LICENSE_PASSWORD_TOO_LONG: c_int = 6;
#[allow(dead_code)] pub const RC_LICENSE_PASSWORD_TOO_SHORT: c_int = 7;
#[allow(dead_code)] pub const RC_LICENSE_RANGE_TOO_LARGE: c_int = 8;
#[allow(dead_code)] pub const RC_LICENSE_RANGE_TOO_SMALL: c_int = 9;
#[allow(dead_code)] pub const RC_LICENSE_USERS_TOO_LARGE: c_int = 10;
#[allow(dead_code)] pub const RC_LICENSE_USERS_TOO_SMALL: c_int = 11;
#[allow(dead_code)] pub const RC_INVALID_PASSWORD: c_int = 13;
#[allow(dead_code)] pub const RC_LICENSE_WORLD_TOO_SHORT: c_int = 15;
#[allow(dead_code)] pub const RC_LICENSE_WORLD_TOO_LONG: c_int = 16;
#[allow(dead_code)] pub const RC_INVALID_WORLD: c_int = 20;
#[allow(dead_code)] pub const RC_SERVER_OUTDATED: c_int = 21;
#[allow(dead_code)] pub const RC_WORLD_ALREADY_STARTED: c_int = 22;
#[allow(dead_code)] pub const RC_NO_SUCH_WORLD: c_int = 27;
#[allow(dead_code)] pub const RC_UNAUTHORIZED: c_int = 32;
#[allow(dead_code)] pub const RC_NO_SUCH_LICENSE: c_int = 34;
#[allow(dead_code)] pub const RC_TOO_MANY_WORLDS: c_int = 57;
#[allow(dead_code)] pub const RC_MUST_UPGRADE: c_int = 58;
#[allow(dead_code)] pub const RC_BOT_LIMIT_EXCEEDED: c_int = 59;
#[allow(dead_code)] pub const RC_WORLD_EXPIRED: c_int = 61;
#[allow(dead_code)] pub const RC_CITIZEN_DOES_NOT_EXPIRE: c_int = 62;
#[allow(dead_code)] pub const RC_LICENSE_STARTS_WITH_NUMBER: c_int = 64;
#[allow(dead_code)] pub const RC_NO_SUCH_EJECTION: c_int = 66;
#[allow(dead_code)] pub const RC_NO_SUCH_SESSION: c_int = 67;
#[allow(dead_code)] pub const RC_WORLD_RUNNING: c_int = 72;
#[allow(dead_code)] pub const RC_WORLD_NOT_SET: c_int = 73;
#[allow(dead_code)] pub const RC_NO_SUCH_CELL: c_int = 74;
#[allow(dead_code)] pub const RC_NO_REGISTRY: c_int = 75;
#[allow(dead_code)] pub const RC_CANT_OPEN_REGISTRY: c_int = 76;
#[allow(dead_code)] pub const RC_CITIZEN_DISABLED: c_int = 77;
#[allow(dead_code)] pub const RC_WORLD_DISABLED: c_int = 78;
#[allow(dead_code)] pub const RC_TELEGRAM_BLOCKED: c_int = 85;
#[allow(dead_code)] pub const RC_UNABLE_TO_UPDATE_TERRAIN: c_int = 88;
#[allow(dead_code)] pub const RC_EMAIL_CONTAINS_INVALID_CHAR: c_int = 100;
// NOTE(review): codes 101 and 102 are each shared by two names below
// (EMAIL_ENDS_WITH_BLANK / NO_SUCH_OBJECT, EMAIL_MISSING_DOT /
// NOT_DELETE_OWNER) — presumably context-dependent codes from the
// protocol; confirm against the upstream protocol definition.
#[allow(dead_code)] pub const RC_EMAIL_ENDS_WITH_BLANK: c_int = 101;
#[allow(dead_code)] pub const RC_NO_SUCH_OBJECT: c_int = 101;
#[allow(dead_code)] pub const RC_EMAIL_MISSING_DOT: c_int = 102;
#[allow(dead_code)] pub const RC_NOT_DELETE_OWNER: c_int = 102;
#[allow(dead_code)] pub const RC_EMAIL_MISSING_AT: c_int = 103;
#[allow(dead_code)] pub const RC_EMAIL_STARTS_WITH_BLANK: c_int = 104;
#[allow(dead_code)] pub const RC_EMAIL_TOO_LONG: c_int = 105;
#[allow(dead_code)] pub const RC_EMAIL_TOO_SHORT: c_int = 106;
#[allow(dead_code)] pub const RC_NAME_ALREADY_USED: c_int = 107;
#[allow(dead_code)] pub const RC_NAME_CONTAINS_NONALPHANUMERIC_CHAR: c_int = 108;
#[allow(dead_code)] pub const RC_NAME_CONTAINS_INVALID_BLANK: c_int = 109;
#[allow(dead_code)] pub const RC_NAME_ENDS_WITH_BLANK: c_int = 111;
#[allow(dead_code)] pub const RC_NAME_TOO_LONG: c_int = 112;
#[allow(dead_code)] pub const RC_NAME_TOO_SHORT: c_int = 113;
#[allow(dead_code)] pub const RC_PASSWORD_TOO_LONG: c_int = 115;
#[allow(dead_code)] pub const RC_PASSWORD_TOO_SHORT: c_int = 116;
#[allow(dead_code)] pub const RC_UNABLE_TO_DELETE_CITIZEN: c_int = 124;
#[allow(dead_code)] pub const RC_NUMBER_ALREADY_USED: c_int = 126;
#[allow(dead_code)] pub const RC_NUMBER_OUT_OF_RANGE: c_int = 127;
#[allow(dead_code)] pub const RC_PRIVILEGE_PASSWORD_IS_TOO_SHORT: c_int = 128;
#[allow(dead_code)] pub const RC_PRIVILEGE_PASSWORD_IS_TOO_LONG: c_int = 129;
#[allow(dead_code)] pub const RC_NOT_CHANGE_OWNER: c_int = 203;
#[allow(dead_code)] pub const RC_CANT_FIND_OLD_ELEMENT: c_int = 204;
#[allow(dead_code)] pub const RC_IMPOSTER: c_int = 212;
#[allow(dead_code)] pub const RC_ENCROACHES: c_int = 300;
#[allow(dead_code)] pub const RC_OBJECT_TYPE_INVALID: c_int = 301;
#[allow(dead_code)] pub const RC_TOO_MANY_BYTES: c_int = 303;
#[allow(dead_code)] pub const RC_UNREGISTERED_OBJECT: c_int = 306;
#[allow(dead_code)] pub const RC_ELEMENT_ALREADY_EXISTS: c_int = 308;
#[allow(dead_code)] pub const RC_RESTRICTED_COMMAND: c_int = 309;
#[allow(dead_code)] pub const RC_OUT_OF_BOUNDS: c_int = 311;
#[allow(dead_code)] pub const RC_RESTRICTED_OBJECT: c_int = 313;
#[allow(dead_code)] pub const RC_RESTRICTED_AREA: c_int = 314;
#[allow(dead_code)] pub const RC_NOT_YET: c_int = 401;
#[allow(dead_code)] pub const RC_TIMEOUT: c_int = 402;
#[allow(dead_code)] pub const RC_UNABLE_TO_CONTACT_UNIVERSE: c_int = 404;
#[allow(dead_code)] pub const RC_NO_CONNECTION: c_int = 439;
#[allow(dead_code)] pub const RC_NOT_INITIALIZED: c_int = 444;
#[allow(dead_code)] pub const RC_NO_INSTANCE: c_int = 445;
#[allow(dead_code)] pub const RC_INVALID_ATTRIBUTE: c_int = 448;
#[allow(dead_code)] pub const RC_TYPE_MISMATCH: c_int = 449;
#[allow(dead_code)] pub const RC_STRING_TOO_LONG: c_int = 450;
#[allow(dead_code)] pub const RC_READ_ONLY: c_int = 451;
#[allow(dead_code)] pub const RC_INVALID_INSTANCE: c_int = 453;
#[allow(dead_code)] pub const RC_VERSION_MISMATCH: c_int = 454;
#[allow(dead_code)] pub const RC_QUERY_IN_PROGRESS: c_int = 464;
#[allow(dead_code)] pub const RC_EJECTED: c_int = 466;
#[allow(dead_code)] pub const RC_NOT_WELCOME: c_int = 467;
#[allow(dead_code)] pub const RC_CONNECTION_LOST: c_int = 471;
#[allow(dead_code)] pub const RC_NOT_AVAILABLE: c_int = 474;
#[allow(dead_code)] pub const RC_CANT_RESOLVE_UNIVERSE_HOST: c_int = 500;
#[allow(dead_code)] pub const RC_INVALID_ARGUMENT: c_int = 505;
#[allow(dead_code)] pub const RC_UNABLE_TO_UPDATE_CAV: c_int = 514;
#[allow(dead_code)] pub const RC_UNABLE_TO_DELETE_CAV: c_int = 515;
#[allow(dead_code)] pub const RC_NO_SUCH_CAV: c_int = 516;
#[allow(dead_code)] pub const RC_WORLD_INSTANCE_ALREADY_EXISTS: c_int = 521;
#[allow(dead_code)] pub const RC_WORLD_INSTANCE_INVALID: c_int = 522;
#[allow(dead_code)] pub const RC_PLUGIN_NOT_AVAILABLE: c_int = 523;
#[allow(dead_code)] pub const RC_DATABASE_ERROR: c_int = 600;
// zlib-style error codes surfaced through the protocol.
#[allow(dead_code)] pub const RC_Z_BUF_ERROR: c_int = 4995;
#[allow(dead_code)] pub const RC_Z_MEM_ERROR: c_int = 4996;
#[allow(dead_code)] pub const RC_Z_DATA_ERROR: c_int = 4997;
// The remaining codes are out of numeric order; they appear to have been
// appended later — do not reorder, order carries no meaning.
#[allow(dead_code)] pub const RC_MESSAGE_LENGTH_BAD: c_int = 4;
#[allow(dead_code)] pub const RC_LICENSE_CONTAINS_INVALID_CHAR: c_int = 12;
#[allow(dead_code)] pub const RC_SERVER_OUT_OF_MEMORY: c_int = 17;
#[allow(dead_code)] pub const RC_NOT_LOGGED_IN: c_int = 31;
#[allow(dead_code)] pub const RC_WORLD_ALREADY_EXISTS: c_int = 33;
#[allow(dead_code)] pub const RC_NO_SUCH_ACTING_CITIZEN: c_int = 42;
#[allow(dead_code)] pub const RC_ACTING_PASSWORD_INVALID: c_int = 43;
#[allow(dead_code)] pub const RC_EJECTION_EXPIRED: c_int = 69;
#[allow(dead_code)] pub const RC_ACTING_CITIZEN_EXPIRED: c_int = 70;
#[allow(dead_code)] pub const RC_ALREADY_STARTED: c_int = 71;
#[allow(dead_code)] pub const RC_ACTING_CITIZEN_DISABLED: c_int = 80;
#[allow(dead_code)] pub const RC_TELEGRAM_TOO_LONG: c_int = 86;
#[allow(dead_code)] pub const RC_NAME_DOESNT_EXIST: c_int = 110;
#[allow(dead_code)] pub const RC_NAME_UNUSED: c_int = 114;
#[allow(dead_code)] pub const RC_PASSWORD_WRONG: c_int = 117;
#[allow(dead_code)] pub const RC_UNABLE_TO_DELETE_NAME: c_int = 119;
#[allow(dead_code)] pub const RC_UNABLE_TO_GET_CITIZEN: c_int = 120;
#[allow(dead_code)] pub const RC_UNABLE_TO_INSERT_CITIZEN: c_int = 121;
#[allow(dead_code)] pub const RC_UNABLE_TO_INSERT_NAME: c_int = 122;
#[allow(dead_code)] pub const RC_UNABLE_TO_PUT_CITIZEN_COUNT: c_int = 123;
#[allow(dead_code)] pub const RC_UNABLE_TO_CHANGE_ATTRIBUTE: c_int = 210;
#[allow(dead_code)] pub const RC_INVALID_REQUEST: c_int = 213;
#[allow(dead_code)] pub const RC_CANT_BUILD_HERE: c_int = 216;
#[allow(dead_code)] pub const RC_UNABLE_TO_STORE: c_int = 305;
#[allow(dead_code)] pub const RC_NO_BUILD_RIGHTS: c_int = 310;
#[allow(dead_code)] pub const RC_OUT_OF_MEMORY: c_int = 400;
#[allow(dead_code)] pub const RC_NULL_POINTER: c_int = 403;
#[allow(dead_code)] pub const RC_UNABLE_TO_CONTACT_WORLD: c_int = 405;
#[allow(dead_code)] pub const RC_INVALID_WORLD_NAME: c_int = 406;
#[allow(dead_code)] pub const RC_SEND_FAILED: c_int = 415;
#[allow(dead_code)] pub const RC_RECEIVE_FAILED: c_int = 416;
#[allow(dead_code)] pub const RC_STREAM_EMPTY: c_int = 421;
#[allow(dead_code)] pub const RC_STREAM_MESSAGE_TOO_LONG: c_int = 422;
#[allow(dead_code)] pub const RC_WORLD_NAME_TOO_LONG: c_int = 423;
#[allow(dead_code)] pub const RC_MESSAGE_TOO_LONG: c_int = 426;
#[allow(dead_code)] pub const RC_UNABLE_TO_CREATE_SOCKET: c_int = 428;
#[allow(dead_code)] pub const RC_UNABLE_TO_CONNECT: c_int = 429;
#[allow(dead_code)] pub const RC_UNABLE_TO_SET_NONBLOCKING: c_int = 430;
#[allow(dead_code)] pub const RC_CANT_OPEN_STREAM: c_int = 434;
#[allow(dead_code)] pub const RC_CANT_WRITE_STREAM: c_int = 435;
#[allow(dead_code)] pub const RC_CANT_CLOSE_STREAM: c_int = 436;
#[allow(dead_code)] pub const RC_UNABLE_TO_INITIALIZE_NETWORK: c_int = 442;
#[allow(dead_code)] pub const RC_INCORRECT_MESSAGE_LENGTH: c_int = 443;
#[allow(dead_code)] pub const RC_OUT_BUFFER_FULL: c_int = 446;
#[allow(dead_code)] pub const RC_INVALID_CALLBACK: c_int = 447;
#[allow(dead_code)] pub const RC_IN_BUFFER_FULL: c_int = 461;
#[allow(dead_code)] pub const RC_PROTOCOL_ERROR: c_int = 463;
#[allow(dead_code)] pub const RC_NO_STREAM: c_int = 473;
#[allow(dead_code)] pub const RC_OLD_UNIVERSE: c_int = 487;
#[allow(dead_code)] pub const RC_OLD_WORLD: c_int = 488;
#[allow(dead_code)] pub const RC_WORLD_NOT_RUNNING: c_int = 489;
|
mod functions;
/// Interactive REPL for MathTools: reads a command per iteration and
/// dispatches to the matching routine until the user enters "exit".
fn main() {
    loop {
        println!("Welcome to MathTools, Please ask me what you want me to do?");
        // Lowercase once; trim once in the match scrutinee instead of
        // re-trimming in every branch as the original did.
        let entry = functions::get_input().to_lowercase();
        match entry.trim() {
            "test" => {
                functions::do_thing();
                println!("If it appeared, we are a happy people.");
            }
            "simple" => functions::simple_arithmetic(),
            "rpn" => functions::reverse_polish_notation(1, String::from("")),
            "help" => functions::help(),
            "exit" => break,
            "complex" => functions::complex_arithmetic(),
            _ => println!("not a valid command, type help for more help"),
        }
    }
    println!("Goodbye:");
}
|
use crate::server::mesh_server_handlers as handlers;
use crate::server::mesh_server_models::*;
use crate::server::mesh_server_options::*;
use crate::server::mesh_server_properties::*;
use std::convert::Infallible;
use std::sync::Arc;
use warp::{Filter, Rejection, Reply};
/// Combines all blob-store routes (list, get, create, delete) into one
/// warp filter, cloning the shared options/properties into each route.
pub fn routes(
    opts: Arc<MeshServerOptions>,
    props: Arc<MeshServerProperties>,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    get_blobs(opts.clone(), props.clone())
        .or(get_blob(opts.clone(), props.clone()))
        .or(create_blob(opts.clone(), props.clone()))
        .or(delete_blob(opts.clone(), props.clone()))
}
/// GET v1/blobs
///
/// Lists blobs; accepts `ListQueryOptions` as the query string and hands
/// off to `handlers::get_blobs`.
fn get_blobs(
    opts: Arc<MeshServerOptions>,
    props: Arc<MeshServerProperties>,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    // `warp::path!` already terminates the route with `path::end()`, so the
    // extra `.and(warp::path::end())` the original carried was redundant;
    // dropping it also matches the sibling route builders in this file.
    warp::path!("v1" / "blobs")
        .and(warp::get())
        .and(warp::query::<ListQueryOptions>())
        .and(with_options(opts))
        .and(with_properties(props))
        .and_then(handlers::get_blobs)
}
/// GET v1/blobs/:key
///
/// Fetches a single blob by its string key via `handlers::get_blob`.
fn get_blob(
    opts: Arc<MeshServerOptions>,
    props: Arc<MeshServerProperties>,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    warp::path!("v1" / "blobs" / String)
        .and(warp::get())
        .and(with_options(opts))
        .and(with_properties(props))
        .and_then(handlers::get_blob)
}
/// PUT v1/blobs/:key with body
///
/// Stores the aggregated request body as the blob for `:key` via
/// `handlers::create_blob`.
fn create_blob(
    opts: Arc<MeshServerOptions>,
    props: Arc<MeshServerProperties>,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    warp::path!("v1" / "blobs" / String)
        .and(warp::put())
        .and(warp::body::aggregate())
        .and(with_options(opts))
        .and(with_properties(props))
        .and_then(handlers::create_blob)
}
/// DELETE v1/blobs/:key
///
/// Deletes a blob by key; extra behavior is controlled by
/// `DeleteQueryOptions` in the query string.
fn delete_blob(
    opts: Arc<MeshServerOptions>,
    props: Arc<MeshServerProperties>,
) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
    warp::path!("v1" / "blobs" / String)
        .and(warp::delete())
        .and(warp::query::<DeleteQueryOptions>())
        .and(with_options(opts))
        .and(with_properties(props))
        .and_then(handlers::delete_blob)
}
/// Injects a clone of the shared `MeshServerOptions` into a filter chain
/// so handlers receive it as an argument.
fn with_options(
    opts: Arc<MeshServerOptions>,
) -> impl Filter<Extract = (Arc<MeshServerOptions>,), Error = Infallible> + Clone {
    warp::any().map(move || opts.clone())
}
/// Injects a clone of the shared `MeshServerProperties` into a filter
/// chain so handlers receive it as an argument.
fn with_properties(
    props: Arc<MeshServerProperties>,
) -> impl Filter<Extract = (Arc<MeshServerProperties>,), Error = Infallible> + Clone {
    warp::any().map(move || props.clone())
}
|
#[macro_use]
extern crate error_chain;
extern crate flate2;
extern crate libc;
extern crate libz_sys;
use std::io::prelude::*;
use std::io::{self, BufReader, Cursor};
use flate2::{Compress, Decompress, Flush, Status};
use flate2::raw::mz_stream;
pub use flate2::Compression;
use libz_sys::{Z_OK, inflateReset, inflateSetDictionary};
// libz-sys doesn't expose these, as they're undocumented and not
// generally useful, but per Mark Adler they're what we want:
// http://stackoverflow.com/a/39392530/69326
// The two cfg variants differ only in the declared ABI string.
#[cfg(all(target_env = "msvc", target_pointer_width = "32"))]
extern {
    // Resets deflate state for a new stream while keeping the current
    // compression dictionary (used between MSZIP blocks).
    fn deflateResetKeep(strm: *mut mz_stream) -> libc::c_int;
    // This doesn't seem to actually work right.
    //fn inflateResetKeep(strm: *mut mz_stream) -> libc::c_int;
}
#[cfg(not(all(target_env = "msvc", target_pointer_width = "32")))]
extern "system" {
    fn deflateResetKeep(strm: *mut mz_stream) -> libc::c_int;
    //fn inflateResetKeep(strm: *mut mz_stream) -> libc::c_int;
}
mod errors {
    // Create the Error, ErrorKind, ResultExt, and Result types
    error_chain! {
        errors {
            // A block passed to `write_block` exceeded MAX_BLOCK_SIZE.
            BlockSizeTooLarge
            // A block did not start with the `CK` MS-ZIP signature.
            InvalidBlockSignature
            // zlib reported a buffer-space problem while inflating.
            BufferError
            // Inflation of a block did not reach stream end.
            DecompressionError
        }
        foreign_links {
            Io(::std::io::Error);
            Flate(::flate2::DataError);
        }
    }
}
pub use errors::*;
/// Magic number at the start of MS-ZIP compressed data.
const MSZIP_SIGNATURE: [u8; 2] = [b'C', b'K'];
/// Length in bytes of `MSZIP_SIGNATURE`.
const SIG_LEN: usize = 2;
/// Maximum uncompressed bytes in an input chunk: 32KB.
pub const MAX_CHUNK: usize = 32768;
/// The maximum size of an MSZIP compressed block: 32KB + 12 bytes.
pub const MAX_BLOCK_SIZE: usize = MAX_CHUNK + 12;
/// Drives the deflate compressor `data` over all of `obj`, writing the
/// compressed stream into `dst`.
///
/// Returns `(bytes_consumed_from_obj, bytes_written_to_dst)`. Once the
/// reader reports EOF, `Flush::Finish` is requested so the deflate stream
/// is terminated inside this one call.
fn run_compress<R: BufRead>(obj: &mut R, data: &mut Compress, mut dst: &mut [u8]) -> io::Result<(usize, usize)> {
    // Cribbed from flate2-rs. Wish this was public API!
    let mut total_read = 0;
    let mut total_consumed = 0;
    loop {
        let (read, consumed, ret, eof);
        {
            let input = try!(obj.fill_buf());
            eof = input.is_empty();
            let before_out = data.total_out();
            let before_in = data.total_in();
            let flush = if eof {Flush::Finish} else {Flush::None};
            ret = data.compress(input, &mut dst, flush);
            // NOTE: despite its name, `read` counts bytes *written* to `dst`
            // (delta of total_out); `consumed` counts input bytes taken.
            read = (data.total_out() - before_out) as usize;
            consumed = (data.total_in() - before_in) as usize;
        }
        obj.consume(consumed);
        total_consumed += consumed;
        total_read += read;
        match ret {
            // If we haven't ready any data and we haven't hit EOF yet,
            // then we need to keep asking for more data because if we
            // return that 0 bytes of data have been read then it will
            // be interpreted as EOF.
            Status::Ok |
            Status::BufError if read == 0 && !eof && dst.len() > 0 => {
                continue
            }
            Status::Ok |
            Status::BufError |
            Status::StreamEnd => return Ok((total_consumed, total_read)),
        }
    }
}
/// A single MSZIP block of compressed data.
pub struct MSZipBlock<'a> {
    /// The original size of the input data that was compressed.
    pub original_size: usize,
    /// The compressed data, including the leading `CK` signature.
    pub data: &'a [u8],
}
/// An MSZIP compressor.
///
/// This structure will read data from an underlying `Read` stream and
/// produce MSZIP-compressed blocks.
pub struct MSZipEncoder<R: Read> {
    // Buffered at MAX_CHUNK so each fill_buf yields at most one chunk.
    reader: BufReader<R>,
    // Raw (headerless) deflate state, reused across blocks.
    compress: Compress,
    // Scratch output buffer sized to the largest legal block.
    out_buffer: Vec<u8>,
}
impl<R: Read> MSZipEncoder<R> {
    /// Creates a new encoder which will read uncompressed data from `reader`
    /// and emit compressed blocks when `read_block` is called.
    pub fn new(reader: R, level: Compression) -> MSZipEncoder<R> {
        MSZipEncoder {
            reader: BufReader::with_capacity(MAX_CHUNK, reader),
            compress: Compress::new(level, false),
            out_buffer: vec![0; MAX_BLOCK_SIZE],
        }
    }
    /// Reads a single MSZIP block of compressed data.
    ///
    /// Returns a block with `original_size == 0` and an empty `data` slice
    /// once the underlying reader is exhausted.
    pub fn read_block<'a>(&'a mut self) -> Result<MSZipBlock<'a>> {
        let (read, written) = {
            let mut chunk = Cursor::new(try!(self.reader.fill_buf()));
            let nbytes = chunk.get_ref().len();
            if nbytes == 0 {
                return Ok(MSZipBlock {
                    original_size: 0,
                    data: &self.out_buffer[..0],
                });
            }
            // Prepend the MS-ZIP signature to each chunk.
            // (Dropped the stray leading `&` here: it borrowed the `()`
            // returned by copy_from_slice and did nothing.)
            self.out_buffer[..SIG_LEN].copy_from_slice(&MSZIP_SIGNATURE);
            try!(run_compress(&mut chunk, &mut self.compress, &mut self.out_buffer[SIG_LEN..]))
        };
        self.reader.consume(read);
        unsafe {
            // Reset the deflate compressor for the next block, since we've
            // asked it to `Flush::Finish`, but keep its compression dictionary.
            assert_eq!(Z_OK, deflateResetKeep(self.compress.get_raw()));
        }
        Ok(MSZipBlock {
            original_size: read,
            data: &self.out_buffer[..written + SIG_LEN],
        })
    }
}
/// An MSZIP decompressor.
///
/// When MSZIP-compressed blocks are passed to this structure's `write_block`
/// method, the data will be decompressed and written to the underlying
/// `Write` stream.
pub struct MSZipDecoder<W: Write> {
    writer: W,
    // Raw (headerless) inflate state, re-seeded between blocks.
    decompress: Decompress,
    // Scratch buffer for one chunk of decompressed output.
    buffer: Vec<u8>,
}
impl<W: Write> MSZipDecoder<W> {
    /// Creates a new decoder which will write uncompressed data to `writer`.
    pub fn new(writer: W) -> MSZipDecoder<W> {
        MSZipDecoder {
            writer: writer,
            decompress: Decompress::new(false),
            buffer: vec![0; MAX_CHUNK],
        }
    }
    /// Decompresses the single MSZIP block `block` and writes the result
    /// to the underlying writer.
    ///
    /// # Errors
    /// - `BlockSizeTooLarge` when `block` exceeds `MAX_BLOCK_SIZE`.
    /// - `InvalidBlockSignature` when the block is too short or does not
    ///   start with the `CK` signature.
    /// - `DecompressionError` / `BufferError` when inflation fails.
    pub fn write_block(&mut self, block: &[u8]) -> Result<()> {
        if block.len() > MAX_BLOCK_SIZE {
            bail!(ErrorKind::BlockSizeTooLarge);
        }
        // Length guard added: a block shorter than the signature would
        // previously panic on the slice below instead of erroring.
        if block.len() < MSZIP_SIGNATURE.len()
            || &block[..MSZIP_SIGNATURE.len()] != MSZIP_SIGNATURE
        {
            bail!(ErrorKind::InvalidBlockSignature);
        }
        // (Leftover debug println!s removed from this library path.)
        let last = self.decompress.total_out();
        match try!(self.decompress.decompress(&block[MSZIP_SIGNATURE.len()..],
                                              &mut self.buffer,
                                              Flush::Finish)) {
            Status::StreamEnd => {
                let written = (self.decompress.total_out() - last) as usize;
                let decompressed = &self.buffer[..written];
                try!(self.writer.write_all(decompressed));
                unsafe {
                    // Reset the decompressor for the next block, but keep
                    // its decompression dictionary by re-seeding it with
                    // the bytes we just produced.
                    // We should use inflateResetKeep, but either it doesn't
                    // work or it's not being used right.
                    //assert_eq!(Z_OK, inflateResetKeep(self.decompress.get_raw()));
                    assert_eq!(Z_OK, inflateReset(self.decompress.get_raw()));
                    assert_eq!(Z_OK, inflateSetDictionary(self.decompress.get_raw(), decompressed.as_ptr(), decompressed.len() as u32));
                }
            }
            Status::Ok => bail!(ErrorKind::DecompressionError),
            Status::BufError => bail!(ErrorKind::BufferError),
        }
        Ok(())
    }
    /// Returns the underlying writer.
    pub fn finish(self) -> Result<W> {
        Ok(self.writer)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Cursor;
    use std::u8;
    #[macro_use]
    #[cfg(windows)]
    /// Wrappers for the Microsoft compression API so that on Windows we
    /// can test interop with the system implementation.
    mod sys {
        #![allow(non_camel_case_types)]
        extern crate winapi;
        use super::super::*;
        use std::io::Cursor;
        use std::mem;
        use std::ptr;
        use std::result;
        use self::winapi::minwindef::{BOOL, DWORD, LPVOID, TRUE, FALSE};
        use self::winapi::winnt::{HANDLE, PVOID};
        use self::winapi::basetsd::{SIZE_T, PSIZE_T};
        // Cabinet API algorithm id for MSZIP and the flag selecting raw
        // (block-at-a-time) mode.
        const COMPRESS_ALGORITHM_MSZIP: DWORD = 2;
        const COMPRESS_RAW: DWORD = 1 << 29;
        type PCOMPRESS_ALLOCATION_ROUTINES = LPVOID;
        type COMPRESSOR_HANDLE = HANDLE;
        type DECOMPRESSOR_HANDLE = HANDLE;
        type PCOMPRESSOR_HANDLE = *mut COMPRESSOR_HANDLE;
        type PDECOMPRESSOR_HANDLE = *mut DECOMPRESSOR_HANDLE;
        #[link(name = "cabinet")]
        extern "system" {
            fn CreateCompressor(Algorithm: DWORD,
                                AllocationRoutines: LPVOID,
                                CompressorHandle: PCOMPRESSOR_HANDLE) -> BOOL;
            fn CloseCompressor(CompressorHandle: COMPRESSOR_HANDLE) -> BOOL;
            fn Compress(CompressorHandle: COMPRESSOR_HANDLE,
                        UncompressedData: PVOID,
                        UncompressedDataSize: SIZE_T,
                        CompressedBuffer: PVOID,
                        CompressedBufferSize: SIZE_T,
                        CompressedDataSize: PSIZE_T) -> BOOL;
            fn CreateDecompressor(Algorithm: DWORD,
                                  AllocationRoutines: PCOMPRESS_ALLOCATION_ROUTINES,
                                  DecompressorHandle: PDECOMPRESSOR_HANDLE) -> BOOL;
            fn CloseDecompressor(DecompressorHandle: DECOMPRESSOR_HANDLE) -> BOOL;
            fn Decompress(DecompressorHandle: DECOMPRESSOR_HANDLE,
                          CompressedData: PVOID,
                          CompressedDataSize: SIZE_T,
                          UncompressedBuffer: PVOID,
                          UncompressedBufferSize: SIZE_T,
                          UncompressedDataSize: PSIZE_T) -> BOOL;
        }
        /// Compress `data` with the Microsoft compression API.
        // NOTE(review): `mem::uninitialized` is deprecated and UB-prone;
        // `MaybeUninit` would be the modern replacement — confirm before
        // modernizing this Windows-only test support code.
        fn do_system_compress(data: &[u8]) -> result::Result<Vec<Vec<u8>>, &'static str> {
            let h = unsafe {
                let mut h: COMPRESSOR_HANDLE = mem::uninitialized();
                if CreateCompressor(COMPRESS_ALGORITHM_MSZIP | COMPRESS_RAW, ptr::null_mut(), &mut h as PCOMPRESSOR_HANDLE) == TRUE {
                    h
                } else {
                    return Err("CreateCompressor failed");
                }
            };
            // Result is a vec of blocks, each a vec of bytes.
            let mut result = vec!();
            for chunk in data.chunks(MAX_CHUNK) {
                // Allocate compression buffer.
                let mut buf = vec![0; MAX_BLOCK_SIZE];
                // Run compression
                unsafe {
                    let mut compressed_size: SIZE_T = mem::uninitialized();
                    if Compress(h, chunk.as_ptr() as PVOID, chunk.len() as SIZE_T, buf.as_ptr() as PVOID, buf.len() as SIZE_T, &mut compressed_size as PSIZE_T) == FALSE {
                        return Err("Compress failed");
                    }
                    buf.resize(compressed_size as usize, 0);
                }
                result.push(buf);
            }
            unsafe { CloseCompressor(h); }
            Ok(result)
        }
        /// Decompress `chunks` through the Microsoft compression API.
        fn do_system_decompress(chunks: &Vec<(usize, Vec<u8>)>) -> result::Result<Vec<u8>, &'static str> {
            let h = unsafe {
                let mut h: DECOMPRESSOR_HANDLE = mem::uninitialized();
                if CreateDecompressor(COMPRESS_ALGORITHM_MSZIP | COMPRESS_RAW, ptr::null_mut(), &mut h as PDECOMPRESSOR_HANDLE) == TRUE {
                    h
                } else {
                    return Err("CreateDecompressor failed");
                }
            };
            let mut buf = vec!();
            // Decompress each chunk in turn..
            for &(original_size, ref chunk) in chunks.iter() {
                assert!(original_size <= MAX_CHUNK);
                // Make space in the output buffer.
                let last = buf.len();
                buf.resize(last + original_size, 0);
                unsafe {
                    if Decompress(h, chunk.as_ptr() as PVOID, chunk.len() as SIZE_T, buf[last..].as_mut_ptr() as PVOID, original_size as SIZE_T, ptr::null_mut()) == FALSE {
                        return Err("Decompress failed");
                    }
                }
            }
            unsafe { CloseDecompressor(h) };
            Ok(buf)
        }
        /// Run `data` through `MSZipEncoder` and then the system decompressor
        /// and assert that the end result is the same.
        pub fn roundtrip_compress_system_decompressor(data: &[u8]) {
            let mut compress = MSZipEncoder::new(Cursor::new(data), Compression::Default);
            let mut chunks = vec!();
            loop {
                match compress.read_block() {
                    Ok(ref block) if block.data.len() > 0 => chunks.push((block.original_size, block.data.to_owned())),
                    _ => break,
                }
            }
            let decompressed = do_system_decompress(&chunks).unwrap();
            assert_eq!(data, &decompressed[..]);
        }
        /// Run `data` through the system compressor and then `MSZipDecoder`
        /// and assert that the end result is the same.
        pub fn roundtrip_system_compressor_decompress(data: &[u8]) {
            let chunks = do_system_compress(&data).unwrap();
            println!("compressed to {} chunks", chunks.len());
            let mut decompress = MSZipDecoder::new(Cursor::new(vec!()));
            for chunk in chunks {
                decompress.write_block(&chunk).unwrap();
            }
            let decompressed = decompress.finish().unwrap().into_inner();
            assert_eq!(data, &decompressed[..]);
        }
        // Insert tests for round-tripping through the system compressor
        // and decompressor.
        macro_rules! sys_tests {
            ($e:expr) => {
                use super::sys::{roundtrip_compress_system_decompressor, roundtrip_system_compressor_decompress};
                #[test]
                fn test_roundtrip_system_decompressor() {
                    let data = $e;
                    roundtrip_compress_system_decompressor(&data);
                }
                #[test]
                fn test_roundtrip_system_compressor() {
                    let data = $e;
                    roundtrip_system_compressor_decompress(&data);
                }
            }
        }
    }
    // On non-Windows targets `sys_tests!` expands to nothing.
    #[macro_use]
    #[cfg(not(windows))]
    mod sys {
        macro_rules! sys_tests {
            ($e:expr) => {}
        }
    }
    // Make it easy to run the same tests on different data.
    macro_rules! t {
        ($name:ident, $e:expr) => {
            mod $name {
                #![allow(unused_imports)]
                use super::super::MAX_CHUNK;
                use super::{roundtrip_compress, test_data};
                #[test]
                fn test_roundtrip_compress() {
                    let data = $e;
                    roundtrip_compress(&data);
                }
                sys_tests!($e);
            }
        }
    }
    /// Run `data` through `MSZipEncoder` and `MSZipDecoder` in sequence
    /// and assert that the end result is the same.
    fn roundtrip_compress(data: &[u8]) {
        let mut compress = MSZipEncoder::new(Cursor::new(data), Compression::Default);
        let mut decompress = MSZipDecoder::new(Cursor::new(vec!()));
        loop {
            match compress.read_block() {
                Ok(ref block) if block.data.len() > 0 => decompress.write_block(block.data).unwrap(),
                _ => break,
            }
        }
        match decompress.finish() {
            Ok(result) => {
                let result = result.into_inner();
                assert_eq!(data, &result[..]);
            }
            Err(e) => assert!(false, "Failed to finish decompression: {}", e),
        }
    }
    /// Generate a `Vec<u8>` of test data of `size` bytes.
    pub fn test_data(size: usize) -> Vec<u8> {
        (0..size).map(|v| (v % (u8::max_value() as usize + 1)) as u8).collect::<Vec<u8>>()
    }
    t!(zeroes, vec![0; 1000]);
    t!(zeroes_two_blocks, vec![0; MAX_CHUNK + 1000]);
    t!(exactly_one_block, test_data(MAX_CHUNK));
    t!(one_block_less_a_byte, test_data(MAX_CHUNK - 1));
    t!(one_block_plus_a_byte, test_data(MAX_CHUNK + 1));
    t!(nonzero, test_data(1000));
    t!(nonzero_two_blocks, test_data(MAX_CHUNK + 1000));
    t!(nonzero_many_blocks, test_data(MAX_CHUNK * 8));
}
|
use crate::switch::ToCKBCellDataTuple;
use crate::utils::verifier::{verify_capacity, verify_since_by_value};
use crate::utils::{
config::SINCE_SIGNER_TIMEOUT,
types::{Error, ToCKBCellDataView},
};
use core::result::Result;
/// Verifies a signer-timeout transition of a toCKB cell.
///
/// Checks, in order: the transaction `since` matches
/// `SINCE_SIGNER_TIMEOUT`, cell capacity is preserved, and all invariant
/// data fields are unchanged between the input and output cell.
///
/// Panics (via `expect`) if either side of the tuple is missing its
/// toCKB cell — callers guarantee both are present.
pub fn verify(toCKB_data_tuple: &ToCKBCellDataTuple) -> Result<(), Error> {
    let input_data = toCKB_data_tuple
        .0
        .as_ref()
        .expect("inputs should contain toCKB cell");
    let output_data = toCKB_data_tuple
        .1
        .as_ref()
        .expect("outputs should contain toCKB cell");
    verify_since_by_value(SINCE_SIGNER_TIMEOUT)?;
    verify_capacity()?;
    verify_data(input_data, output_data)?;
    Ok(())
}
fn verify_data(
input_data: &ToCKBCellDataView,
output_data: &ToCKBCellDataView,
) -> Result<(), Error> {
if input_data.get_raw_lot_size() != output_data.get_raw_lot_size()
|| input_data.user_lockscript != output_data.user_lockscript
|| input_data.x_lock_address != output_data.x_lock_address
|| input_data.signer_lockscript != output_data.signer_lockscript
|| input_data.x_unlock_address != output_data.x_unlock_address
|| input_data.redeemer_lockscript != output_data.redeemer_lockscript
|| input_data.x_extra != output_data.x_extra
{
return Err(Error::InvariantDataMutated);
}
Ok(())
}
|
use crate::action::*;
use crate::agent::AgentManager;
use crate::display::Display;
use crate::grid::Grid;
use piston::input::RenderEvent;
pub struct Engine {
    /// 2D grid, which is used for collision detection and 'tagging'
    grid: Grid,
    /// All required information on choosing actions
    ac: ActionContext,
    /// All agents and agent context and stats
    am: AgentManager,
    /// Window/renderer; present only when `show_graphics` is true.
    display: Option<Display>,
    /// Whether to drive the piston event loop and render each step.
    show_graphics: bool,
    /// Number of completed simulation steps.
    step_counter: usize,
}
impl Engine {
    /// Creates a new engine; a `Display` is only constructed when
    /// `show_graphics` is requested.
    pub fn new(grid: Grid, ac: ActionContext, am: AgentManager, show_graphics: bool) -> Engine {
        // Build the option as an expression instead of mutating a None.
        let display = if show_graphics {
            Some(Display::new())
        } else {
            None
        };
        Engine {
            grid,
            ac,
            am,
            display,
            show_graphics,
            step_counter: 0,
        }
    }
    /// Advances the simulation one tick: agents act, the grid applies the
    /// resulting action log, and the step counter increments.
    fn update(&mut self) {
        self.am.perform_actions(&self.grid, &self.ac);
        self.grid.update(self.am.flush_log());
        self.step_counter += 1;
    }
    /// Runs one step. With graphics enabled the simulation only advances
    /// when the event loop yields an event; render events are drawn.
    pub fn step(&mut self) {
        if self.show_graphics {
            let display: &mut Display = self.display.as_mut().unwrap();
            let maybe_e = display.events.next(&mut display.window);
            // `if let` replaces the is_some()/unwrap() pair.
            if let Some(e) = maybe_e {
                self.update();
                let render_objects = self.am.get_render_info();
                // Re-borrow: `update` needed `&mut self`, which ended the
                // earlier borrow of `self.display`.
                let display: &mut Display = self.display.as_mut().unwrap();
                if let Some(args) = e.render_args() {
                    display.graphics.render(&args, &render_objects);
                }
            }
        } else {
            self.update();
        }
    }
    /// Prints step and tag counters to stdout.
    pub fn stats(&mut self) {
        println!(
            "Steps done: {} \nNumber of times tagged: {}",
            self.step_counter,
            self.am.get_tagged_count()
        )
    }
}
|
pub mod cli_opt;
pub mod db;
pub mod op;
pub mod task;
use rusqlite::Connection;
use std::collections::HashSet;
use std::sync::{Arc, Mutex};
pub type MutConn = Arc<Mutex<Connection>>;
/// A set of unique label strings.
pub type LabelList = HashSet<String>;
/// Builds a `LabelList` from `labels`, discarding duplicates.
///
/// `HashSet::insert` already ignores values that are present, so the
/// original `contains`-then-`insert` loop reduces to a plain `collect`.
pub fn labellist_from_vec(labels: Vec<String>) -> LabelList {
    labels.into_iter().collect()
}
//use std::convert::Infallible;
//use std::str::FromStr;
//impl FromStr for LabelList {
// type Err = Infallible;
// fn from_str(s: &str) -> Result<Self, Self::Err> {
// let result = HashSet::new();
// result.insert(s.to_string());
// Ok(result)
// }
//}
|
use dotenv::dotenv;
use once_cell::sync::Lazy;
use std::env;
/// Application configuration resolved from the environment at startup.
#[derive(Debug)]
pub struct Config {
    // Read from DATABASE_URL.
    pub database_url: String,
    // Read from ALLOWED_ORIGIN (CORS).
    pub allowed_origin: String,
    // Absolute path of the `public` directory under the current directory.
    pub public_dir: String,
    // Read from FIREBASE_PROJECT_ID.
    pub firebase_project_id: String,
}
impl Config {
    /// Loads configuration from the process environment, first applying
    /// any `.env` file found by `dotenv` (a missing `.env` is ignored).
    ///
    /// # Panics
    /// Panics when `DATABASE_URL`, `ALLOWED_ORIGIN`, or
    /// `FIREBASE_PROJECT_ID` is not defined.
    fn from_env() -> Config {
        dotenv().ok();
        let database_url =
            env::var("DATABASE_URL").expect("environment variable DATABASE_URL is not defined");
        let allowed_origin =
            env::var("ALLOWED_ORIGIN").expect("environment variable ALLOWED_ORIGIN is not defined");
        let public_dir = public_dir();
        let firebase_project_id = env::var("FIREBASE_PROJECT_ID")
            .expect("environment variable FIREBASE_PROJECT_ID is not defined");
        Config {
            database_url,
            allowed_origin,
            public_dir,
            firebase_project_id,
        }
    }
}
/// Returns the path of the `public` directory under the current working
/// directory, as a `String`.
///
/// # Panics
/// Panics when the current directory cannot be determined or when the
/// resulting path is not valid UTF-8. (Typo fixed in the second expect
/// message: "coudn't covert" -> "couldn't convert".)
fn public_dir() -> String {
    let mut current_dir = std::env::current_dir().expect("couldn't get current directory.");
    current_dir.push("public");
    current_dir
        .into_os_string()
        .into_string()
        .expect("couldn't convert public directory to String.")
}
/// Process-wide configuration, loaded lazily on first access.
pub static CONFIG: Lazy<Config> = Lazy::new(|| Config::from_env());
|
use std::fmt;
use strum_macros::{EnumIter, EnumString};
/// Two-letter country codes — presumably FIPS 10-4 codes given the enum
/// name (e.g. `GM` = Germany, `UK` = United Kingdom); confirm against the
/// generator. `None` is a placeholder for "no country".
///
/// The variant list between the START/END markers is machine-generated;
/// do not hand-edit it.
#[derive(Debug, PartialEq, EnumString, EnumIter)]
pub enum Fips {
    None,
    // ENUM START
    AE,
    AF,
    AG,
    AJ,
    AL,
    AM,
    AN,
    AO,
    AR,
    AU,
    BC,
    BE,
    BG,
    BH,
    BK,
    BL,
    BM,
    BN,
    BO,
    BR,
    BT,
    BU,
    BX,
    BY,
    CA,
    CB,
    CD,
    CF,
    CG,
    CH,
    CI,
    CM,
    CO,
    CS,
    CT,
    CU,
    DA,
    DJ,
    DR,
    EC,
    EG,
    EI,
    EK,
    EN,
    ER,
    ES,
    ET,
    EZ,
    FG,
    FI,
    FR,
    GA,
    GB,
    GG,
    GH,
    GI,
    GM,
    GR,
    GT,
    GV,
    GY,
    HA,
    HO,
    HR,
    HU,
    ID,
    IN,
    IR,
    IS,
    IT,
    IV,
    IZ,
    JO,
    KE,
    KG,
    KN,
    KS,
    KU,
    KZ,
    LA,
    LE,
    LG,
    LH,
    LI,
    LO,
    LS,
    LT,
    LU,
    LY,
    MD,
    MG,
    MI,
    MJ,
    MK,
    ML,
    MN,
    MO,
    MR,
    MU,
    MX,
    MY,
    MZ,
    NG,
    NI,
    NL,
    NN,
    NO,
    NP,
    NS,
    NT,
    NU,
    PA,
    PE,
    PK,
    PL,
    PM,
    PO,
    PP,
    PU,
    QA,
    RI,
    RN,
    RO,
    RS,
    RW,
    SA,
    SF,
    SG,
    SI,
    SL,
    SM,
    SO,
    SP,
    SU,
    SW,
    SY,
    SZ,
    TH,
    TI,
    TO,
    TS,
    TT,
    TU,
    TX,
    TZ,
    UG,
    UK,
    UP,
    US,
    UV,
    UY,
    UZ,
    VE,
    VM,
    VT,
    WA,
    WE,
    WI,
    WZ,
    YI,
    YM,
    ZA,
    ZI,
    // ENUM END
}
impl Fips {
    /// Returns `true` when this is the `Fips::None` placeholder variant.
    pub fn is_none(&self) -> bool {
        // `matches!` replaces the two-arm match with the idiomatic form.
        matches!(*self, Fips::None)
    }
}
impl fmt::Display for Fips {
    /// Formats the code via its `Debug` representation, i.e. the bare
    /// variant name ("US", "None", ...).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
|
use super::{
models::{Account, AccountWithId},
rand_str,
};
use crate::database::{schema::accounts::dsl::*, Database};
use anyhow::{bail, Context, Result};
use diesel::{associations::HasTable, prelude::*, SqliteConnection};
use log::{debug, warn};
use once_cell::sync::Lazy;
use std::io::{self, BufRead, Write};
use strum::AsRefStr;
use tokio::sync::Mutex;
/// Role stored in the accounts table; `AsRefStr` turns each variant into
/// the string written to the `role` column.
#[allow(dead_code)]
#[derive(AsRefStr)]
pub(super) enum AccountRole {
    Root,
    Admin,
    User,
}
/// Authentication backend for an account; `AsRefStr` turns each variant
/// into the string stored in the database.
#[allow(dead_code)]
#[derive(AsRefStr)]
pub(super) enum AccountType {
    Internal,
    Ldap,
}
/// Global password salt, loaded from (or generated for) the root account
/// by `setup_root` at startup.
pub static SALT: Lazy<Mutex<String>> = Lazy::new(|| Mutex::new(String::new()));
/// Ensures a root account exists.
///
/// If one is already present, its salt is loaded into the global `SALT`.
/// Otherwise the user is prompted interactively for a username and
/// password, a fresh salt is generated, and the account is inserted.
pub async fn setup_root(conn: &SqliteConnection) -> Result<()> {
    let records = accounts
        .filter(role.eq(AccountRole::Root.as_ref()))
        .load::<AccountWithId>(conn)?;
    // `first()` replaces the len() > 0 check plus manual indexing.
    if let Some(root) = records.first() {
        let mut salt_str = SALT.lock().await;
        *salt_str = root.salt.clone();
        debug!("user salt is {}", salt_str);
        return Ok(());
    }
    // First run: create the root account interactively.
    print!("input super admin username: ");
    io::stdout().flush().unwrap();
    let stdin = io::stdin();
    let mut root_name = String::new();
    stdin.lock().read_line(&mut root_name).unwrap();
    let root_pass = rpassword::prompt_password_stdout("input super admin password: ")?;
    *SALT.lock().await = rand_str(32);
    let mut account = Account::new(
        root_name.trim(),
        AccountType::Internal.as_ref(),
        AccountRole::Root.as_ref(),
    );
    account.password(root_pass).await;
    diesel::insert_into(accounts::table())
        .values(account)
        .execute(conn)?;
    Ok(())
}
/// Look up the account whose `token` column equals `tk`.
///
/// # Errors
/// Fails when the token is unknown, when more than one row carries the same
/// token (corrupt data), or when the query itself fails.
pub fn get_user_by_token(db: &Database, tk: impl AsRef<str>) -> Result<Account> {
    let records = accounts
        .filter(token.eq(tk.as_ref()))
        .load::<AccountWithId>(&db.connection)?;
    match records.len() {
        // `next()` instead of `nth(0)`; unwrap is safe — len() == 1.
        1 => Ok(records.into_iter().next().unwrap().into()),
        0 => bail!("invalid token {}", tk.as_ref()),
        _ => bail!(
            // typo fixed: "then" -> "than"
            "more than one user has same token {}, impossible!",
            tk.as_ref()
        ),
    }
}
/// Look up an account by `username`.
///
/// Returns `Ok(None)` (and logs a warning) when no such user exists.
///
/// # Errors
/// Fails when the query fails or when the username matches more than one
/// row (corrupt database).
pub fn get_user_by_name(db: &Database, name: impl AsRef<str>) -> Result<Option<Account>> {
    let records = accounts
        .filter(username.eq(name.as_ref()))
        .load::<AccountWithId>(&db.connection)?;
    match records.len() {
        // `next()` instead of `nth(0)`; unwrap is safe — len() == 1.
        1 => Ok(Some(records.into_iter().next().unwrap().into())),
        0 => {
            warn!("{} not found", name.as_ref());
            Ok(None)
        }
        _ => bail!(
            "found user {} more than once, database corrupted",
            name.as_ref()
        ),
    }
}
impl Account {
    /// Persist changes to an existing account row, matched by username.
    pub fn update(&self, db: &Database) -> anyhow::Result<()> {
        let row = accounts.filter(username.eq(&self.username));
        diesel::update(row)
            .set(self)
            .execute(&db.connection)
            .context("save account to db failed")?;
        Ok(())
    }
    /// Insert this account as a new row in the accounts table.
    pub fn insert(&self, db: &Database) -> anyhow::Result<()> {
        let stmt = diesel::insert_into(accounts::table()).values(self);
        stmt.execute(&db.connection)
            .context("insert account to db failed")?;
        Ok(())
    }
}
|
use std::fmt::{self, Write};
use std::str::FromStr;
use std::iter::Iterator;
/// One character of the ASCII path diagram.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum Cell {
    Vertical, // |
    Horizontal, // -
    DirectionChange, // +
    Letter(char), // ex. A
    Empty, // space or any unrecognised character
}
/// Current travel direction while walking the diagram.
#[derive(Debug)]
enum Direction {
    Up,
    Right,
    Down,
    Left,
}
/// A parsed ASCII diagram, walked cell-by-cell via its `Iterator` impl.
pub struct Diagram {
    source: Vec<Vec<Cell>>, // parsed grid, indexed [y][x]; rows may be ragged
    direction: Direction, // direction of travel (starts as Down)
    current: Option<(usize, usize)> // (x, y) of current cell; None before the walk starts
}
impl Diagram {
    /// Cell at (x, y), or `Cell::Empty` when the coordinates fall outside
    /// the (possibly ragged) grid.
    #[inline]
    fn probe(&self, x: usize, y: usize) -> Cell {
        self.source
            .get(y)
            .and_then(|row| row.get(x))
            .copied()
            .unwrap_or(Cell::Empty)
    }
    // Based on the `self.current` (should be '+'): a vertical arrival turns
    // left or right, a horizontal arrival turns up or down.
    //
    // Panics when `self.current` is None or when no neighbour continues the
    // path (malformed diagram).
    fn get_direction(&self) -> Direction {
        let (x, y) = self.current.unwrap();
        match self.direction {
            Direction::Up | Direction::Down => {
                // Guard x == 0: `x - 1` would underflow the usize and panic.
                if x > 0 && Cell::Empty != self.probe(x - 1, y) {
                    return Direction::Left;
                }
                if Cell::Empty != self.probe(x + 1, y) {
                    return Direction::Right;
                }
            },
            Direction::Left | Direction::Right => {
                // Guard y == 0 for the same reason.
                if y > 0 && Cell::Empty != self.probe(x, y - 1) {
                    return Direction::Up;
                }
                if Cell::Empty != self.probe(x, y + 1) {
                    return Direction::Down;
                }
            }
        }
        unreachable!()
    }
}
impl Iterator for Diagram {
    type Item = Cell;
    /// Advance one cell along the path.
    ///
    /// The first call locates the entry point (first non-empty cell of the
    /// top row); later calls step in `self.direction`, turning whenever the
    /// current cell is '+'. Returns `None` once the path reaches empty
    /// space or steps off any edge of the grid.
    fn next(&mut self) -> Option<Self::Item> {
        match self.current {
            None => {
                let y = 0;
                // position() guarantees a non-Empty cell, so the old
                // assert_ne! was redundant.
                let x = self.source[y].iter()
                    .position(|cell| *cell != Cell::Empty)
                    .unwrap();
                self.current = Some((x, y));
                Some(self.source[y][x])
            },
            Some((x, y)) => {
                if let Cell::DirectionChange = self.source[y][x] {
                    self.direction = self.get_direction();
                }
                // checked_sub: stepping off the top/left edge ends the walk
                // instead of underflowing the usize coordinate.
                let (new_x, new_y) = match self.direction {
                    Direction::Up => (x, y.checked_sub(1)?),
                    Direction::Right => (x + 1, y),
                    Direction::Down => (x, y + 1),
                    Direction::Left => (x.checked_sub(1)?, y),
                };
                // probe() treats out-of-bounds as Empty, so walking past the
                // right/bottom edge terminates instead of panicking on
                // direct indexing.
                match self.probe(new_x, new_y) {
                    Cell::Empty => None,
                    value => {
                        self.current = Some((new_x, new_y));
                        Some(value)
                    }
                }
            }
        }
    }
}
impl FromStr for Diagram {
    type Err = ();
    /// Parse an ASCII diagram, one `Cell` per character. Unrecognised
    /// characters (including spaces) become `Cell::Empty`. Never fails.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let source = s.lines()
            .map(|line| {
                line.chars()
                    .map(|chr| {
                        match chr {
                            '|' => Cell::Vertical,
                            '-' => Cell::Horizontal,
                            '+' => Cell::DirectionChange,
                            // `..=` — the old `...` range-pattern syntax is
                            // deprecated and rejected in edition 2021.
                            letter @ 'A'..='Z' => Cell::Letter(letter),
                            _ => Cell::Empty,
                        }
                    })
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();
        Ok(Diagram {
            source,
            direction: Direction::Down,
            current: None,
        })
    }
}
impl fmt::Debug for Diagram {
    /// Render the grid back to text, one row per line, plus a trailing
    /// blank line — the inverse of the `FromStr` character mapping.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for row in &self.source {
            for cell in row {
                // Exhaustive match (no `_` arm): adding a Cell variant
                // becomes a compile error here instead of silently ' '.
                let chr = match *cell {
                    Cell::Vertical => '|',
                    Cell::Horizontal => '-',
                    Cell::DirectionChange => '+',
                    Cell::Letter(letter) => letter,
                    Cell::Empty => ' '
                };
                f.write_char(chr)?;
            }
            f.write_char('\n')?;
        }
        f.write_char('\n')
    }
}
#[cfg(test)]
mod tests {
    // TODO(review): empty test module — add coverage for FromStr parsing
    // and the Iterator walk (straight runs, '+' turns, termination).
}
|
use game::{Command};
use hex2d::{Angle, Direction};
use std::iter::{repeat};
// Symbol tables: each command may be encoded by any of six characters.
// `encode` emits index 0 for positions not covered by a power phrase.
static YX_SYMBOLS: [char; 6] = [
    'p', '\'', '!', '.', '0', '3'
];
static XY_SYMBOLS: [char; 6] = [
    'b', 'c', 'e', 'f', 'y', '2'
];
static ZX_SYMBOLS: [char; 6] = [ // SW.
    'a', 'g', 'h', 'i', 'j', '4'
];
static ZY_SYMBOLS: [char; 6] = [ // SE.
    'l', 'm', 'n', 'o', ' ', '5'
];
// Rotations: counter-clockwise (left) and clockwise (right).
static L_SYMBOLS: [char; 6] = [
    'k', 's', 't', 'u', 'w', 'x'
];
static R_SYMBOLS: [char; 6] = [
    'd', 'q', 'r', 'v', 'z', '1'
];
/// Symbol table for a movement or rotation command.
///
/// Panics ("Wrong command") on commands with no symbol encoding.
fn get_symbols(c: &Command) -> [char; 6] {
    // Match on the dereferenced place instead of `&`-patterns on every arm;
    // no arm binds a value, so no move occurs.
    match *c {
        Command::Move(Direction::YX) => YX_SYMBOLS,
        Command::Move(Direction::XY) => XY_SYMBOLS,
        Command::Move(Direction::ZX) => ZX_SYMBOLS,
        Command::Move(Direction::ZY) => ZY_SYMBOLS,
        Command::Rotate(Angle::Left) => L_SYMBOLS,
        Command::Rotate(Angle::Right) => R_SYMBOLS,
        _ => panic!("Wrong command")
    }
}
/// Decode a single character back into its command.
///
/// Panics ("No power!") on characters outside every symbol table.
fn symbol_to_command(sym: char) -> Command {
    // slice::contains replaces the hand-rolled `iter().any(...)` scans.
    if YX_SYMBOLS.contains(&sym) {
        Command::Move(Direction::YX)
    } else if XY_SYMBOLS.contains(&sym) {
        Command::Move(Direction::XY)
    } else if ZX_SYMBOLS.contains(&sym) {
        Command::Move(Direction::ZX)
    } else if ZY_SYMBOLS.contains(&sym) {
        Command::Move(Direction::ZY)
    } else if L_SYMBOLS.contains(&sym) {
        Command::Rotate(Angle::Left)
    } else if R_SYMBOLS.contains(&sym) {
        Command::Rotate(Angle::Right)
    } else {
        panic!("No power!")
    }
}
/// Decode every character of `phrase` into its command.
///
/// Takes `&str` instead of `&String`; existing `&String` callers keep
/// compiling via deref coercion.
pub fn phrase_to_commands(phrase: &str) -> Vec<Command> {
    phrase.chars().map(symbol_to_command).collect()
}
/// Stamp `phrase` into `result` at every position where its command
/// sequence matches an unused run of `commands`, marking those cells used.
fn place_phrase(phrase: &String,
                commands: &Vec<Command>,
                result: &mut Vec<char>,
                used: &mut Vec<bool>) {
    let seq = phrase_to_commands(phrase);
    // Guard: a phrase longer than the command stream cannot fit — the old
    // `commands.len() - seq.len()` underflowed (panic) in that case.
    if seq.is_empty() || seq.len() > commands.len() {
        return;
    }
    // `..=`: a match ending exactly at the last command is also considered
    // (the original exclusive range skipped that final position).
    for i in 0..=commands.len() - seq.len() {
        let fits = (0..seq.len()).all(|j| !used[i + j] && seq[j] == commands[i + j]);
        if !fits {
            continue;
        }
        // enumerate over chars avoids the O(n^2) `chars().nth(j)` lookups.
        for (j, chr) in phrase.chars().enumerate() {
            result[i + j] = chr;
            used[i + j] = true;
        }
    }
}
/// Encode a command stream as characters. Power phrases are applied
/// shortest-first; any position not covered by a phrase falls back to the
/// command's canonical (index 0) symbol.
pub fn encode(commands: &Vec<Command>, power_phrases: &Vec<String>) -> String {
    let mut sorted_phrases = power_phrases.clone();
    // sort_by_key replaces the manual length comparator.
    sorted_phrases.sort_by_key(|p| p.len());
    // vec![x; n] instead of repeat().take(n).collect().
    let mut result = vec!['*'; commands.len()];
    let mut used = vec![false; commands.len()];
    for p in &sorted_phrases {
        place_phrase(p, commands, &mut result, &mut used);
    }
    for i in 0..result.len() {
        if !used[i] {
            result[i] = get_symbols(&commands[i])[0];
        }
    }
    result.into_iter().collect()
}
#[test]
fn encode_it_right() {
    // `assert!(a == b)` rather than `assert_eq!` — the latter would require
    // a Debug impl on the external `Command` type, which isn't guaranteed.
    assert!(phrase_to_commands(&"ei!".to_string()) ==
            vec![Command::Move(Direction::XY),
                 Command::Move(Direction::ZX),
                 Command::Move(Direction::YX)]);
}
|
/// EditOrgOption options for editing an organization
///
/// All fields are optional; unset fields serialize as JSON `null`
/// (no `skip_serializing_if` attributes are present).
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct EditOrgOption {
    pub description: Option<String>,
    pub full_name: Option<String>,
    pub location: Option<String>,
    pub repo_admin_change_team_access: Option<bool>,
    /// possible values are `public`, `limited` or `private`
    pub visibility: Option<crate::edit_org_option::EditOrgOptionVisibility>,
    pub website: Option<String>,
}
/// possible values are `public`, `limited` or `private`
///
/// Serialized in lowercase via the per-variant `serde(rename)` attributes.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
pub enum EditOrgOptionVisibility {
    #[serde(rename = "public")]
    Public,
    #[serde(rename = "limited")]
    Limited,
    #[serde(rename = "private")]
    Private,
}
impl Default for EditOrgOptionVisibility {
fn default() -> Self {
EditOrgOptionVisibility::Public
}
}
impl EditOrgOption {
    /// Create a builder for this object.
    #[inline]
    pub fn builder() -> EditOrgOptionBuilder {
        let body = Default::default();
        EditOrgOptionBuilder { body }
    }
    /// Start a `PATCH` request builder; the `org` path parameter is not yet
    /// set (tracked by the `MissingOrg` type parameter).
    #[inline]
    pub fn org_edit() -> EditOrgOptionPatchBuilder<crate::generics::MissingOrg> {
        EditOrgOptionPatchBuilder {
            inner: Default::default(),
            _param_org: core::marker::PhantomData,
        }
    }
}
/// Prefer implementing `From`; the std blanket impl supplies `Into` for
/// free, so existing `.into()` call sites keep working.
impl From<EditOrgOptionBuilder> for EditOrgOption {
    fn from(builder: EditOrgOptionBuilder) -> Self {
        builder.body
    }
}
/// Prefer implementing `From`; the std blanket impl supplies `Into` for
/// free, so existing `.into()` call sites keep working.
impl From<EditOrgOptionPatchBuilder<crate::generics::OrgExists>> for EditOrgOption {
    fn from(builder: EditOrgOptionPatchBuilder<crate::generics::OrgExists>) -> Self {
        builder.inner.body
    }
}
/// Builder for [`EditOrgOption`](./struct.EditOrgOption.html) object.
#[derive(Debug, Clone)]
pub struct EditOrgOptionBuilder {
    body: self::EditOrgOption, // accumulated request body
}
impl EditOrgOptionBuilder {
    /// Set the organization description.
    #[inline]
    pub fn description(mut self, v: impl Into<String>) -> Self {
        self.body.description = Some(v.into());
        self
    }
    /// Set the organization's full name.
    #[inline]
    pub fn full_name(mut self, v: impl Into<String>) -> Self {
        self.body.full_name = Some(v.into());
        self
    }
    /// Set the organization location.
    #[inline]
    pub fn location(mut self, v: impl Into<String>) -> Self {
        self.body.location = Some(v.into());
        self
    }
    /// Set the `repo_admin_change_team_access` flag.
    #[inline]
    pub fn repo_admin_change_team_access(mut self, v: impl Into<bool>) -> Self {
        self.body.repo_admin_change_team_access = Some(v.into());
        self
    }
    /// possible values are `public`, `limited` or `private`
    #[inline]
    pub fn visibility(mut self, v: crate::edit_org_option::EditOrgOptionVisibility) -> Self {
        self.body.visibility = Some(v.into());
        self
    }
    /// Set the organization website.
    #[inline]
    pub fn website(mut self, v: impl Into<String>) -> Self {
        self.body.website = Some(v.into());
        self
    }
}
/// Builder created by [`EditOrgOption::org_edit`](./struct.EditOrgOption.html#method.org_edit) method for a `PATCH` operation associated with `EditOrgOption`.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct EditOrgOptionPatchBuilder<Org> {
    inner: EditOrgOptionPatchBuilderContainer,
    // Zero-sized typestate marker: `Org` records whether the `org` path
    // parameter has been supplied yet (`org()` switches it to `OrgExists`).
    _param_org: core::marker::PhantomData<Org>,
}
#[derive(Debug, Default, Clone)]
struct EditOrgOptionPatchBuilderContainer {
    body: self::EditOrgOption, // JSON body of the PATCH request
    param_org: Option<String>, // `{org}` path parameter
}
impl<Org> EditOrgOptionPatchBuilder<Org> {
/// name of the organization to edit
#[inline]
pub fn org(mut self, value: impl Into<String>) -> EditOrgOptionPatchBuilder<crate::generics::OrgExists> {
self.inner.param_org = Some(value.into());
unsafe { std::mem::transmute(self) }
}
#[inline]
pub fn description(mut self, value: impl Into<String>) -> Self {
self.inner.body.description = Some(value.into());
self
}
#[inline]
pub fn full_name(mut self, value: impl Into<String>) -> Self {
self.inner.body.full_name = Some(value.into());
self
}
#[inline]
pub fn location(mut self, value: impl Into<String>) -> Self {
self.inner.body.location = Some(value.into());
self
}
#[inline]
pub fn repo_admin_change_team_access(mut self, value: impl Into<bool>) -> Self {
self.inner.body.repo_admin_change_team_access = Some(value.into());
self
}
/// possible values are `public`, `limited` or `private`
#[inline]
pub fn visibility(mut self, value: crate::edit_org_option::EditOrgOptionVisibility) -> Self {
self.inner.body.visibility = Some(value.into());
self
}
#[inline]
pub fn website(mut self, value: impl Into<String>) -> Self {
self.inner.body.website = Some(value.into());
self
}
}
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for EditOrgOptionPatchBuilder<crate::generics::OrgExists> {
    type Output = crate::organization::Organization;
    const METHOD: http::Method = http::Method::PATCH;
    // `/orgs/{org}` — only `org()` produces the `OrgExists` typestate, so
    // `param_org` should always be set here and the `expect` should not
    // fire through the public API.
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        format!("/orgs/{org}", org=self.inner.param_org.as_ref().expect("missing parameter org?")).into()
    }
    // Attach the accumulated body as the request's JSON payload.
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        Ok(req
            .json(&self.inner.body))
    }
}
|
// frame.rs ---
//
// Filename: frame.rs
// Author: Jules <archjules>
// Created: Fri Mar 31 10:43:56 2017 (+0200)
// Last-Updated: Fri Mar 31 15:28:27 2017 (+0200)
// By: Jules <archjules>
//
use std::ops::{Index,IndexMut};
/// A call frame: local variable slots plus a saved program counter.
#[derive(Debug, Clone)]
pub struct Frame {
    local: Vec<u16>, // local slots, addressable via Index/IndexMut
    pc: u32 // saved program counter
}
impl Frame {
    /// Create a frame with no locals and the given program counter.
    pub fn new_with_pc(pc: u32) -> Frame {
        Frame {
            local: vec![],
            pc, // field-init shorthand (was `pc: pc`)
        }
    }
    /// Saved program counter.
    pub fn pc(&self) -> u32 {
        self.pc
    }
    /// Overwrite the saved program counter.
    pub fn set_pc(&mut self, value: u32) {
        self.pc = value;
    }
    /// Append `n` zero-initialised local slots.
    pub fn create_locals(&mut self, n: usize) {
        // resize in place — avoids the temporary Vec that
        // `extend(vec![0; n])` allocated.
        let len = self.local.len();
        self.local.resize(len + n, 0);
    }
    /// Number of local slots.
    pub fn len(&self) -> usize {
        self.local.len()
    }
    /// True when the frame has no locals (clippy: `len` without `is_empty`).
    pub fn is_empty(&self) -> bool {
        self.local.is_empty()
    }
}
impl Index<usize> for Frame {
    type Output = u16;
    /// Read local `idx`; panics when out of bounds.
    fn index(&self, idx: usize) -> &u16 {
        &self.local[idx]
    }
}
impl IndexMut<usize> for Frame {
    /// Mutable access to local `idx`; panics when out of bounds.
    fn index_mut(&mut self, idx: usize) -> &mut u16 {
        &mut self.local[idx]
    }
}
|
use crate::game::camera::Camera;
use crate::io::input::Input;
use crate::map::line::Line;
use crate::map::sector::Sector;
use crate::math::vector::Vector2;
use crate::things::hero::Hero;
use crate::world::world::World;
// Sentinel values: -1 means "no surface" / "no wall" for sectors and lines.
const SECTOR_NO_SURFACE: i32 = -1;
const LINE_NO_WALL: i32 = -1;
// Texture indices used when building the demo map below.
const TEXTURE_GRASS: i32 = 0;
const TEXTURE_STONE: i32 = 1;
/// Top-level game state: input, the world, and the player camera.
pub struct Game {
    pub input: Input,
    pub world: World,
    pub camera: Camera,
}
/// Build a hollow 20x20 "house" outline (with a doorway gap at the bottom)
/// anchored at (x, y), and register it as a stone-walled sector.
fn place_house(world: &mut World, x: f32, y: f32) {
    const COUNT: usize = 12;
    // Corner vertices, in the same winding order as before.
    let vecs = vec![
        Vector2::new(x, y),
        Vector2::new(x, y + 20.0),
        Vector2::new(x + 6.0, y + 20.0),
        Vector2::new(x + 6.0, y + 19.0),
        Vector2::new(x + 1.0, y + 19.0),
        Vector2::new(x + 1.0, y + 1.0),
        Vector2::new(x + 19.0, y + 1.0),
        Vector2::new(x + 19.0, y + 19.0),
        Vector2::new(x + 14.0, y + 19.0),
        Vector2::new(x + 14.0, y + 20.0),
        Vector2::new(x + 20.0, y + 20.0),
        Vector2::new(x + 20.0, y),
    ];
    // One wall per pair of consecutive corners, wrapping at the start.
    let mut lines = Vec::with_capacity(COUNT);
    for i in 0..COUNT {
        let prev = if i == 0 { COUNT - 1 } else { i - 1 };
        lines.push(Line::new(LINE_NO_WALL, TEXTURE_STONE, LINE_NO_WALL, vecs[prev], vecs[i]));
    }
    let bottom: f32 = 0.0;
    let floor: f32 = 0.0;
    let ceiling: f32 = 10.0;
    let top: f32 = 0.0;
    let lines = world.push_lines(lines);
    let sector = Sector::new(bottom, floor, ceiling, top, TEXTURE_GRASS, SECTOR_NO_SURFACE, vecs, lines);
    world.push_sector(sector);
}
/// Register the flat 60x50 grass ground sector (no walls).
fn place_grass(world: &mut World) {
    let vecs = vec![
        Vector2::new(0.0, 0.0),
        Vector2::new(0.0, 50.0),
        Vector2::new(60.0, 50.0),
        Vector2::new(60.0, 0.0),
    ];
    let lines = Vec::new();
    let bottom: f32 = 0.0;
    let floor: f32 = 0.0;
    let ceiling: f32 = 10.0;
    let top: f32 = 0.0;
    let sector = Sector::new(bottom, floor, ceiling, top, TEXTURE_GRASS, SECTOR_NO_SURFACE, vecs, lines);
    world.push_sector(sector);
}
/// Populate the world: ground first, then two houses, then finalize with
/// `world.build()` and spawn the hero. Order matters — sectors/lines are
/// pushed into the world as they are created.
fn place(world: &mut World) {
    place_grass(world);
    place_house(world, 10.0, 10.0);
    place_house(world, 40.0, 60.0);
    world.build();
    Hero::new(world, 10.0, 40.0);
    // Baron::new(world, 8.0, 45.0);
    // Blood::new(world, 5.0, 1.0, 30.0);
    // Tree::new(world, 14.0, 42.0);
}
impl Game {
    /// Build a new game: fresh input state, a populated demo world, and a
    /// camera at the origin.
    pub fn new() -> Self {
        let mut world = World::new();
        place(&mut world);
        Game {
            world,
            camera: Camera::new(0.0, 0.0, 0.0, 0.0, 0.0, 6.0),
            input: Input::new(),
        }
    }
    /// Step `angle` by `delta` radians and wrap the result back into
    /// [0, 2*PI). Factored out of the four duplicated key-handling blocks.
    fn step_angle(angle: &mut f32, delta: f32) {
        const TAU: f32 = 2.0 * std::f32::consts::PI;
        *angle += delta;
        if *angle < 0.0 {
            *angle += TAU;
        } else if *angle >= TAU {
            *angle -= TAU;
        }
    }
    /// Advance the world one tick, then apply look input to the camera
    /// (0.05 rad per tick per held key; ry = yaw, rx = pitch).
    pub fn update(&mut self) {
        self.world.update();
        let input = &self.input;
        let camera = &mut self.camera;
        if input.look_left {
            Self::step_angle(&mut camera.ry, -0.05);
        }
        if input.look_right {
            Self::step_angle(&mut camera.ry, 0.05);
        }
        if input.look_up {
            Self::step_angle(&mut camera.rx, -0.05);
        }
        if input.look_down {
            Self::step_angle(&mut camera.rx, 0.05);
        }
        // let target = &self.world.things[0];
        // camera.update_orbit(target);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.