text stringlengths 8 4.13M |
|---|
//! The global part of a plan implementation.
use super::controller_collector_context::ControllerCollectorContext;
use super::PlanConstraints;
use crate::mmtk::MMTK;
use crate::plan::transitive_closure::TransitiveClosure;
use crate::plan::Mutator;
use crate::policy::immortalspace::ImmortalSpace;
use crate::policy::largeobjectspace::LargeObjectSpace;
use crate::policy::space::Space;
use crate::scheduler::gc_work::ProcessEdgesWork;
use crate::scheduler::*;
use crate::util::alloc::allocators::AllocatorSelector;
#[cfg(feature = "analysis")]
use crate::util::analysis::AnalysisManager;
use crate::util::conversions::bytes_to_pages;
use crate::util::heap::layout::heap_layout::Mmapper;
use crate::util::heap::layout::heap_layout::VMMap;
use crate::util::heap::layout::map::Map;
use crate::util::heap::HeapMeta;
use crate::util::heap::VMRequest;
use crate::util::options::PlanSelector;
use crate::util::options::{Options, UnsafeOptionsWrapper};
use crate::util::statistics::stats::Stats;
use crate::util::OpaquePointer;
use crate::util::{Address, ObjectReference};
use crate::vm::*;
use downcast_rs::Downcast;
use enum_map::EnumMap;
use std::cell::UnsafeCell;
use std::marker::PhantomData;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
/// A GC worker's context for copying GCs.
/// Each GC plan should provide their implementation of a CopyContext.
/// For non-copying GC, NoCopy can be used.
pub trait CopyContext: 'static + Sync + Send {
    /// The VM binding this context is specialized for.
    type VM: VMBinding;
    /// The static constraints of the plan this context belongs to.
    fn constraints(&self) -> &'static PlanConstraints;
    /// One-time initialization with the worker's thread-local-storage handle.
    fn init(&mut self, tls: OpaquePointer);
    /// Per-GC preparation hook, called before copying starts.
    fn prepare(&mut self);
    /// Per-GC release hook, called after copying finishes.
    fn release(&mut self);
    /// Allocate `bytes` for a copy of `original` with the given alignment
    /// requirements, under the given allocation semantics.
    fn alloc_copy(
        &mut self,
        original: ObjectReference,
        bytes: usize,
        align: usize,
        offset: isize,
        semantics: AllocationSemantics,
    ) -> Address;
    /// Post-copy fix-up hook (e.g. installing the TIB/metadata).
    /// Default is a no-op; copying plans override this.
    fn post_copy(
        &mut self,
        _obj: ObjectReference,
        _tib: Address,
        _bytes: usize,
        _semantics: AllocationSemantics,
    ) {
    }
    /// Decide which allocator to copy into: objects whose maximum aligned
    /// size exceeds the plan's `max_non_los_copy_bytes` are redirected to
    /// the large object space; otherwise the requested semantics are kept.
    fn copy_check_allocator(
        &self,
        _from: ObjectReference,
        bytes: usize,
        align: usize,
        semantics: AllocationSemantics,
    ) -> AllocationSemantics {
        // Worst-case size after alignment padding decides LOS eligibility.
        let large = crate::util::alloc::allocator::get_maximum_aligned_size::<Self::VM>(
            bytes,
            align,
            Self::VM::MIN_ALIGNMENT,
        ) > self.constraints().max_non_los_copy_bytes;
        if large {
            AllocationSemantics::Los
        } else {
            semantics
        }
    }
}
/// A `CopyContext` for plans that never copy objects. All copy-related
/// entry points are unreachable; the lifecycle hooks are no-ops.
pub struct NoCopy<VM: VMBinding>(PhantomData<VM>);

impl<VM: VMBinding> NoCopy<VM> {
    /// Construct a `NoCopy` context. The MMTk instance is accepted for
    /// signature uniformity with other contexts but is not used.
    pub fn new(_mmtk: &'static MMTK<VM>) -> Self {
        NoCopy(PhantomData)
    }
}

impl<VM: VMBinding> CopyContext for NoCopy<VM> {
    type VM = VM;
    // Non-copying plans never consult constraints through the copy context.
    fn constraints(&self) -> &'static PlanConstraints {
        unreachable!()
    }
    fn init(&mut self, _tls: OpaquePointer) {}
    fn prepare(&mut self) {}
    fn release(&mut self) {}
    // Copying allocation must never be requested from a non-copying plan.
    fn alloc_copy(
        &mut self,
        _original: ObjectReference,
        _bytes: usize,
        _align: usize,
        _offset: isize,
        _semantics: AllocationSemantics,
    ) -> Address {
        unreachable!()
    }
}

impl<VM: VMBinding> WorkerLocal for NoCopy<VM> {
    /// Forward worker-local initialization to the `CopyContext` impl.
    fn init(&mut self, tls: OpaquePointer) {
        CopyContext::init(self, tls);
    }
}
/// Create a boxed mutator context for the plan selected in the MMTk options.
/// Dispatches to the plan-specific mutator factory.
pub fn create_mutator<VM: VMBinding>(
    tls: OpaquePointer,
    mmtk: &'static MMTK<VM>,
) -> Box<Mutator<VM>> {
    let mutator = match mmtk.options.plan {
        PlanSelector::NoGC => crate::plan::nogc::mutator::create_nogc_mutator(tls, &*mmtk.plan),
        PlanSelector::SemiSpace => {
            crate::plan::semispace::mutator::create_ss_mutator(tls, &*mmtk.plan)
        }
        // GenCopy takes the whole MMTk instance rather than just the plan.
        PlanSelector::GenCopy => crate::plan::gencopy::mutator::create_gencopy_mutator(tls, mmtk),
        PlanSelector::MarkSweep => {
            crate::plan::marksweep::mutator::create_ms_mutator(tls, &*mmtk.plan)
        }
    };
    Box::new(mutator)
}
/// Instantiate the concrete plan named by `plan` and return it as a
/// boxed trait object.
pub fn create_plan<VM: VMBinding>(
    plan: PlanSelector,
    vm_map: &'static VMMap,
    mmapper: &'static Mmapper,
    options: Arc<UnsafeOptionsWrapper>,
    scheduler: &'static MMTkScheduler<VM>,
) -> Box<dyn Plan<VM = VM>> {
    let plan: Box<dyn Plan<VM = VM>> = match plan {
        PlanSelector::NoGC => {
            Box::new(crate::plan::nogc::NoGC::new(vm_map, mmapper, options, scheduler))
        }
        PlanSelector::SemiSpace => {
            Box::new(crate::plan::semispace::SemiSpace::new(vm_map, mmapper, options, scheduler))
        }
        PlanSelector::GenCopy => {
            Box::new(crate::plan::gencopy::GenCopy::new(vm_map, mmapper, options, scheduler))
        }
        PlanSelector::MarkSweep => {
            Box::new(crate::plan::marksweep::MarkSweep::new(vm_map, mmapper, options, scheduler))
        }
    };
    plan
}
/// A plan describes the global core functionality for all memory management schemes.
/// All global MMTk plans should implement this trait.
///
/// The global instance defines and manages static resources
/// (such as memory and virtual memory resources).
pub trait Plan: 'static + Sync + Send + Downcast {
    /// The VM binding this plan is specialized for.
    type VM: VMBinding;
    /// The static constraints this plan imposes (e.g. whether it moves objects).
    fn constraints(&self) -> &'static PlanConstraints;
    /// Create the plan-specific worker-local data (e.g. a `CopyContext`)
    /// for a GC worker thread.
    fn create_worker_local(
        &self,
        tls: OpaquePointer,
        mmtk: &'static MMTK<Self::VM>,
    ) -> GCWorkerLocalPtr;
    /// The `BasePlan` state shared by every plan.
    fn base(&self) -> &BasePlan<Self::VM>;
    /// Schedule the work packets that make up a collection.
    fn schedule_collection(&'static self, _scheduler: &MMTkScheduler<Self::VM>);
    /// The `CommonPlan`, for plans that have one. Plans built directly on
    /// `BasePlan` must not call this.
    fn common(&self) -> &CommonPlan<Self::VM> {
        panic!("Common Plan not handled!")
    }
    /// The memory mapper shared across spaces.
    fn mmapper(&self) -> &'static Mmapper {
        self.base().mmapper
    }
    /// The runtime options.
    fn options(&self) -> &Options {
        &self.base().options
    }
    // unsafe because this can only be called once by the init thread
    fn gc_init(
        &mut self,
        heap_size: usize,
        vm_map: &'static VMMap,
        scheduler: &Arc<MMTkScheduler<Self::VM>>,
    );
    /// Mapping from allocation semantics to the allocator mutators should use.
    fn get_allocator_mapping(&self) -> &'static EnumMap<AllocationSemantics, AllocatorSelector>;
    /// Whether the current collection is a nursery collection.
    /// Non-generational plans always answer `false`.
    fn in_nursery(&self) -> bool {
        false
    }
    #[cfg(feature = "sanity")]
    fn enter_sanity(&self) {
        self.base().inside_sanity.store(true, Ordering::Relaxed)
    }
    #[cfg(feature = "sanity")]
    fn leave_sanity(&self) {
        self.base().inside_sanity.store(false, Ordering::Relaxed)
    }
    #[cfg(feature = "sanity")]
    fn is_in_sanity(&self) -> bool {
        self.base().inside_sanity.load(Ordering::Relaxed)
    }
    /// Whether MMTk is ready for collection (set by `enable_collection()`).
    fn is_initialized(&self) -> bool {
        self.base().initialized.load(Ordering::SeqCst)
    }
    fn prepare(&self, tls: OpaquePointer);
    fn release(&self, tls: OpaquePointer);
    /// Poll for a collection. Called on the allocation slow path; returns
    /// `true` if a collection was triggered (the caller should then block).
    fn poll(&self, space_full: bool, space: &dyn Space<Self::VM>) -> bool {
        if self.collection_required(space_full, space) {
            // FIXME
            /*if space == META_DATA_SPACE {
            /* In general we must not trigger a GC on metadata allocation since
             * this is not, in general, in a GC safe point. Instead we initiate
             * an asynchronous GC, which will occur at the next safe point.
             */
            self.log_poll(space, "Asynchronous collection requested");
            self.common().control_collector_context.request();
            return false;
            }*/
            self.log_poll(space, "Triggering collection");
            self.base().control_collector_context.request();
            return true;
        }
        // FIXME
        /*if self.concurrent_collection_required() {
        // FIXME
        /*if space == self.common().meta_data_space {
            self.log_poll(space, "Triggering async concurrent collection");
            Self::trigger_internal_collection_request();
            return false;
        } else {*/
        self.log_poll(space, "Triggering concurrent collection");
        Self::trigger_internal_collection_request();
        return true;
        }*/
        false
    }
    fn log_poll(&self, space: &dyn Space<Self::VM>, message: &'static str) {
        info!("  [POLL] {}: {}", space.get_name(), message);
    }
    /// This method controls the triggering of a GC. It is called periodically
    /// during allocation. Returns `true` to trigger a collection.
    ///
    /// * `space_full`: Space request failed, must recover pages within `space`.
    /// * `_space`: the space in question (currently unused by the default).
    fn collection_required(&self, space_full: bool, _space: &dyn Space<Self::VM>) -> bool {
        let stress_force_gc = self.stress_test_gc_required();
        debug!(
            "self.get_pages_reserved()={}, self.get_total_pages()={}",
            self.get_pages_reserved(),
            self.get_total_pages()
        );
        let heap_full = self.get_pages_reserved() > self.get_total_pages();
        space_full || stress_force_gc || heap_full
    }
    /// Pages in use plus the copy reserve.
    fn get_pages_reserved(&self) -> usize {
        self.get_pages_used() + self.get_collection_reserve()
    }
    fn get_total_pages(&self) -> usize {
        self.base().heap.get_total_pages()
    }
    /// Pages still available for allocation.
    /// NOTE(review): this can underflow (panic in debug, wrap in release)
    /// if reserved pages ever exceed total pages — confirm callers only use
    /// it when the heap is not over-committed.
    fn get_pages_avail(&self) -> usize {
        self.get_total_pages() - self.get_pages_reserved()
    }
    /// Pages reserved for copying; zero for non-copying plans.
    fn get_collection_reserve(&self) -> usize {
        0
    }
    fn get_pages_used(&self) -> usize;
    fn is_emergency_collection(&self) -> bool {
        self.base().emergency_collection.load(Ordering::Relaxed)
    }
    fn get_free_pages(&self) -> usize {
        self.get_total_pages() - self.get_pages_used()
    }
    /// Whether a stress-test GC should be forced: triggers once the bytes
    /// allocated since the last stress GC exceed the configured stress factor.
    #[inline]
    fn stress_test_gc_required(&self) -> bool {
        let stress_factor = self.base().options.stress_factor;
        if self.is_initialized()
            && (self.base().allocation_bytes.load(Ordering::SeqCst) > stress_factor)
        {
            trace!(
                "Stress GC: allocation_bytes = {}, stress_factor = {}",
                self.base().allocation_bytes.load(Ordering::Relaxed),
                stress_factor
            );
            trace!("Doing stress GC");
            // Reset the counter so the next stress GC is another full interval away.
            self.base().allocation_bytes.store(0, Ordering::SeqCst);
            true
        } else {
            false
        }
    }
    /// Handle an explicit collection request from the user/VM (e.g. System.gc()).
    /// Unless `force` is set, the request is ignored when the options say so.
    /// Blocks the requesting thread for the duration of the GC.
    fn handle_user_collection_request(&self, tls: OpaquePointer, force: bool) {
        if force || !self.options().ignore_system_g_c {
            info!("User triggering collection");
            self.base()
                .user_triggered_collection
                .store(true, Ordering::Relaxed);
            self.base().control_collector_context.request();
            <Self::VM as VMBinding>::VMCollection::block_for_gc(tls);
        }
    }
    /// Clear the user-triggered-collection flag after the GC completes.
    fn reset_collection_trigger(&self) {
        self.base()
            .user_triggered_collection
            .store(false, Ordering::Relaxed)
    }
    /// Assert that a movable object is not mutated by the runtime while a
    /// proper GC is in progress.
    fn modify_check(&self, object: ObjectReference) {
        if self.base().gc_in_progress_proper() && object.is_movable() {
            panic!(
                "GC modifying a potentially moving object via Java (i.e. not magic) obj= {}",
                object
            );
        }
    }
    /// Bytes of global side metadata this plan needs per chunk; zero by default.
    fn global_side_metadata_per_chunk(&self) -> usize {
        0
    }
}
impl_downcast!(Plan assoc VM);
#[derive(PartialEq)]
/// The coarse phase of the collector, guarded by `BasePlan::gc_status`.
pub enum GcStatus {
    // No collection is in progress.
    NotInGC,
    // A collection has been requested and preparation (e.g. stack scanning) is underway.
    GcPrepare,
    // The collection proper is running.
    GcProper,
}
/**
BasePlan should contain all plan-related state and functions that are _fundamental_ to _all_ plans. These include VM-specific (but not plan-specific) features such as a code space or vm space, which are fundamental to all plans for a given VM. Features that are common to _many_ (but not intrinsically _all_) plans should instead be included in CommonPlan.
*/
pub struct BasePlan<VM: VMBinding> {
    // Whether MMTk is now ready for collection. This is set to true when enable_collection() is called.
    pub initialized: AtomicBool,
    // Coarse GC phase; see `GcStatus`.
    pub gc_status: Mutex<GcStatus>,
    // Pages in use at the time of the last stress-test check.
    pub last_stress_pages: AtomicUsize,
    // Whether all mutator stacks have been scanned for this GC.
    pub stacks_prepared: AtomicBool,
    // Whether the current collection is an emergency (repeated-failure) collection.
    pub emergency_collection: AtomicBool,
    // Whether the current collection was explicitly requested by the user/VM.
    pub user_triggered_collection: AtomicBool,
    // Has an allocation succeeded since the emergency collection?
    pub allocation_success: AtomicBool,
    // Maximum number of failed attempts by a single thread
    pub max_collection_attempts: AtomicUsize,
    // Current collection attempt
    pub cur_collection_attempts: AtomicUsize,
    // Lock used for out of memory handling
    pub oom_lock: Mutex<()>,
    // Channel to the controller thread for requesting collections.
    pub control_collector_context: ControllerCollectorContext<VM>,
    // GC statistics gathering.
    pub stats: Stats,
    mmapper: &'static Mmapper,
    pub vm_map: &'static VMMap,
    pub options: Arc<UnsafeOptionsWrapper>,
    pub heap: HeapMeta,
    // Optional base spaces (code/ro/vm); interior mutability is managed manually.
    #[cfg(feature = "base_spaces")]
    pub unsync: UnsafeCell<BaseUnsync<VM>>,
    #[cfg(feature = "sanity")]
    pub inside_sanity: AtomicBool,
    // A counter for per-mutator stack scanning
    pub scanned_stacks: AtomicUsize,
    pub mutator_iterator_lock: Mutex<()>,
    // A counter that keeps tracks of the number of bytes allocated since last stress test
    pub allocation_bytes: AtomicUsize,
    // Wrapper around analysis counters
    #[cfg(feature = "analysis")]
    pub analysis_manager: AnalysisManager<VM>,
}
#[cfg(feature = "base_spaces")]
/// The feature-gated base spaces that some VMs require (code, read-only,
/// and boot-image/VM space). Wrapped in `UnsafeCell` by `BasePlan`.
pub struct BaseUnsync<VM: VMBinding> {
    #[cfg(feature = "code_space")]
    pub code_space: ImmortalSpace<VM>,
    #[cfg(feature = "ro_space")]
    pub ro_space: ImmortalSpace<VM>,
    #[cfg(feature = "vm_space")]
    pub vm_space: ImmortalSpace<VM>,
}
#[cfg(feature = "vm_space")]
/// Create the fixed-size immortal space that backs the VM's boot image.
/// `boot_segment_bytes` is rounded up to whole chunks and converted to MB
/// for the fixed-size virtual memory request.
pub fn create_vm_space<VM: VMBinding>(
    vm_map: &'static VMMap,
    mmapper: &'static Mmapper,
    heap: &mut HeapMeta,
    boot_segment_bytes: usize,
    constraints: &'static PlanConstraints,
) -> ImmortalSpace<VM> {
    use crate::util::constants::LOG_BYTES_IN_MBYTE;
    use crate::util::conversions::raw_align_up;
    use crate::util::heap::layout::vm_layout_constants::BYTES_IN_CHUNK;
    // let boot_segment_bytes = BOOT_IMAGE_END - BOOT_IMAGE_DATA_START;
    debug_assert!(boot_segment_bytes > 0);
    // Chunk-align first, then express the request in megabytes.
    let aligned_bytes = raw_align_up(boot_segment_bytes, BYTES_IN_CHUNK);
    let boot_segment_mb = aligned_bytes >> LOG_BYTES_IN_MBYTE;
    ImmortalSpace::new(
        "boot",
        false,
        VMRequest::fixed_size(boot_segment_mb),
        vm_map,
        mmapper,
        heap,
        constraints,
    )
}
impl<VM: VMBinding> BasePlan<VM> {
    #[allow(unused_mut)] // 'heap' only needs to be mutable for certain features
    #[allow(unused_variables)] // 'constraints' is only needed for certain features
    /// Construct the base plan state shared by all plans, creating any
    /// feature-gated base spaces (code/ro/vm) as part of construction.
    pub fn new(
        vm_map: &'static VMMap,
        mmapper: &'static Mmapper,
        options: Arc<UnsafeOptionsWrapper>,
        mut heap: HeapMeta,
        constraints: &'static PlanConstraints,
    ) -> BasePlan<VM> {
        let stats = Stats::new();
        // Initializing the analysis manager and routines
        #[cfg(feature = "analysis")]
        let analysis_manager = AnalysisManager::new(&stats);
        BasePlan {
            #[cfg(feature = "base_spaces")]
            unsync: UnsafeCell::new(BaseUnsync {
                #[cfg(feature = "code_space")]
                code_space: ImmortalSpace::new(
                    "code_space",
                    true,
                    VMRequest::discontiguous(),
                    vm_map,
                    mmapper,
                    &mut heap,
                    constraints,
                ),
                #[cfg(feature = "ro_space")]
                ro_space: ImmortalSpace::new(
                    "ro_space",
                    true,
                    VMRequest::discontiguous(),
                    vm_map,
                    mmapper,
                    &mut heap,
                    constraints,
                ),
                #[cfg(feature = "vm_space")]
                vm_space: create_vm_space(
                    vm_map,
                    mmapper,
                    &mut heap,
                    options.vm_space_size,
                    constraints,
                ),
            }),
            initialized: AtomicBool::new(false),
            gc_status: Mutex::new(GcStatus::NotInGC),
            last_stress_pages: AtomicUsize::new(0),
            stacks_prepared: AtomicBool::new(false),
            emergency_collection: AtomicBool::new(false),
            user_triggered_collection: AtomicBool::new(false),
            allocation_success: AtomicBool::new(false),
            max_collection_attempts: AtomicUsize::new(0),
            cur_collection_attempts: AtomicUsize::new(0),
            oom_lock: Mutex::new(()),
            control_collector_context: ControllerCollectorContext::new(),
            stats,
            mmapper,
            heap,
            vm_map,
            options,
            #[cfg(feature = "sanity")]
            inside_sanity: AtomicBool::new(false),
            scanned_stacks: AtomicUsize::new(0),
            mutator_iterator_lock: Mutex::new(()),
            allocation_bytes: AtomicUsize::new(0),
            #[cfg(feature = "analysis")]
            analysis_manager,
        }
    }
    /// One-time heap initialization: boot the VM map, finalize the static
    /// space map, record the heap size, wire up the controller, and
    /// initialize the feature-gated base spaces.
    /// Must be called exactly once by the init thread.
    pub fn gc_init(
        &mut self,
        heap_size: usize,
        vm_map: &'static VMMap,
        scheduler: &Arc<MMTkScheduler<VM>>,
    ) {
        vm_map.boot();
        vm_map.finalize_static_space_map(
            self.heap.get_discontig_start(),
            self.heap.get_discontig_end(),
        );
        self.heap
            .total_pages
            .store(bytes_to_pages(heap_size), Ordering::Relaxed);
        self.control_collector_context.init(scheduler);
        #[cfg(feature = "base_spaces")]
        {
            let unsync = unsafe { &mut *self.unsync.get() };
            #[cfg(feature = "code_space")]
            unsync.code_space.init(vm_map);
            #[cfg(feature = "ro_space")]
            unsync.ro_space.init(vm_map);
            #[cfg(feature = "vm_space")]
            {
                unsync.vm_space.init(vm_map);
                // The boot image must be mapped eagerly, before any access.
                unsync.vm_space.ensure_mapped();
            }
        }
    }
    #[cfg(feature = "base_spaces")]
    /// Pages reserved by the base spaces (code + ro; see note on vm_space).
    pub fn get_pages_used(&self) -> usize {
        let mut pages = 0;
        let unsync = unsafe { &mut *self.unsync.get() };
        #[cfg(feature = "code_space")]
        {
            pages += unsync.code_space.reserved_pages();
        }
        #[cfg(feature = "ro_space")]
        {
            pages += unsync.ro_space.reserved_pages();
        }
        // The VM space may be used as an immutable boot image, in which case, we should not count
        // it as part of the heap size.
        // #[cfg(feature = "vm_space")]
        // {
        //     pages += unsync.vm_space.reserved_pages();
        // }
        pages
    }
    #[cfg(not(feature = "base_spaces"))]
    /// Without base spaces, the base plan reserves no pages.
    pub fn get_pages_used(&self) -> usize {
        0
    }
    /// Trace an object that lives in one of the base spaces. Panics if the
    /// object is not in any base space — callers are expected to have
    /// exhausted their own spaces first.
    pub fn trace_object<T: TransitiveClosure, C: CopyContext>(
        &self,
        _trace: &mut T,
        _object: ObjectReference,
    ) -> ObjectReference {
        #[cfg(feature = "base_spaces")]
        {
            let unsync = unsafe { &*self.unsync.get() };
            #[cfg(feature = "code_space")]
            {
                if unsync.code_space.in_space(_object) {
                    trace!("trace_object: object in code space");
                    return unsync.code_space.trace_object::<T>(_trace, _object);
                }
            }
            #[cfg(feature = "ro_space")]
            {
                if unsync.ro_space.in_space(_object) {
                    trace!("trace_object: object in ro_space space");
                    return unsync.ro_space.trace_object(_trace, _object);
                }
            }
            #[cfg(feature = "vm_space")]
            {
                if unsync.vm_space.in_space(_object) {
                    trace!("trace_object: object in boot space");
                    return unsync.vm_space.trace_object(_trace, _object);
                }
            }
        }
        panic!("No special case for space in trace_object({:?})", _object);
    }
    /// Per-GC preparation of the base spaces.
    pub fn prepare(&self, _tls: OpaquePointer, _primary: bool) {
        #[cfg(feature = "base_spaces")]
        let unsync = unsafe { &mut *self.unsync.get() };
        #[cfg(feature = "code_space")]
        unsync.code_space.prepare();
        #[cfg(feature = "ro_space")]
        unsync.ro_space.prepare();
        #[cfg(feature = "vm_space")]
        unsync.vm_space.prepare();
    }
    /// Per-GC release of the base spaces.
    pub fn release(&self, _tls: OpaquePointer, _primary: bool) {
        #[cfg(feature = "base_spaces")]
        let unsync = unsafe { &mut *self.unsync.get() };
        #[cfg(feature = "code_space")]
        unsync.code_space.release();
        #[cfg(feature = "ro_space")]
        unsync.ro_space.release();
        #[cfg(feature = "vm_space")]
        unsync.vm_space.release();
    }
    /// Decide whether this collection is an emergency collection:
    /// user-triggered GCs count as a single attempt; otherwise the attempt
    /// counter grows with repeated allocation failures, and more than one
    /// attempt after an exhaustive collection escalates to emergency.
    pub fn set_collection_kind(&self) {
        self.cur_collection_attempts.store(
            if self.is_user_triggered_collection() {
                1
            } else {
                self.determine_collection_attempts()
            },
            Ordering::Relaxed,
        );
        let emergency_collection = !self.is_internal_triggered_collection()
            && self.last_collection_was_exhaustive()
            && self.cur_collection_attempts.load(Ordering::Relaxed) > 1;
        self.emergency_collection
            .store(emergency_collection, Ordering::Relaxed);
        if emergency_collection {
            self.force_full_heap_collection();
        }
    }
    /// Transition the GC status, starting/stopping statistics gathering on
    /// entry to / exit from a collection.
    pub fn set_gc_status(&self, s: GcStatus) {
        let mut gc_status = self.gc_status.lock().unwrap();
        if *gc_status == GcStatus::NotInGC {
            self.stacks_prepared.store(false, Ordering::SeqCst);
            // FIXME stats
            self.stats.start_gc();
        }
        *gc_status = s;
        if *gc_status == GcStatus::NotInGC {
            // FIXME stats
            if self.stats.get_gathering_stats() {
                self.stats.end_gc();
            }
        }
    }
    /// Whether all mutator stacks have been scanned for this GC.
    pub fn stacks_prepared(&self) -> bool {
        self.stacks_prepared.load(Ordering::SeqCst)
    }
    /// Whether any phase of a collection is in progress.
    pub fn gc_in_progress(&self) -> bool {
        *self.gc_status.lock().unwrap() != GcStatus::NotInGC
    }
    /// Whether the collection proper (post-preparation) is in progress.
    pub fn gc_in_progress_proper(&self) -> bool {
        *self.gc_status.lock().unwrap() == GcStatus::GcProper
    }
    fn is_user_triggered_collection(&self) -> bool {
        self.user_triggered_collection.load(Ordering::Relaxed)
    }
    // Bump the attempt counter if the last collection did not unblock an
    // allocation; otherwise reset it to 1.
    fn determine_collection_attempts(&self) -> usize {
        if !self.allocation_success.load(Ordering::Relaxed) {
            self.max_collection_attempts.fetch_add(1, Ordering::Relaxed);
        } else {
            self.allocation_success.store(false, Ordering::Relaxed);
            self.max_collection_attempts.store(1, Ordering::Relaxed);
        }
        self.max_collection_attempts.load(Ordering::Relaxed)
    }
    fn is_internal_triggered_collection(&self) -> bool {
        // FIXME
        false
    }
    // Placeholder: currently every collection is treated as exhaustive.
    fn last_collection_was_exhaustive(&self) -> bool {
        true
    }
    // Placeholder hook for plans that distinguish full-heap collections.
    fn force_full_heap_collection(&self) {}
    /// Account `size` allocated bytes toward the stress-test GC counter.
    pub fn increase_allocation_bytes_by(&self, size: usize) {
        let old_allocation_bytes = self.allocation_bytes.fetch_add(size, Ordering::SeqCst);
        trace!(
            "Stress GC: old_allocation_bytes = {}, size = {}, allocation_bytes = {}",
            old_allocation_bytes,
            size,
            self.allocation_bytes.load(Ordering::Relaxed),
        );
    }
}
/**
CommonPlan is for representing state and features used by _many_ plans, but that are not fundamental to _all_ plans. Examples include the Large Object Space and an Immortal space. Features that are fundamental to _all_ plans must be included in BasePlan.
*/
pub struct CommonPlan<VM: VMBinding> {
    // Spaces common to many plans (immortal + LOS); interior mutability managed manually.
    pub unsync: UnsafeCell<CommonUnsync<VM>>,
    // The fundamental state shared by all plans.
    pub base: BasePlan<VM>,
}
/// The spaces managed by `CommonPlan`: an immortal space and a large
/// object space.
pub struct CommonUnsync<VM: VMBinding> {
    pub immortal: ImmortalSpace<VM>,
    pub los: LargeObjectSpace<VM>,
}
impl<VM: VMBinding> CommonPlan<VM> {
    /// Construct the common plan state: an immortal space, a large object
    /// space, and the underlying `BasePlan`.
    pub fn new(
        vm_map: &'static VMMap,
        mmapper: &'static Mmapper,
        options: Arc<UnsafeOptionsWrapper>,
        mut heap: HeapMeta,
        constraints: &'static PlanConstraints,
    ) -> CommonPlan<VM> {
        CommonPlan {
            unsync: UnsafeCell::new(CommonUnsync {
                immortal: ImmortalSpace::new(
                    "immortal",
                    true,
                    VMRequest::discontiguous(),
                    vm_map,
                    mmapper,
                    &mut heap,
                    constraints,
                ),
                los: LargeObjectSpace::new(
                    "los",
                    true,
                    VMRequest::discontiguous(),
                    vm_map,
                    mmapper,
                    &mut heap,
                    constraints,
                ),
            }),
            base: BasePlan::new(vm_map, mmapper, options, heap, constraints),
        }
    }
    /// Heap initialization: delegates to the base plan, then initializes
    /// the immortal and large object spaces.
    pub fn gc_init(
        &mut self,
        heap_size: usize,
        vm_map: &'static VMMap,
        scheduler: &Arc<MMTkScheduler<VM>>,
    ) {
        self.base.gc_init(heap_size, vm_map, scheduler);
        let unsync = unsafe { &mut *self.unsync.get() };
        unsync.immortal.init(vm_map);
        unsync.los.init(vm_map);
    }
    /// Pages reserved by the common spaces plus the base spaces.
    pub fn get_pages_used(&self) -> usize {
        let unsync = unsafe { &*self.unsync.get() };
        unsync.immortal.reserved_pages() + unsync.los.reserved_pages() + self.base.get_pages_used()
    }
    /// Trace an object in the immortal or large object space, falling back
    /// to the base plan's spaces (which panics if the object is in none).
    pub fn trace_object<T: TransitiveClosure, C: CopyContext>(
        &self,
        trace: &mut T,
        object: ObjectReference,
    ) -> ObjectReference {
        let unsync = unsafe { &*self.unsync.get() };
        if unsync.immortal.in_space(object) {
            trace!("trace_object: object in immortal space");
            return unsync.immortal.trace_object(trace, object);
        }
        if unsync.los.in_space(object) {
            trace!("trace_object: object in los");
            return unsync.los.trace_object(trace, object);
        }
        self.base.trace_object::<T, C>(trace, object)
    }
    /// Per-GC preparation of the common spaces, then the base spaces.
    pub fn prepare(&self, tls: OpaquePointer, primary: bool) {
        let unsync = unsafe { &mut *self.unsync.get() };
        unsync.immortal.prepare();
        unsync.los.prepare(primary);
        self.base.prepare(tls, primary)
    }
    /// Per-GC release of the common spaces, then the base spaces.
    pub fn release(&self, tls: OpaquePointer, primary: bool) {
        let unsync = unsafe { &mut *self.unsync.get() };
        unsync.immortal.release();
        unsync.los.release(primary);
        self.base.release(tls, primary)
    }
    /// Schedule the work packets common to most plans: finalization and,
    /// when the plan needs it, forwarding of finalizable references.
    pub fn schedule_common<E: ProcessEdgesWork<VM = VM>>(
        &self,
        constraints: &'static PlanConstraints,
        scheduler: &MMTkScheduler<VM>,
    ) {
        // Schedule finalization
        if !self.base.options.no_finalizer {
            use crate::util::finalizable_processor::{Finalization, ForwardFinalization};
            // finalization
            scheduler.work_buckets[WorkBucketStage::RefClosure].add(Finalization::<E>::new());
            // forward refs
            if constraints.needs_forward_after_liveness {
                scheduler.work_buckets[WorkBucketStage::RefForwarding]
                    .add(ForwardFinalization::<E>::new());
            }
        }
    }
    pub fn stacks_prepared(&self) -> bool {
        self.base.stacks_prepared()
    }
    // NOTE(review): the 'static lifetime here comes from dereferencing the
    // raw UnsafeCell pointer, which yields an unbounded lifetime — callers
    // must ensure the plan itself lives for 'static (plans are leaked in
    // practice — TODO confirm).
    pub fn get_immortal(&self) -> &'static ImmortalSpace<VM> {
        let unsync = unsafe { &*self.unsync.get() };
        &unsync.immortal
    }
    // NOTE(review): same unbounded-lifetime caveat as `get_immortal`.
    pub fn get_los(&self) -> &'static LargeObjectSpace<VM> {
        let unsync = unsafe { &*self.unsync.get() };
        &unsync.los
    }
}
use enum_map::Enum;
/// Allocation semantics that MMTk provides.
/// Each allocation request requires a desired semantic for the object to allocate.
#[repr(i32)]
#[derive(Clone, Copy, Debug, Enum)]
pub enum AllocationSemantics {
    /// The plan's default space.
    Default = 0,
    /// Objects that are never collected.
    Immortal = 1,
    /// The large object space.
    Los = 2,
    /// Machine code (requires the `code_space` feature).
    Code = 3,
    /// Read-only data (requires the `ro_space` feature).
    ReadOnly = 4,
}
|
//! Main weechat module
use weechat_sys::t_weechat_plugin;
use crate::LossyCString;
use libc::{c_char, c_int};
use std::borrow::Cow;
use std::ffi::CStr;
use std::{ptr, vec};
/// An iterator over the arguments of a command, yielding a String value for
/// each argument.
pub struct ArgsWeechat {
    // Owned, already-converted argument strings; iteration consumes them.
    iter: vec::IntoIter<String>,
}
impl ArgsWeechat {
    /// Create an ArgsWeechat object from the underlying weechat C types.
    /// Expects the strings in argv to be valid utf8, if not invalid UTF-8
    /// sequences are replaced with the replacement character.
    ///
    /// `argv` must point to at least `argc` valid, NUL-terminated C strings
    /// — this mirrors the contract of the C `main` signature.
    pub fn new(argc: c_int, argv: *mut *mut c_char) -> ArgsWeechat {
        let argc = argc as isize;
        let args: Vec<String> = (0..argc)
            .map(|i| {
                // SAFETY: the caller guarantees argv[i] is a valid C string
                // for every i < argc (weechat plugin entry-point contract).
                let cstr = unsafe {
                    CStr::from_ptr(*argv.offset(i) as *const libc::c_char)
                };
                // to_string_lossy replaces invalid UTF-8 with U+FFFD; this
                // avoids the intermediate Vec the previous version allocated
                // via `to_bytes().to_vec()`.
                cstr.to_string_lossy().into_owned()
            })
            .collect();
        ArgsWeechat {
            // Move `args` directly into the iterator — no clone needed.
            iter: args.into_iter(),
        }
    }
}
// Forwarding iterator: yields each argument string by delegating to the
// inner `vec::IntoIter`.
impl Iterator for ArgsWeechat {
    type Item = String;
    fn next(&mut self) -> Option<String> {
        self.iter.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact, since the inner iterator is over a Vec.
        self.iter.size_hint()
    }
}
// The remaining argument count is always known exactly.
impl ExactSizeIterator for ArgsWeechat {
    fn len(&self) -> usize {
        self.iter.len()
    }
}
// Arguments can also be consumed from the back, delegating to the inner iterator.
impl DoubleEndedIterator for ArgsWeechat {
    fn next_back(&mut self) -> Option<String> {
        self.iter.next_back()
    }
}
/// Status for updating options
// Discriminants mirror the WEECHAT_CONFIG_OPTION_SET_* constants from the
// weechat C API so conversions are a direct cast.
pub enum OptionChanged {
    /// The option was successfully changed.
    Changed = weechat_sys::WEECHAT_CONFIG_OPTION_SET_OK_CHANGED as isize,
    /// The options value has not changed.
    Unchanged = weechat_sys::WEECHAT_CONFIG_OPTION_SET_OK_SAME_VALUE as isize,
    /// The option was not found.
    NotFound = weechat_sys::WEECHAT_CONFIG_OPTION_SET_OPTION_NOT_FOUND as isize,
    /// An error occurred changing the value.
    Error = weechat_sys::WEECHAT_CONFIG_OPTION_SET_ERROR as isize,
}
impl OptionChanged {
    // Convert a raw weechat return code into the enum.
    // NOTE(review): panics via unreachable!() on any value outside the four
    // known constants — assumes the C API never returns anything else; a
    // future weechat version adding a code would crash here. TODO confirm.
    pub(crate) fn from_int(v: i32) -> OptionChanged {
        use OptionChanged::*;
        match v {
            weechat_sys::WEECHAT_CONFIG_OPTION_SET_OK_CHANGED => Changed,
            weechat_sys::WEECHAT_CONFIG_OPTION_SET_OK_SAME_VALUE => Unchanged,
            weechat_sys::WEECHAT_CONFIG_OPTION_SET_OPTION_NOT_FOUND => NotFound,
            weechat_sys::WEECHAT_CONFIG_OPTION_SET_ERROR => Error,
            _ => unreachable!(),
        }
    }
}
/// Main Weechat struct that encapsulates common weechat API functions.
/// It has a similar API as the weechat script API.
pub struct Weechat {
    // Raw pointer to the plugin's C struct; assumed non-null and valid for
    // the plugin's lifetime (checked non-null in `from_ptr`).
    pub(crate) ptr: *mut t_weechat_plugin,
}
impl Weechat {
    /// Create a Weechat object from a C t_weechat_plugin pointer.
    /// * `ptr` - Pointer of the weechat plugin.
    ///
    /// Panics if `ptr` is null; validity beyond non-null is the caller's
    /// responsibility.
    pub fn from_ptr(ptr: *mut t_weechat_plugin) -> Weechat {
        assert!(!ptr.is_null());
        Weechat { ptr }
    }
    #[inline]
    // Borrow the underlying C plugin struct.
    // SAFETY relies on `ptr` staying valid for the plugin's lifetime.
    pub(crate) fn get(&self) -> &t_weechat_plugin {
        unsafe { &*self.ptr }
    }
    /// Write a message in WeeChat log file (weechat.log).
    pub fn log(&self, msg: &str) {
        let log_printf = self.get().log_printf.unwrap();
        // Pass the message through a "%s" format so user text containing
        // '%' is not interpreted by the C printf.
        let fmt = LossyCString::new("%s");
        let msg = LossyCString::new(msg);
        unsafe {
            log_printf(fmt.as_ptr(), msg.as_ptr());
        }
    }
    /// Display a message on the core weechat buffer.
    pub fn print(&self, msg: &str) {
        let printf_date_tags = self.get().printf_date_tags.unwrap();
        // Same "%s" indirection as `log` to avoid printf interpretation.
        let fmt = LossyCString::new("%s");
        let msg = LossyCString::new(msg);
        unsafe {
            printf_date_tags(
                // Null buffer means the core buffer; no date, no tags.
                ptr::null_mut(),
                0,
                ptr::null(),
                fmt.as_ptr(),
                msg.as_ptr(),
            );
        }
    }
    /// Return a string color code for display.
    /// * `color_name` - name the color
    pub fn color(&self, color_name: &str) -> Cow<str> {
        let weechat_color = self.get().color.unwrap();
        let color_name = LossyCString::new(color_name);
        unsafe {
            // The returned pointer is owned by weechat; we only borrow it
            // long enough to copy/borrow it into a Cow.
            let color = weechat_color(color_name.as_ptr());
            CStr::from_ptr(color).to_string_lossy()
        }
    }
    /// Retrieve a prefix value
    ///
    /// Valid prefixes are:
    /// * error
    /// * network
    /// * action
    /// * join
    /// * quit
    ///
    /// An empty string will be returned if the prefix is not found
    pub fn get_prefix(&self, prefix: &str) -> Cow<str> {
        let prefix_fn = self.get().prefix.unwrap();
        let prefix = LossyCString::new(prefix);
        unsafe { CStr::from_ptr(prefix_fn(prefix.as_ptr())).to_string_lossy() }
    }
    /// Get some info from Weechat or a plugin.
    /// * `info_name` - name the info
    /// * `arguments` - arguments for the info
    ///
    /// Returns `None` when the info is unknown (the C API returns null).
    pub fn info_get(
        &self,
        info_name: &str,
        arguments: &str,
    ) -> Option<Cow<str>> {
        let info_get = self.get().info_get.unwrap();
        let info_name = LossyCString::new(info_name);
        let arguments = LossyCString::new(arguments);
        unsafe {
            let info =
                info_get(self.ptr, info_name.as_ptr(), arguments.as_ptr());
            if info.is_null() {
                None
            } else {
                Some(CStr::from_ptr(info).to_string_lossy())
            }
        }
    }
    /// Get value of a plugin option
    ///
    /// Returns `None` when the option is not set (the C API returns null).
    pub fn get_plugin_option(&self, option: &str) -> Option<Cow<str>> {
        let config_get_plugin = self.get().config_get_plugin.unwrap();
        let option_name = LossyCString::new(option);
        unsafe {
            let option = config_get_plugin(self.ptr, option_name.as_ptr());
            if option.is_null() {
                None
            } else {
                Some(CStr::from_ptr(option).to_string_lossy())
            }
        }
    }
    /// Set the value of a plugin option
    ///
    /// Returns the status reported by weechat (changed/unchanged/not found/error).
    pub fn set_plugin_option(
        &self,
        option: &str,
        value: &str,
    ) -> OptionChanged {
        let config_set_plugin = self.get().config_set_plugin.unwrap();
        let option_name = LossyCString::new(option);
        let value = LossyCString::new(value);
        unsafe {
            let result = config_set_plugin(
                self.ptr,
                option_name.as_ptr(),
                value.as_ptr(),
            );
            OptionChanged::from_int(result as i32)
        }
    }
    /// Evaluate a weechat expression and return the result
    //
    // TODO: Add hashtable options
    pub fn eval_string_expression(&self, expr: &str) -> Option<Cow<str>> {
        let string_eval_expression = self.get().string_eval_expression.unwrap();
        let expr = LossyCString::new(expr);
        unsafe {
            let result = string_eval_expression(
                expr.as_ptr(),
                // No pointers, no extra variables, no options hashtables.
                ptr::null_mut(),
                ptr::null_mut(),
                ptr::null_mut(),
            );
            if result.is_null() {
                None
            } else {
                Some(CStr::from_ptr(result).to_string_lossy())
            }
        }
    }
}
|
use crate::Polynomial;
use core::ops::{Add, AddAssign};
use num::Zero;
use smallvec::smallvec;
impl<T> Add<Polynomial<T>> for Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero,
{
    type Output = Polynomial<T>;
    /// Consume both operands; reuses `self`'s storage via `+=`.
    fn add(mut self, rhs: Polynomial<T>) -> Polynomial<T> {
        self += &rhs;
        self
    }
}
impl<T> Add<&Polynomial<T>> for Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero,
{
    type Output = Polynomial<T>;
    /// Consume `self` and add the borrowed `rhs` in place.
    fn add(mut self, rhs: &Polynomial<T>) -> Polynomial<T> {
        self += rhs;
        self
    }
}
impl<T> Add<Polynomial<T>> for &Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero,
{
    type Output = Polynomial<T>;
    /// Addition is commutative here, so accumulate into the owned `rhs`
    /// to avoid cloning `self`.
    fn add(self, mut rhs: Polynomial<T>) -> Polynomial<T> {
        rhs += self;
        rhs
    }
}
impl<T> Add<&Polynomial<T>> for &Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero + Clone,
{
    type Output = Polynomial<T>;
    /// Neither operand is owned, so one clone of `self` is unavoidable.
    fn add(self, rhs: &Polynomial<T>) -> Polynomial<T> {
        let mut sum = self.clone();
        sum += rhs;
        sum
    }
}
impl<T> AddAssign<Polynomial<T>> for Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero,
{
    /// Owned-rhs form: delegate to the by-reference implementation.
    fn add_assign(&mut self, rhs: Polynomial<T>) {
        *self += &rhs;
    }
}
impl<T> AddAssign<&Polynomial<T>> for Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero,
{
    /// Coefficient-wise addition: zero-pad `self` up to `rhs`'s length if
    /// needed, add element-wise, then trim leading zero coefficients.
    fn add_assign(&mut self, rhs: &Polynomial<T>) {
        let ours = self.rev_coeffs.len();
        let theirs = rhs.rev_coeffs.len();
        if ours < theirs {
            // Clippy flags subtraction inside an AddAssign impl; it is the
            // length difference, not arithmetic on the values.
            #[allow(clippy::suspicious_op_assign_impl)]
            let missing = theirs - ours;
            self.rev_coeffs.reserve(missing);
            // SmallVec has no resize_with, so pad with explicit zeros.
            self.rev_coeffs.extend((0..missing).map(|_| T::zero()));
        }
        for (lhs_c, rhs_c) in self.rev_coeffs.iter_mut().zip(rhs.rev_coeffs.iter()) {
            *lhs_c += rhs_c;
        }
        self.fixup_coefficients();
    }
}
impl<T> Zero for Polynomial<T>
where
    T: for<'r> AddAssign<&'r T> + Zero,
{
    /// The zero polynomial: a single zero coefficient.
    fn zero() -> Self {
        Self {
            rev_coeffs: smallvec![T::zero()],
        }
    }
    /// A polynomial is zero iff it is constant and that constant is zero.
    // The unwrap is safe as long as rev_coeffs is never empty — the
    // representation invariant maintained by `zero()`/`fixup_coefficients`.
    fn is_zero(&self) -> bool {
        self.order() == 0 && self.rev_coeffs.first().unwrap().is_zero()
    }
}
#[cfg(test)]
mod tests {
    use crate::*;
    // Exercises all four Add impl combinations (owned/borrowed on each side)
    // and leading-zero trimming after addition.
    #[test]
    fn test_add() {
        let a = Polynomial::new(coefficients![1f32, 3.0, 3.0, 0.0]);
        let b = Polynomial::new(coefficients![1f32, 0.0, 1.0]);
        let expected = coefficients![1f32, 4.0, 3.0, 1.0];
        // Polynomial + Polynomial
        let c = a.clone() + b.clone();
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Polynomial + Polynomial (swapped)
        let c = b.clone() + a.clone();
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Polynomial + &Polynomial
        let c = a.clone() + &b;
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // &Polynomial + Polynomial
        let c = &a + b.clone();
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // &Polynomial + &Polynomial
        let c = &a + &b;
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Removal of leading zeros
        let b = Polynomial::new(coefficients![-1f32, -3.0, -3.0, 1.0]);
        let c = a + b;
        assert_eq!(c.order(), 0);
        assert_eq!(c.coeffs(), coefficients![1f32]);
    }
    // Exercises both AddAssign impls (owned and borrowed rhs) in both
    // orders, plus leading-zero trimming.
    #[test]
    fn test_add_assign() {
        let a = Polynomial::new(coefficients![1f32, 3.0, 3.0, 0.0]);
        let b = Polynomial::new(coefficients![1f32, 0.0, 1.0]);
        let expected = coefficients![1f32, 4.0, 3.0, 1.0];
        // Polynomial += Polynomial
        let mut c = a.clone();
        c += b.clone();
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Polynomial += &Polynomial
        let mut c = a.clone();
        c += &b;
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Polynomial += Polynomial (swapped)
        let mut c = b.clone();
        c += a.clone();
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Polynomial += &Polynomial (swapped)
        let mut c = b.clone();
        c += &a;
        assert_eq!(c.order(), 3);
        assert_eq!(c.coeffs(), expected);
        // Removal of leading zeros
        let b = Polynomial::new(coefficients![-1f32, -3.0, -3.0, 1.0]);
        let mut c = a.clone();
        c += b;
        assert_eq!(c.order(), 0);
        assert_eq!(c.coeffs(), coefficients![1f32]);
    }
}
|
use seed::{prelude::*, *};
// ----- ------
// Init
// ----- -----
// Seed init hook: the app has no state, so just construct the unit Model.
fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {
    Model
}
// ----- ------
// Model
// ----- -----
// Unit model: this page is static and carries no application state.
struct Model;
// ------ ------
// Update
// ----- ------
// `Msg` is intentionally uninhabited: the page produces no events.
// The attribute silences clippy's lint against empty enums.
#[allow(clippy::empty_enum)]
enum Msg {}
fn update(_: Msg, _: &mut Model, _: &mut impl Orders<Msg>) {}
// ------ ------
// View
// ------ ------
/// Renders the whole page: an inline markdown header, the bundled
/// `examples.md` document, and a pre-rendered HTML footer.
fn view(_model: &Model) -> Node<Msg> {
    div![
        // The class required by GitHub styles. See `index.html`.
        C!["markdown-body"],
        // Markdown converted to DOM nodes at runtime by `md!`.
        md!(md_header()),
        md!(include_str!("../md/examples.md")),
        // `footer.html` is generated by `build.rs` during compilation.
        raw!(include_str!("../md/generated_html/footer.html")),
    ]
}
/// Markdown source for the page header. Kept in code (rather than an
/// included file) because it is short; rendered by `md!` in `view`.
const fn md_header() -> &'static str {
    "# Markdown Example
Intended as a demo of using `md!` for markdown conversion.
And how to convert MD to HTML during compilation and include the result in the app code.
```bash
cargo make start
```
---
Open [127.0.0.1](//127.0.0.1:8000) in your browser."
}
// ------ ------
// Start
// ------ ------
// Wasm entry point: mounts the app into the element with id "app",
// wiring together the `init`, `update` and `view` functions above.
#[wasm_bindgen(start)]
pub fn start() {
    App::start("app", init, update, view);
}
|
//! # UAVCAN implementation
//!
//! The intent with this implementation right now is to present a transport
//! and session-management agnostic interface for UAVCAN. What I'm working on
//! here is not meant to implement the higher-level protocol features, such
//! as automatic heartbeat publication. It is simply meant to manage ingesting
//! and producing raw frames to go on the bus. There is room to provide
//! application-level constructs in this crate, but that's not what I'm working
//! on right now.
//!
//! ## Comparison to canadensis
//!
//! The only other Rust UAVCAN implementation with any real progess at the
//! moment is canadensis. I *believe* that it is fully functional but I haven't
//! verified that.
//!
//! canadensis seems to be providing a more specific implementation (CAN-only)
//! that provides more application level features (e.g. a Node w/ Heartbeat
//! publishing) that relies on a global allocator. The intent (or experiment)
//! here is to provide a single unified interface for different transports
//! and storage backends. Application level functionality can live on top of
//! this. I can see issues with this running into issues in multi-threaded
//! environments, but I'll get to those when I get to them.
#![no_std]
#![deny(warnings)]
#![feature(generic_associated_types)]
#![feature(test)]
#[allow(unused_imports)]
#[cfg(feature = "std")]
#[macro_use]
extern crate std;
#[cfg(test)]
extern crate test;
#[macro_use]
extern crate num_derive;
extern crate alloc;
pub mod time;
mod crc16;
pub mod transfer;
pub mod transport;
pub mod types;
pub use node::Node;
use time::Duration;
pub use transfer::TransferKind;
pub use streaming_iterator::StreamingIterator;
mod internal;
mod node;
pub mod session;
use types::*;
/// Protocol errors possible from receiving incoming frames.
#[derive(Copy, Clone, Debug)]
pub enum RxError {
    /// Start-of-transfer frame did not carry the expected toggle value
    TransferStartMissingToggle,
    /// Anonymous transfers must only use a single frame
    AnonNotSingleFrame,
    /// Frames that are not last cannot have less than the maximum MTU
    NonLastUnderUtilization,
    /// No type of frame can contain empty data, must always have at least a tail byte
    FrameEmpty,
    /// Id field is formatted incorrectly
    InvalidCanId,
    /// Non-start frame received without session
    NewSessionNoStart,
    /// Session has expired
    Timeout,
    /// Frame carries an unexpected transfer ID
    // NOTE(review): the original doc comment was truncated ("Frame is part of new");
    // the wording above is inferred from the variant name — confirm against usage.
    InvalidTransferId,
    /// Internal SessionManager error
    SessionError(session::SessionError),
}
/// Errors that can be caused by incorrect parameters for transmission
///
/// TODO I should be able to capture these errors in the type system, making it impossible to do,
/// but this is still a first pass, so I'll leave them as runtime for now.
#[derive(Copy, Clone, Debug)]
pub enum TxError {
    /// Anonymous transfers must fit in a single frame
    AnonNotSingleFrame,
    /// Service transfers require a source node ID
    ServiceNoSourceID,
    /// Service transfers require a destination node ID
    ServiceNoDestinationID,
}
// TODO could replace with custom impl's to reduce dependencies
// TODO how could I represent more priorities for different transports?
/// Protocol-level priorities.
///
/// Transports are supposed to be able to support more than these base 8
/// priorities, but there is currently no API for that.
///
/// `Ord`/`PartialOrd` are derived, so variants compare in declaration order:
/// `Exceptional` orders before `Optional`.
#[derive(FromPrimitive, ToPrimitive, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub enum Priority {
    Exceptional,
    Immediate,
    Fast,
    High,
    Nominal,
    Low,
    Slow,
    Optional,
}
/// Simple subscription type bundling a transfer kind and port ID together
/// with the payload extent and session timeout.
// NOTE(review): the original doc comment was truncated ("Simple subscription
// type to"); summary above is derived from the fields.
pub struct Subscription {
    transfer_kind: TransferKind,
    port_id: PortId,
    extent: usize,
    timeout: Duration,
}
impl Subscription {
    /// Creates a subscription from its four constituent fields.
    pub fn new(
        transfer_kind: TransferKind,
        port_id: PortId,
        extent: usize,
        timeout: Duration,
    ) -> Self {
        Self {
            transfer_kind,
            port_id,
            extent,
            timeout,
        }
    }
}
impl PartialEq for Subscription {
    /// Two subscriptions are equal when they target the same transfer kind
    /// and port; `extent` and `timeout` are not part of the identity.
    fn eq(&self, other: &Self) -> bool {
        self.transfer_kind == other.transfer_kind && self.port_id == other.port_id
    }
}
|
use crate::api::models::lock::Lock;
use crate::api::Result;
use crate::http::client::Client;
use std::borrow::Borrow;
use std::collections::HashMap;
/// API endpoint wrapper for `locks` resources, holding the shared HTTP client.
pub struct Locks {
    client: Client,
}
impl Locks {
    /// Wraps an already-configured HTTP client.
    pub fn new(client: Client) -> Locks {
        Locks { client }
    }
    /// Fetches all locks with no query parameters.
    pub async fn all(&mut self) -> Result<Vec<Lock>> {
        // Delegate with an explicitly-typed empty parameter list so the
        // generic bounds of `all_params` can be resolved.
        self.all_params(Vec::<(String, String)>::new()).await
    }
    /// Fetches all locks, forwarding `parameters` as query parameters.
    pub async fn all_params<I, K, V>(&mut self, parameters: I) -> Result<Vec<Lock>>
    where
        I: IntoIterator,
        I::Item: Borrow<(K, V)>,
        K: AsRef<str>,
        V: AsRef<str>,
    {
        self.client.get_with_params("locks", parameters).await
    }
    /// Fetches a single lock by ID (`GET locks/{lock_id}`).
    pub async fn show(&mut self, lock_id: &str) -> Result<Lock> {
        let endpoint = format!("locks/{}", lock_id);
        self.client.get(&endpoint).await
    }
    /// Searches locks: `payload` becomes the POST body, `parameters` the
    /// query string (`POST locks/search`).
    pub async fn search<I, K, V>(
        &mut self,
        payload: HashMap<&str, &str>,
        parameters: I,
    ) -> Result<Vec<Lock>>
    where
        I: IntoIterator,
        I::Item: Borrow<(K, V)>,
        K: AsRef<str>,
        V: AsRef<str>,
    {
        self.client
            .post_with_params("locks/search", payload, parameters)
            .await
    }
    /// Queries `locks/unlocked` with `{"ids": [...]}` built from the given
    /// transaction IDs.
    pub async fn unlocked(&mut self, transaction_ids: Vec<&str>) -> Result<Vec<Lock>> {
        let mut payload = HashMap::<&str, Vec<&str>>::new();
        payload.insert("ids", transaction_ids);
        self.client.post("locks/unlocked", payload).await
    }
}
|
//! Based on the naive LMD-GHOST fork choice rule implementation in the specification:
//! <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md>
//!
//! `assert`s from Python are represented by statements that either delay the processing of the
//! offending object or return `Err`. All other operations that can raise exceptions in Python
//! (like indexing into `dict`s) are represented by statements that panic on failure.
use core::{cmp::Ordering, convert::TryInto as _, mem};
use std::collections::{BTreeMap, HashMap};
use anyhow::{ensure, Result};
use error_utils::DebugAsError;
use helper_functions::{beacon_state_accessors, crypto, misc, predicates};
use log::info;
use maplit::hashmap;
use thiserror::Error;
use transition_functions::process_slot;
use types::{
config::Config,
primitives::{Epoch, Gwei, Slot, ValidatorIndex, H256},
types::{Attestation, BeaconBlock, Checkpoint},
BeaconState,
};
// `BeaconBlock` is a large type, so the variant carrying two of them
// dominates the enum's size — hence the lint allowance.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Error)]
enum Error<C: Config> {
    #[error("slot {new_slot} is not later than {old_slot}")]
    SlotNotLater { old_slot: Slot, new_slot: Slot },
    #[error("block is not a descendant of finalized block (block: {block:?}, finalized_block: {finalized_block:?})")]
    NotDescendantOfFinalized {
        block: BeaconBlock<C>,
        finalized_block: BeaconBlock<C>,
    },
}
/// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#latestmessage>
type LatestMessage = Checkpoint;
/// An object whose processing was postponed — either until a missing parent
/// block arrives or until a future slot is reached — to be retried later.
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
enum DelayedObject<C: Config> {
    BeaconBlock(BeaconBlock<C>),
    Attestation(Attestation<C>),
}
/// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#store>
pub struct Store<C: Config> {
    // Current slot, advanced by `on_slot`.
    slot: Slot,
    justified_checkpoint: Checkpoint,
    finalized_checkpoint: Checkpoint,
    // `blocks` and `block_states` could be combined into a single map.
    // We've left them separate to match the specification more closely.
    blocks: HashMap<H256, BeaconBlock<C>>,
    block_states: HashMap<H256, BeaconState<C>>,
    // States advanced to each checkpoint's epoch start, filled lazily in `on_attestation`.
    checkpoint_states: HashMap<Checkpoint, BeaconState<C>>,
    // The most recent vote recorded for each validator.
    latest_messages: HashMap<ValidatorIndex, LatestMessage>,
    // Extra fields used for delaying and retrying objects.
    delayed_until_block: HashMap<H256, Vec<DelayedObject<C>>>,
    delayed_until_slot: BTreeMap<Slot, Vec<DelayedObject<C>>>,
}
impl<C: Config> Store<C> {
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#get_genesis_store>
    pub fn new(genesis_state: BeaconState<C>) -> Self {
        // The way the genesis block is constructed makes it possible for many parties to
        // independently produce the same block. But why does the genesis block have to
        // exist at all? Perhaps the first block could be proposed by a validator as well
        // (and not necessarily in slot 0)?
        let genesis_block = BeaconBlock {
            // Note that:
            // - `BeaconBlock.body.eth1_data` is not set to `state.latest_eth1_data`.
            // - `BeaconBlock.slot` is set to 0 even if `C::genesis_slot()` is not 0.
            state_root: crypto::hash_tree_root(&genesis_state),
            ..BeaconBlock::default()
        };
        let epoch = C::genesis_epoch();
        let root = crypto::signed_root(&genesis_block);
        // Genesis serves as both the justified and the finalized checkpoint.
        let checkpoint = Checkpoint { epoch, root };
        Self {
            slot: genesis_state.slot,
            justified_checkpoint: checkpoint,
            finalized_checkpoint: checkpoint,
            blocks: hashmap! {root => genesis_block},
            block_states: hashmap! {root => genesis_state.clone()},
            checkpoint_states: hashmap! {checkpoint => genesis_state},
            latest_messages: hashmap! {},
            delayed_until_slot: BTreeMap::new(),
            delayed_until_block: HashMap::new(),
        }
    }
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#get_head>
    ///
    /// Unlike the `get_head` function in the specification, this returns the [`BeaconState`]
    /// produced after processing the current head block.
    pub fn head_state(&self) -> &BeaconState<C> {
        let mut current_root = self.justified_checkpoint.root;
        let justified_slot = Self::epoch_start_slot(self.justified_checkpoint.epoch);
        // Greedy walk from the justified root: at each step descend into the
        // child with the greatest attesting balance.
        let head_root = loop {
            let mut child_with_plurality = None;
            for (&root, block) in &self.blocks {
                if block.parent_root == current_root && justified_slot < block.slot {
                    let balance = self.latest_attesting_balance(root, block);
                    // `(balance, root)` tuples compare lexicographically, so
                    // balance ties are broken by the numerically larger root.
                    child_with_plurality = Some((balance, root)).max(child_with_plurality);
                }
            }
            match child_with_plurality {
                Some((_, root)) => current_root = root,
                // A node with no children is the head.
                None => break current_root,
            }
        };
        &self.block_states[&head_root]
    }
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#on_tick>
    ///
    /// Unlike `on_tick` in the specification, this should be called at the start of a slot instead
    /// of every second. The fork choice rule doesn't need a precise timestamp.
    pub fn on_slot(&mut self, slot: Slot) -> Result<()> {
        ensure!(
            self.slot < slot,
            Error::<C>::SlotNotLater {
                old_slot: self.slot,
                new_slot: slot
            },
        );
        self.slot = slot;
        // Objects that were waiting for this (or an earlier) slot can now be retried.
        self.retry_delayed_until_slot(slot)
    }
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#on_block>
    pub fn on_block(&mut self, block: BeaconBlock<C>) -> Result<()> {
        // The specification uses 2 different ways to calculate what appears to be the same value:
        // - <https://github.com/ethereum/eth2.0-specs/blame/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#L155>
        // - <https://github.com/ethereum/eth2.0-specs/blame/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#L159>
        // We assume this is an oversight.
        let finalized_slot = Self::epoch_start_slot(self.finalized_checkpoint.epoch);
        // Ignore blocks from slots not later than the finalized block. Doing so ensures that:
        // - The genesis block is accepted even though it does not represent a state transition.
        // - Blocks that are already known and are received again are always accepted.
        if block.slot <= finalized_slot {
            return Ok(());
        }
        // Missing parent: park the block until the parent shows up.
        let parent_state = if let Some(state) = self.block_states.get(&block.parent_root) {
            state
        } else {
            self.delay_until_block(block.parent_root, DelayedObject::BeaconBlock(block));
            return Ok(());
        };
        // Block from the future: park it until its slot is reached.
        if self.slot < block.slot {
            self.delay_until_slot(block.slot, DelayedObject::BeaconBlock(block));
            return Ok(());
        }
        let block_root = crypto::signed_root(&block);
        ensure!(
            self.ancestor(block_root, &block, finalized_slot) == self.finalized_checkpoint.root,
            Error::NotDescendantOfFinalized {
                block,
                finalized_block: self.blocks[&self.finalized_checkpoint.root].clone(),
            },
        );
        let mut state = parent_state.clone();
        process_slot::state_transition(&mut state, &block, true);
        let state = self.block_states.entry(block_root).or_insert(state);
        // Add `block` to `self.blocks` only when it's passed all checks.
        // See <https://github.com/ethereum/eth2.0-specs/issues/1288>.
        self.blocks.insert(block_root, block);
        // Advance the justified/finalized checkpoints if the new state is ahead.
        if self.justified_checkpoint.epoch < state.current_justified_checkpoint.epoch {
            self.justified_checkpoint = state.current_justified_checkpoint;
        }
        if self.finalized_checkpoint.epoch < state.finalized_checkpoint.epoch {
            self.finalized_checkpoint = state.finalized_checkpoint;
        }
        // Anything that was waiting for this block can now be retried.
        self.retry_delayed_until_block(block_root)
    }
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#on_attestation>
    pub fn on_attestation(&mut self, attestation: Attestation<C>) -> Result<()> {
        let target = attestation.data.target;
        // Unknown target block: park the attestation until it arrives.
        let base_state = if let Some(state) = self.block_states.get(&target.root) {
            state
        } else {
            self.delay_until_block(target.root, DelayedObject::Attestation(attestation));
            return Ok(());
        };
        // Target epoch not started yet: park until its first slot.
        let target_epoch_start = Self::epoch_start_slot(target.epoch);
        if self.slot < target_epoch_start {
            self.delay_until_slot(target_epoch_start, DelayedObject::Attestation(attestation));
            return Ok(());
        }
        // Lazily advance the target block's state to the target epoch start.
        let target_state = self.checkpoint_states.entry(target).or_insert_with(|| {
            let mut target_state = base_state.clone();
            process_slot::process_slots(&mut target_state, target_epoch_start);
            target_state
        });
        // `<=` — the attestation only becomes usable after its slot has passed.
        if self.slot <= attestation.data.slot {
            self.delay_until_slot(
                attestation.data.slot,
                DelayedObject::Attestation(attestation),
            );
            return Ok(());
        }
        let new_message = LatestMessage {
            epoch: target.epoch,
            root: attestation.data.beacon_block_root,
        };
        // Validate before mutating `latest_messages`.
        let indexed_attestation =
            beacon_state_accessors::get_indexed_attestation(target_state, &attestation)
                .map_err(DebugAsError::new)?;
        predicates::validate_indexed_attestation(target_state, &indexed_attestation)
            .map_err(DebugAsError::new)?;
        // Record the vote for each attester, keeping only the newest per validator.
        for index in indexed_attestation.attesting_indices.iter().copied() {
            let old_message = self.latest_messages.entry(index).or_default();
            if old_message.epoch < new_message.epoch {
                *old_message = new_message;
            }
        }
        Ok(())
    }
    /// Looks up a block by root, if present.
    pub fn block(&self, root: H256) -> Option<&BeaconBlock<C>> {
        self.blocks.get(&root)
    }
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#get_latest_attesting_balance>
    ///
    /// The extra `block` parameter is used to avoid a redundant block lookup.
    fn latest_attesting_balance(&self, root: H256, block: &BeaconBlock<C>) -> Gwei {
        let justified_state = &self.checkpoint_states[&self.justified_checkpoint];
        let active_indices = beacon_state_accessors::get_active_validator_indices(
            justified_state,
            beacon_state_accessors::get_current_epoch(justified_state),
        );
        // Sum the effective balances of active validators whose latest vote
        // descends from `root`.
        active_indices
            .into_iter()
            .filter_map(|index| {
                let latest_message = self.latest_messages.get(&index)?;
                let latest_message_block = &self.blocks[&latest_message.root];
                if self.ancestor(latest_message.root, latest_message_block, block.slot) == root {
                    // The `Result::expect` call would be avoidable if there were a function like
                    // `beacon_state_accessors::get_active_validator_indices` that returned
                    // references to the validators in addition to their indices.
                    let index: usize = index
                        .try_into()
                        .expect("validator index should fit in usize");
                    Some(justified_state.validators[index].effective_balance)
                } else {
                    None
                }
            })
            .sum()
    }
    /// <https://github.com/ethereum/eth2.0-specs/blob/65b615a4d4cf75a50b29d25c53f1bc5422770ae5/specs/core/0_fork-choice.md#get_ancestor>
    ///
    /// The extra `block` parameter is used to avoid adding `block` to `self.blocks` before
    /// verifying it. See <https://github.com/ethereum/eth2.0-specs/issues/1288>.
    /// The parent of `block` must still be present in `self.blocks`, however.
    fn ancestor(&self, root: H256, block: &BeaconBlock<C>, slot: Slot) -> H256 {
        // Walk the parent chain until reaching `slot`; a zero hash signals
        // that `block`'s chain starts after `slot`.
        match block.slot.cmp(&slot) {
            Ordering::Less => H256::zero(),
            Ordering::Equal => root,
            Ordering::Greater => {
                let parent_root = block.parent_root;
                let parent_block = &self.blocks[&block.parent_root];
                self.ancestor(parent_root, parent_block, slot)
            }
        }
    }
    /// First slot of the given epoch.
    fn epoch_start_slot(epoch: Epoch) -> Slot {
        misc::compute_start_slot_at_epoch::<C>(epoch)
    }
    /// Queues `object` to be retried once the block `block_root` is known.
    fn delay_until_block(&mut self, block_root: H256, object: DelayedObject<C>) {
        info!("object delayed until block {:?}: {:?}", block_root, object);
        self.delayed_until_block
            .entry(block_root)
            .or_default()
            .push(object)
    }
    /// Queues `object` to be retried once `slot` is reached.
    fn delay_until_slot(&mut self, slot: Slot, object: DelayedObject<C>) {
        info!("object delayed until slot {}: {:?}", slot, object);
        self.delayed_until_slot
            .entry(slot)
            .or_default()
            .push(object)
    }
    /// Retries everything that was waiting for `block_root`.
    fn retry_delayed_until_block(&mut self, block_root: H256) -> Result<()> {
        if let Some(delayed_objects) = self.delayed_until_block.remove(&block_root) {
            self.retry_delayed(delayed_objects)?;
        }
        Ok(())
    }
    /// Retries everything queued for slots up to and including `slot`.
    fn retry_delayed_until_slot(&mut self, slot: Slot) -> Result<()> {
        // `split_off` keeps slots > `slot` for later; the remainder is due now.
        let later_slots = self.delayed_until_slot.split_off(&(slot + 1));
        let fulfilled_slots = mem::replace(&mut self.delayed_until_slot, later_slots);
        for (_, objects) in fulfilled_slots {
            self.retry_delayed(objects)?;
        }
        Ok(())
    }
    // Delayed objects are retried recursively, thus a long chain of them could overflow the stack.
    // It may be that in practice only one object will be delayed for a particular reason most of
    // the time. In that case this function would effectively be tail-recursive. The same applies to
    // slots in `Store::retry_delayed_until_slot`. The `tramp` crate may be of use in that scenario.
    // Or `become`, if that ever gets implemented.
    fn retry_delayed(&mut self, objects: Vec<DelayedObject<C>>) -> Result<()> {
        for object in objects {
            info!("retrying delayed object: {:?}", object);
            match object {
                DelayedObject::BeaconBlock(block) => self.on_block(block)?,
                DelayedObject::Attestation(attestation) => self.on_attestation(attestation)?,
            }
        }
        Ok(())
    }
}
// There used to be tests here but we were forced to omit them to save time.
|
use server::types::Sqrt;
use server::*;
use specs::*;
use server::component::flag::*;
use server::component::time::ThisFrame;
use component::*;
use config as ctfconfig;
use std::cmp::Ordering;
/// System that hands a free flag to the closest eligible player touching it.
pub struct PickupFlagSystem;
/// Component/resource accesses required by [`PickupFlagSystem`].
#[derive(SystemData)]
pub struct PickupFlagSystemData<'a> {
    pub config: Read<'a, Config>,
    pub entities: Entities<'a>,
    // Event channel where flag pickup/return events are published.
    pub channel: Write<'a, OnFlag>,
    pub thisframe: Read<'a, ThisFrame>,
    // Player data
    pub plane: ReadStorage<'a, Plane>,
    pub is_player: ReadStorage<'a, IsPlayer>,
    pub is_spec: ReadStorage<'a, IsSpectating>,
    pub is_dead: ReadStorage<'a, IsDead>,
    // These ones are for both
    pub pos: WriteStorage<'a, Position>,
    pub team: ReadStorage<'a, Team>,
    // Flag Data
    pub is_flag: ReadStorage<'a, IsFlag>,
    pub carrier: WriteStorage<'a, FlagCarrier>,
    pub lastdrop: ReadStorage<'a, LastDrop>,
}
impl<'a> System<'a> for PickupFlagSystem {
    type SystemData = PickupFlagSystemData<'a>;
    fn run(&mut self, mut data: Self::SystemData) {
        // Snapshot the flag entities (skipping any marked dead/spectating)
        // into an owned Vec so the storages can be re-borrowed below.
        let flags = (
            &*data.entities,
            &data.pos,
            &data.team,
            &data.carrier,
            &data.is_flag,
            &data.lastdrop,
        ).join()
            .filter(|(ent, _, _, _, _, _)| {
                data.is_dead.get(*ent).is_none() && data.is_spec.get(*ent).is_none()
            })
            .map(|(ent, pos, team, carrier, _, lastdrop)| (ent, *pos, *team, *carrier, *lastdrop))
            .collect::<Vec<(Entity, Position, Team, FlagCarrier, LastDrop)>>();
        for (f_ent, f_pos, f_team, carrier, lastdrop) in flags {
            // A carried flag cannot be picked up again.
            if carrier.0.is_some() {
                continue;
            }
            // Find the closest opposing player within pickup range.
            let nearest = (
                &*data.entities,
                &data.pos,
                &data.team,
                &data.is_player,
                &data.plane,
            ).join()
                .filter(|(_, _, p_team, _, _)| f_team != **p_team)
                .filter(|(ent, _, _, _, _)| {
                    // Check against time-since-drop
                    (data.thisframe.0 - lastdrop.time) > *ctfconfig::FLAG_NO_REGRAB_TIME
                    // Then check against contained player id
                    || lastdrop.player.map(|x| x != *ent).unwrap_or(false)
                })
                .filter_map(|(p_ent, p_pos, _, _, p_plane)| {
                    let rad = ctfconfig::FLAG_RADIUS[&p_plane];
                    let dst = (*p_pos - f_pos).length2();
                    // Cheaply reject players beyond the pickup radius using
                    // the squared distance, avoiding a sqrt per candidate.
                    if dst > rad * rad {
                        None
                    } else {
                        // Only calculate actual distance if necessary
                        Some((p_ent, dst.sqrt() - rad))
                    }
                })
                .min_by(|a, b| {
                    // Distances are floats (no total order); ties and
                    // unordered values fall through to `Greater`.
                    if a.1 < b.1 {
                        Ordering::Less
                    } else {
                        Ordering::Greater
                    }
                });
            if nearest.is_none() {
                continue;
            }
            let nearest = nearest.unwrap().0;
            let team = *data.team.get(nearest).unwrap();
            *data.carrier.get_mut(f_ent).unwrap() = FlagCarrier(Some(nearest));
            // NOTE(review): candidates were filtered to `f_team != p_team`
            // above, so `team == f_team` looks unreachable here unless a
            // flag's `Team` component can change at runtime — confirm.
            let ty = if team == f_team {
                FlagEventType::Return
            } else {
                FlagEventType::PickUp
            };
            data.channel.single_write(FlagEvent {
                ty,
                player: Some(nearest),
                flag: f_ent,
            });
        }
    }
}
use super::LoginUpdateSystem;
use server::systems::PositionUpdate;
impl SystemInfo for PickupFlagSystem {
    // This system must run after positions and logins have been processed.
    type Dependencies = (PositionUpdate, LoginUpdateSystem);
    fn name() -> &'static str {
        // NOTE(review): uses `line!()` rather than the type name, so the
        // reported name is "<module path>::<line number>". Presumably a
        // cheap compile-time-unique identifier — confirm before changing.
        concat!(module_path!(), "::", line!())
    }
    fn new() -> Self {
        Self {}
    }
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Contract sdk
#![allow(clippy::too_many_arguments)]
#[allow(unused_imports)]
#[macro_use]
extern crate contract_sdk_macro;
use std::rc::Rc;
use serde::de::DeserializeOwned;
use serde::export::PhantomData;
use serde::Serialize;
#[allow(unused_imports)]
pub use serde_json;
#[doc(hidden)]
pub use contract_sdk_macro::*;
#[allow(unused_imports)]
pub use contract_sdk_primitives::*;
pub mod import;
/// Access to the block/call/contract environment, captured once from the
/// host at construction time and shared via `Rc`.
pub struct Context {
    env: Rc<ContextEnv>,
    call_env: Rc<CallEnv>,
    contract_env: Rc<ContractEnv>,
}
/// Stateless utility functions (hashing, address derivation/validation).
pub struct Util;
/// The balance paid into the current contract call.
pub struct Pay {
    pay_value: Balance,
}
impl Context {
    /// Captures the environment from the host imports.
    pub fn new() -> ContractResult<Self> {
        let number = import::env_block_number();
        let timestamp = import::env_block_timestamp();
        // Optional values arrive through share buffers; absence maps to `None`.
        let tx_hash = option_vec_from_func(import::env_tx_hash_read)?.map(Hash);
        let contract_address =
            option_vec_from_func(import::env_contract_address_read)?.map(Address);
        let sender_address = option_vec_from_func(import::env_sender_address_read)?.map(Address);
        let context = Self {
            env: Rc::new(ContextEnv { number, timestamp }),
            call_env: Rc::new(CallEnv { tx_hash }),
            contract_env: Rc::new(ContractEnv {
                contract_address,
                sender_address,
            }),
        };
        Ok(context)
    }
    /// Block-level environment (number, timestamp).
    pub fn env(&self) -> ContractResult<Rc<ContextEnv>> {
        Ok(self.env.clone())
    }
    /// Call-level environment (transaction hash, if any).
    pub fn call_env(&self) -> ContractResult<Rc<CallEnv>> {
        Ok(self.call_env.clone())
    }
    /// Contract-level environment (contract and sender addresses).
    pub fn contract_env(&self) -> ContractResult<Rc<ContractEnv>> {
        Ok(self.contract_env.clone())
    }
    /// Serializes `{name, data}` to JSON and emits it as a contract event.
    pub fn emit_event<T: Serialize>(&self, name: String, data: T) -> ContractResult<()> {
        let event = ContractEvent { name, data };
        let event = serde_json::to_vec(&event).map_err(|_| ContractError::Serialize)?;
        import::event_write(event.len() as _, event.as_ptr() as _);
        Ok(())
    }
    /// Reads the balance of `address` from the host.
    pub fn balance_get(&self, address: &Address) -> ContractResult<Balance> {
        let address = &address.0;
        let balance = import::balance_read(address.len() as _, address.as_ptr() as _);
        Ok(balance)
    }
    /// Transfers `value` to `recipient_address`.
    /// See `balance_transfer_ea` for the error-aware ("_ea") variant.
    pub fn balance_transfer(
        &self,
        recipient_address: &Address,
        value: Balance,
    ) -> ContractResult<()> {
        let recipient_address = &recipient_address.0;
        null_from_func(|| {
            import::balance_transfer(
                recipient_address.len() as _,
                recipient_address.as_ptr() as _,
                value,
            )
        })
    }
    /// Error-aware transfer: host-side failures come back as an error
    /// message through a share buffer.
    pub fn balance_transfer_ea(
        &self,
        recipient_address: &Address,
        value: Balance,
    ) -> ContractResult<()> {
        let recipient_address = &recipient_address.0;
        null_from_func_ea(|error_share_id| {
            import::balance_transfer_ea(
                recipient_address.len() as _,
                recipient_address.as_ptr() as _,
                value,
                error_share_id,
            )
        })
    }
    /// Calls `method` on another contract with the given params and pay
    /// value, returning the raw result bytes.
    pub fn contract_execute(
        &self,
        contract_address: &Address,
        method: &str,
        params: &[u8],
        pay_value: Balance,
    ) -> ContractResult<Vec<u8>> {
        let contract_address = &contract_address.0;
        let method = method.as_bytes();
        let result = vec_from_func(|share_id| {
            import::contract_execute(
                contract_address.len() as _,
                contract_address.as_ptr() as _,
                method.len() as _,
                method.as_ptr() as _,
                params.len() as _,
                params.as_ptr() as _,
                pay_value,
                share_id,
            )
        })?;
        Ok(result)
    }
    /// Error-aware variant of `contract_execute`: result and error travel
    /// through separate share buffers.
    pub fn contract_execute_ea(
        &self,
        contract_address: &Address,
        method: &str,
        params: &[u8],
        pay_value: Balance,
    ) -> ContractResult<Vec<u8>> {
        let contract_address = &contract_address.0;
        let method = method.as_bytes();
        let result = vec_from_func_ea(|data_share_id, error_share_id| {
            import::contract_execute_ea(
                contract_address.len() as _,
                contract_address.as_ptr() as _,
                method.len() as _,
                method.as_ptr() as _,
                params.len() as _,
                params.as_ptr() as _,
                pay_value,
                data_share_id,
                error_share_id,
            )
        })?;
        Ok(result)
    }
}
impl Util {
    /// Creates the stateless utility handle.
    pub fn new() -> ContractResult<Self> {
        Ok(Util)
    }
    /// Hashes `data` via the host and returns the digest.
    pub fn hash(&self, data: &[u8]) -> ContractResult<Hash> {
        let result = vec_from_func(|share_id| {
            import::util_hash(data.len() as _, data.as_ptr() as _, share_id)
        })?;
        Ok(Hash(result))
    }
    /// Derives an address from `data` via the host.
    pub fn address(&self, data: &[u8]) -> ContractResult<Address> {
        let result = vec_from_func(|share_id| {
            import::util_address(data.len() as _, data.as_ptr() as _, share_id)
        })?;
        Ok(Address(result))
    }
    /// Validates an address on the host side; no failure is reported back
    /// through this call — see `validate_address_ea` for the error-aware form.
    pub fn validate_address(&self, address: &Address) -> ContractResult<()> {
        let data = address.0.as_slice();
        import::util_validate_address(data.len() as _, data.as_ptr() as _);
        Ok(())
    }
    /// Error-aware address validation: a failure message comes back through
    /// the share buffer and is surfaced as a contract error.
    pub fn validate_address_ea(&self, address: &Address) -> ContractResult<()> {
        let data = address.0.as_slice();
        let share_id = std::ptr::null::<u8>() as u64; // share id 0
        let error = import::util_validate_address_ea(data.len() as _, data.as_ptr() as _, share_id);
        from_error_aware(error, share_id, || Ok(()))
    }
}
impl Pay {
    /// Captures the pay value attached to the current call.
    pub fn new() -> Self {
        Self {
            pay_value: import::pay_value_read(),
        }
    }
    /// The amount paid into this call.
    pub fn pay_value(&self) -> Balance {
        self.pay_value
    }
}
impl Default for Pay {
    fn default() -> Self {
        Self::new()
    }
}
/// A single JSON-serialized value stored under a fixed storage key.
pub struct StorageValue<T>
where
    T: Serialize + DeserializeOwned,
{
    key: Vec<u8>,
    phantom: PhantomData<T>,
}
impl<T> StorageValue<T>
where
    T: Serialize + DeserializeOwned,
{
    /// Binds the value to a static storage key.
    pub fn new(storage_key: &'static [u8]) -> Self {
        StorageValue {
            key: storage_key.to_vec(),
            phantom: Default::default(),
        }
    }
    /// Reads and deserializes the value; `None` when the key is unset.
    pub fn get(&self) -> ContractResult<Option<T>> {
        storage_get(&self.key)
    }
    /// Serializes and writes the value.
    pub fn set(&self, value: &T) -> ContractResult<()> {
        storage_set(&self.key, value)
    }
    /// Removes the value from storage.
    pub fn delete(&self) -> ContractResult<()> {
        storage_delete(&self.key)
    }
}
/// Byte sequence inserted between a map's key prefix and each entry key.
const SEPARATOR: &[u8] = b"_";
/// A map of JSON-serialized values, each stored under `<prefix>_<key>`.
pub struct StorageMap<T>
where
    T: Serialize + DeserializeOwned,
{
    key: Vec<u8>,
    phantom: PhantomData<T>,
}
impl<T> StorageMap<T>
where
    T: Serialize + DeserializeOwned,
{
    /// Creates a map rooted at the given static storage key prefix.
    pub fn new(storage_key: &'static [u8]) -> Self {
        StorageMap {
            key: storage_key.to_vec(),
            phantom: Default::default(),
        }
    }
    /// Reads the entry stored under `<prefix>_<key>`, if any.
    pub fn get(&self, key: &[u8]) -> ContractResult<Option<T>> {
        storage_get(&self.full_key(key))
    }
    /// Serializes `value` and writes it under `<prefix>_<key>`.
    pub fn set(&self, key: &[u8], value: &T) -> ContractResult<()> {
        storage_set(&self.full_key(key), value)
    }
    /// Removes the entry stored under `<prefix>_<key>`.
    pub fn delete(&self, key: &[u8]) -> ContractResult<()> {
        storage_delete(&self.full_key(key))
    }
    /// Builds the composite storage key `<prefix>_<key>`.
    fn full_key(&self, key: &[u8]) -> Vec<u8> {
        [self.key.as_slice(), SEPARATOR, key].concat()
    }
}
/// Block-level environment data.
pub struct ContextEnv {
    pub number: BlockNumber,
    pub timestamp: u64,
}
/// Call-level environment data.
pub struct CallEnv {
    // `None` when the host reports no transaction hash.
    pub tx_hash: Option<Hash>,
}
/// Contract-level environment data.
pub struct ContractEnv {
    pub contract_address: Option<Address>,
    pub sender_address: Option<Address>,
}
/// Reads `key` from contract storage and JSON-deserializes the payload.
/// Returns `Ok(None)` when the key is absent.
fn storage_get<V: DeserializeOwned>(key: &[u8]) -> ContractResult<Option<V>> {
    let value = option_vec_from_func(|share_id| {
        import::storage_read(key.len() as _, key.as_ptr() as _, share_id)
    })?;
    // Inference makes `from_slice` target `Option<V>` here, so a present
    // payload deserializes into `Some(...)`.
    let value = match value {
        Some(value) => serde_json::from_slice(&value).map_err(|_| ContractError::Deserialize)?,
        None => None,
    };
    Ok(value)
}
/// JSON-serializes `value` and writes it under `key`.
/// The third argument (`1`) presumably flags the slot as present —
/// `storage_delete` passes `0`; confirm against the host ABI.
fn storage_set<V: Serialize>(key: &[u8], value: &V) -> ContractResult<()> {
    let value = serde_json::to_vec(value).map_err(|_| ContractError::Serialize)?;
    import::storage_write(
        key.len() as _,
        key.as_ptr() as _,
        1,
        value.len() as _,
        value.as_ptr() as _,
    );
    Ok(())
}
/// Clears `key` by writing an empty payload with the presence flag set to 0.
fn storage_delete(key: &[u8]) -> ContractResult<()> {
    import::storage_write(key.len() as _, key.as_ptr() as _, 0, 0, 0);
    Ok(())
}
/// Runs a host import that fills share buffer 0 and copies the result out.
// NOTE(review): `std::ptr::null::<u8>() as u64` is an obscure way to write
// share id 0 (null is address 0) — confirm the host ABI before simplifying.
fn vec_from_func<F: Fn(u64)>(f: F) -> ContractResult<Vec<u8>> {
    let share_id = std::ptr::null::<u8>() as u64;
    f(share_id);
    share_to_vec(share_id).ok_or(ContractError::ShareIllegalAccess)
}
/// Runs a host import that may fill share buffer 0; a return value of `1`
/// signals that data is present, anything else yields `None`.
fn option_vec_from_func<F: Fn(u64) -> u64>(f: F) -> ContractResult<Option<Vec<u8>>> {
    let share_id = std::ptr::null::<u8>() as u64; // share id 0
    let value = match f(share_id) {
        1 => {
            let value = share_to_vec(share_id).ok_or(ContractError::ShareIllegalAccess)?;
            Some(value)
        }
        _ => None,
    };
    Ok(value)
}
/// Runs a host import that produces no data and reports no errors.
fn null_from_func<F: Fn()>(f: F) -> ContractResult<()> {
    f();
    Ok(())
}
/// Error-aware variant of `vec_from_func`: data goes to share 0 and an
/// error message (when the import returns 1) to share 1.
fn vec_from_func_ea<F: Fn(u64, u64) -> u64>(f: F) -> ContractResult<Vec<u8>> {
    let data_share_id = std::ptr::null::<u8>() as u64; // share id 0
    let error_share_id = 1u8 as *const u8 as u64; // share id 1, via pointer cast
    let error = f(data_share_id, error_share_id);
    let get_data = || -> ContractResult<Vec<u8>> {
        share_to_vec(data_share_id).ok_or(ContractError::ShareIllegalAccess)
    };
    from_error_aware(error, error_share_id, get_data)
}
/// Error-aware variant of `null_from_func`: no data, only a possible error
/// message in share buffer 0.
fn null_from_func_ea<F: Fn(u64) -> u64>(f: F) -> ContractResult<()> {
    let error_share_id = std::ptr::null::<u8>() as u64; // share id 0
    let error = f(error_share_id);
    from_error_aware(error, error_share_id, || Ok(()))
}
/// Copies share buffer `share_id` out of the host into an owned `Vec`.
/// Returns `None` when the host reports length `u64::MAX` (no such share).
fn share_to_vec(share_id: u64) -> Option<Vec<u8>> {
    let len = import::share_len(share_id);
    match len {
        u64::MAX => None,
        _ => {
            // NOTE(review): the host writes through the raw pointer even
            // though `data` is not `mut` — works via FFI, but fragile.
            let data = vec![0u8; len as usize];
            import::share_read(share_id, data.as_ptr() as _);
            Some(data)
        }
    }
}
/// Interprets a host import's status code: `1` means a UTF-8 error message
/// is waiting in `error_share_id`; any other value defers to `get_data`.
fn from_error_aware<F: Fn() -> ContractResult<T>, T>(
    error: u64,
    error_share_id: u64,
    get_data: F,
) -> ContractResult<T> {
    match error {
        1 => {
            let error = share_to_vec(error_share_id).ok_or(ContractError::ShareIllegalAccess)?;
            let error = String::from_utf8(error).map_err(|_| ContractError::BadUTF8)?;
            Err(error.as_str().into())
        }
        _ => get_data(),
    }
}
|
use tokio::io::AsyncRead;
use crate::error::{TychoError, TychoResult};
use crate::ident::ValueIdent;
use crate::read::async_::func::{read_byte_async, read_bytes_async};
use crate::read::async_::length::read_length_async;
use crate::read::async_::number::{read_number_async, read_number_ident_async};
use crate::read::async_::string::{read_char_async, read_string_async};
use crate::{Value, Uuid};
/// Reads and decodes a value-ident byte from `reader`.
///
/// Byte `0x04` (Number) is followed by a number ident, which is consumed
/// here as well. Any unrecognized byte yields `TychoError::InvalidIdent`.
pub(crate) async fn read_value_ident_async<R: AsyncRead + Unpin>(reader: &mut R) -> TychoResult<ValueIdent> {
    let byte = read_byte_async(reader).await?;
    match byte {
        0x00 => Ok(ValueIdent::Null),
        0x01 => Ok(ValueIdent::Boolean),
        0x02 => Ok(ValueIdent::String),
        0x03 => Ok(ValueIdent::Char),
        0x04 => Ok(ValueIdent::Number(read_number_ident_async(reader).await?)),
        0x05 => Ok(ValueIdent::Bytes),
        0x06 => Ok(ValueIdent::UUID),
        _ => Err(TychoError::InvalidIdent { found: byte, expecting: "value ident".to_string() })
    }
}
pub(crate) async fn read_value_async<R: AsyncRead + Unpin>(reader: &mut R, ident: &ValueIdent) -> TychoResult<Value> {
match ident {
ValueIdent::Null => Ok(Value::Null),
ValueIdent::Boolean => Ok(Value::Boolean(read_byte_async(reader).await? == 0x01)),
ValueIdent::String => Ok(Value::String(read_string_async(reader).await?)),
ValueIdent::Char => Ok(Value::Char(read_char_async(reader).await?)),
ValueIdent::Number(n) => Ok(Value::Number(read_number_async(reader, n).await?)),
ValueIdent::Bytes => {
let length = read_length_async(reader).await?;
Ok(Value::Bytes(read_bytes_async(reader, length).await?))
}
ValueIdent::UUID => {
// suffering
let bytes = [
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
read_byte_async(reader).await?, read_byte_async(reader).await?,
];
Ok(Value::UUID(Uuid::from_slice(bytes)))
}
}
} |
extern crate hal;
use crate::graphics;
use graphics::gfxal;
/// An index buffer, this is used to tell the gpu that you wish to draw
/// parts of a vertex buffer multiple times.
pub struct IndexBuffer {
    // Backing GPU buffer, created with INDEX usage in `new`.
    buffer: gfxal::Buffer,
    /// the index type, either u32 or u16, currently this should *always* be u16
    pub index_type: hal::IndexType, //TODO: make this controllable
}
impl IndexBuffer {
    /// Creates a new index buffer, **currently `T` should always be `u16`**
    #[profiled]
    pub fn new<T>(data: &mut [T]) -> IndexBuffer {
        IndexBuffer {
            // Upload the index data into a buffer flagged for INDEX usage.
            buffer: gfxal::Buffer::new(data, hal::buffer::Usage::INDEX),
            // NOTE(review): hard-coded to U16 even though `T` is generic —
            // u32 data passed here would be misread; see the TODO on the field.
            index_type: hal::IndexType::U16,
        }
    }
    /// binds the index buffer for the next drawcall(s)
    #[profiled]
    pub fn bind(&mut self) {
        use hal::command::CommandBuffer;
        let mut ctx = crate_get_graphics_context!();
        // Record the bind into the command buffer of the in-flight frame.
        let idx = ctx.frame_idx;
        let cmd_buffer = &mut ctx.command_buffers[idx];
        unsafe {
            cmd_buffer.bind_index_buffer(hal::buffer::IndexBufferView {
                buffer: &*self.buffer.buffer,
                offset: 0,
                index_type: self.index_type,
            });
        }
    }
}
|
pub mod eighth;
pub mod fifth;
pub mod first;
pub mod fourth;
pub mod second;
pub mod sixth;
pub mod third;
|
use bevy_asset::{self, Handle};
use bevy_reflect::TypeUuid;
use bevy_render::{color::Color, renderer::RenderResources, shader::ShaderDefs, texture::Texture};
/// A material with "standard" properties used in PBR lighting
#[derive(Debug, RenderResources, ShaderDefs, TypeUuid)]
#[uuid = "dace545e-4bc6-4595-a79d-c224fc694975"]
pub struct StandardMaterial {
    /// Base albedo color of the material.
    pub albedo: Color,
    /// Optional albedo texture; gates a shader def when present.
    #[shader_def]
    pub albedo_texture: Option<Handle<Texture>>,
    /// When true the material skips lighting; not uploaded as a render
    /// resource (only toggles a shader def).
    #[render_resources(ignore)]
    #[shader_def]
    pub unlit: bool,
}
impl Default for StandardMaterial {
    /// White albedo, no texture, lit rendering.
    fn default() -> Self {
        Self {
            albedo: Color::rgb(1.0, 1.0, 1.0),
            albedo_texture: None,
            unlit: false,
        }
    }
}
impl From<Color> for StandardMaterial {
fn from(color: Color) -> Self {
StandardMaterial {
albedo: color,
..Default::default()
}
}
}
impl From<Handle<Texture>> for StandardMaterial {
fn from(texture: Handle<Texture>) -> Self {
StandardMaterial {
albedo_texture: Some(texture),
..Default::default()
}
}
}
|
// q0005_longest_palindromic_substring
struct Solution;
impl Solution {
    /// Returns the longest palindromic substring of `s`, using center
    /// expansion over all 2n-1 palindrome centers. On ties, the
    /// earliest (leftmost-center) palindrome wins, matching the
    /// original seed order.
    pub fn longest_palindrome(s: String) -> String {
        let s = s.as_bytes();
        // Idiomatic emptiness check (was `s.len() <= 0`).
        if s.is_empty() {
            return String::new();
        }
        // Seed every odd-length center (i, i), plus every even-length
        // center (i-1, i) whose two middle bytes already match.
        let mut spans = Vec::with_capacity(2 * s.len());
        for i in 0..s.len() {
            spans.push((i, i));
            if i != 0 && s[i] == s[i - 1] {
                spans.push((i - 1, i));
            }
        }
        // Expand each seed outwards while the boundary bytes match.
        for span in spans.iter_mut() {
            loop {
                let (lo, hi) = *span;
                if lo == 0 || hi + 1 == s.len() {
                    break;
                }
                if s[lo - 1] == s[hi + 1] {
                    *span = (lo - 1, hi + 1);
                } else {
                    break;
                }
            }
        }
        // Keep the widest span; strict `>` preserves first-seed tie-breaking.
        let mut best = (0, 0);
        for &(a, b) in spans.iter() {
            if b - a > best.1 - best.0 {
                best = (a, b);
            }
        }
        String::from_utf8_lossy(&s[best.0..=best.1]).to_string()
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn it_works() {
        assert_eq!(
            Solution::longest_palindrome(String::from("babad")),
            String::from("bab")
        );
        // Even-length palindromes are found via the (i-1, i) seeds.
        assert_eq!(
            Solution::longest_palindrome(String::from("cbbd")),
            String::from("bb")
        );
        // Degenerate inputs: empty and single-character strings.
        assert_eq!(Solution::longest_palindrome(String::new()), String::new());
        assert_eq!(
            Solution::longest_palindrome(String::from("a")),
            String::from("a")
        );
    }
}
|
use crate::packet::Packet;
use std::collections::VecDeque;
/// control queue
///
/// FIFO of pending control packets, backed by `VecDeque` for O(1)
/// push-back/pop-front.
pub(crate) type ControlQueue = VecDeque<Packet>;
|
use std::sync::Arc;
use datafusion::{
arrow::{
array::{ArrayRef, StringBuilder, UInt32Builder},
datatypes::DataType,
},
logical_plan::create_udf,
physical_plan::{
functions::{make_scalar_function, Volatility},
udf::ScalarUDF,
},
};
use crate::compile::QueryPlannerExecutionProps;
/// Builds the MySQL-compatible `version()` scalar UDF, which always
/// reports the fixed version string "8.0.25".
pub fn create_version_udf() -> ScalarUDF {
    let implementation = make_scalar_function(|_args: &[ArrayRef]| {
        let mut result = StringBuilder::new(1);
        result.append_value("8.0.25").unwrap();
        Ok(Arc::new(result.finish()) as ArrayRef)
    });
    create_udf(
        "version",
        vec![],
        Arc::new(DataType::Utf8),
        Volatility::Immutable,
        implementation,
    )
}
/// Builds the `database()` scalar UDF. The current database name is
/// captured from the planner props at creation time; cloning the String
/// into the closure is simpler than sharing it behind an Arc.
pub fn create_db_udf(props: &QueryPlannerExecutionProps) -> ScalarUDF {
    let database = props.database.clone().unwrap_or_else(|| "db".to_string());
    let implementation = make_scalar_function(move |_args: &[ArrayRef]| {
        let mut result = StringBuilder::new(1);
        result.append_value(database.clone()).unwrap();
        Ok(Arc::new(result.finish()) as ArrayRef)
    });
    create_udf(
        "database",
        vec![],
        Arc::new(DataType::Utf8),
        Volatility::Immutable,
        implementation,
    )
}
/// Builds the `connection_id()` scalar UDF, returning the id of the
/// current connection captured from the planner props at creation time.
pub fn create_connection_id_udf(props: &QueryPlannerExecutionProps) -> ScalarUDF {
    // `connection_id` is Copy, so moving it into the closure is trivial.
    let connection_id = props.connection_id;
    let implementation = make_scalar_function(move |_args: &[ArrayRef]| {
        let mut result = UInt32Builder::new(1);
        result.append_value(connection_id).unwrap();
        Ok(Arc::new(result.finish()) as ArrayRef)
    });
    create_udf(
        "connection_id",
        vec![],
        Arc::new(DataType::UInt32),
        Volatility::Immutable,
        implementation,
    )
}
|
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Contains configuration structs for the database.
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Configuration parameters for the Sqlite Database.
// `#[serde(default)]` lets any missing field fall back to `Default`,
// while `skip_serializing_if` keeps unset options out of the output.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(default)]
pub struct DatabaseConfig {
    /// The path to the SqliteDatabase to use. If set, the parent directory must exist and the
    /// location must be writable. Saver will never fall back to an in-memory database if this is
    /// set.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub database_path: Option<PathBuf>,
    /// Sets the cap for the number of scenarios to keep in the database. Set to None for
    /// unlimited. Defaults to 1,000,000.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_scenarios_to_keep: Option<u64>,
    /// How often (in seconds) to prune excess scenarios while running normally. Defaults to every
    /// 20 minutes (1200 seconds). Regardless of what this is set to, it will always prune on
    /// shutdown unless max_scenarios_to_keep is unset.
    pub prune_interval_seconds: u64,
}
impl Default for DatabaseConfig {
fn default() -> Self {
DatabaseConfig {
database_path: None,
max_scenarios_to_keep: Some(1000000),
prune_interval_seconds: 1200,
}
}
}
|
use crate::{
bit_set,
bit_set::{
ops::{Access, Capacity},
word, Word,
},
};
/// Trait to manipulate bits.
pub trait Insert: Access {
    /// Insert bit `i`, and return a **previous** value
    /// (`true` if the bit was already set).
    fn insert(&mut self, i: u64) -> bool;
}
// Implements `Insert` for each primitive word type. Out-of-range
// indices panic; setting an already-set bit is a no-op returning `true`.
macro_rules! impl_Insert_for_words {
    ($($ty:ty),*) => ($(
        impl Insert for $ty {
            #[inline]
            fn insert(&mut self, i: u64) -> bool {
                // The index must lie within this word's bit capacity.
                assert!(i < Self::CAPACITY);
                if self.access(i) {
                    true
                } else {
                    *self |= Self::bit(word::cast(i));
                    false
                }
            }
        }
    )*)
}
impl_Insert_for_words!(u8, u16, u32, u64, u128, usize);
impl<T: Capacity + Insert> Insert for [T] {
    /// Inserts bit `i` into the addressed word, returning its previous
    /// value. The word-level `insert` already reports the prior state,
    /// so the former separate `access` pre-check (a second indexed
    /// lookup) was redundant and has been removed.
    fn insert(&mut self, i: u64) -> bool {
        let (index, offset) = bit_set::address(i, T::CAPACITY);
        self[index].insert(offset)
    }
}
|
//! The RFC959 Allocate (`ALLO`) command
//
// This command may be required by some servers to reserve
// sufficient storage to accommodate the new file to be
// transferred. The argument shall be a decimal integer
// representing the number of bytes (using the logical byte
// size) of storage to be reserved for the file. For files
// sent with record or page structure a maximum record or page
// size (in logical bytes) might also be necessary; this is
// indicated by a decimal integer in a second argument field of
// the command. This second argument is optional, but when
// present should be separated from the first by the three
// Telnet characters <SP> R <SP>. This command shall be
// followed by a STORe or APPEnd command. The ALLO command
// should be treated as a NOOP (no operation) by those servers
// which do not require that the maximum size of the file be
// declared beforehand, and those servers interested in only
// the maximum record or page size should accept a dummy value
// in the first argument and ignore it.
use crate::{
auth::UserDetail,
server::controlchan::{
error::ControlChanError,
handler::{CommandContext, CommandHandler},
Reply, ReplyCode,
},
storage::{Metadata, StorageBackend},
};
use async_trait::async_trait;
/// Handler for the RFC 959 `ALLO` (allocate) command.
#[derive(Debug)]
pub struct Allo;
#[async_trait]
impl<Storage, User> CommandHandler<Storage, User> for Allo
where
    User: UserDetail + 'static,
    Storage: StorageBackend<User> + 'static,
    Storage::Metadata: Metadata,
{
    #[tracing_attributes::instrument]
    async fn handle(&self, _args: CommandContext<Storage, User>) -> Result<Reply, ControlChanError> {
        // ALLO is obsolete and we'll just ignore it.
        // RFC 959 allows servers that need no preallocation to treat it as a NOOP.
        Ok(Reply::new(ReplyCode::CommandOkayNotImplemented, "Ignored"))
    }
}
|
mod bus;
mod cpu;
mod ram;
use crate::cpu::Cpu;
use std::env;
use std::fs::File;
use std::io::prelude::*;
/// Loads the ROM named on the command line, runs the CPU until it
/// halts, then dumps memory for inspection.
fn main() {
    // Expect exactly one argument: the ROM path.
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        panic!("ROM File Not Specified")
    }
    let mut file = File::open(&args[1]).expect("Couldn't open file");
    let mut rom: Vec<u8> = Vec::new();
    // Fixed typo in the error message (was "Coudln't read file").
    file.read_to_end(&mut rom).expect("Couldn't read file");
    // Classic fetch/execute loop until the CPU reports it can't continue.
    let mut cpu = Cpu::new(rom);
    while cpu.can_run() {
        let i = cpu.fetch();
        cpu.execute(i);
    }
    cpu.print_mem(0xC);
}
|
use crate::error::Error;
use crate::error::ErrorKind;
// 16 Bits
/// two octets containing one of the RR TYPE codes.
#[derive(PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct Kind(pub u16);
impl Kind {
    // Note
    // 0	0x0000	RRTYPE zero is used as a special indicator for the SIG RR RFC2931,
    // RFC4034 and in other circumstances and must never be allocated for ordinary use.
    /// a host address (IPv4 Address)
    pub const A: Self = Self(1);
    /// an authoritative name server
    pub const NS: Self = Self(2);
    /// a mail destination (Obsolete - use MX)
    pub const MD: Self = Self(3);
    /// a mail forwarder (Obsolete - use MX)
    pub const MF: Self = Self(4);
    /// the canonical name for an alias
    pub const CNAME: Self = Self(5);
    /// marks the start of a zone of authority
    pub const SOA: Self = Self(6);
    /// a mailbox domain name (EXPERIMENTAL)
    pub const MB: Self = Self(7);
    /// a mail group member (EXPERIMENTAL)
    pub const MG: Self = Self(8);
    /// a mail rename domain name (EXPERIMENTAL)
    pub const MR: Self = Self(9);
    /// a null RR (EXPERIMENTAL)
    pub const NULL: Self = Self(10);
    /// a well known service description
    pub const WKS: Self = Self(11);
    /// a domain name pointer
    pub const PTR: Self = Self(12);
    /// host information
    pub const HINFO: Self = Self(13);
    /// mailbox or mail list information
    pub const MINFO: Self = Self(14);
    /// mail exchange
    pub const MX: Self = Self(15);
    /// text strings
    pub const TXT: Self = Self(16);
    // https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#dns-parameters-4
    /// for Responsible Person RFC1183
    pub const RP: Self = Self(17);
    /// for AFS Data Base location RFC1183 RFC5864
    pub const AFSDB: Self = Self(18);
    /// for X.25 PSDN address RFC1183
    pub const X25: Self = Self(19);
    /// for ISDN address RFC1183
    pub const ISDN: Self = Self(20);
    /// for Route Through RFC1183
    pub const RT: Self = Self(21);
    /// for NSAP address, NSAP style A record RFC1706
    pub const NSAP: Self = Self(22);
    /// for domain name pointer, NSAP style RFC1348 RFC1637 RFC1706
    pub const NSAP_PTR: Self = Self(23);
    /// for security signature RFC4034 RFC3755 RFC2535 RFC2536 RFC2537 RFC2931 RFC3110 RFC3008
    pub const SIG: Self = Self(24);
    /// for security key RFC4034 RFC3755 RFC2535 RFC2536 RFC2537 RFC2539 RFC3008 RFC3110
    pub const KEY: Self = Self(25);
    /// X.400 mail mapping information RFC2163
    pub const PX: Self = Self(26);
    /// Geographical Position RFC1712
    pub const GPOS: Self = Self(27);
    /// IPv6 Address, RFC3596
    pub const AAAA: Self = Self(28);
    /// Location Information RFC1876
    pub const LOC: Self = Self(29);
    /// Next Domain (OBSOLETE) RFC3755 RFC2535
    pub const NXT: Self = Self(30);
    /// Endpoint Identifier [Michael_Patton][http://ana-3.lcs.mit.edu/~jnc/nimrod/dns.txt] 1995-06
    pub const EID: Self = Self(31);
    /// Nimrod Locator (Michael_Patton)[http://ana-3.lcs.mit.edu/~jnc/nimrod/dns.txt] 1995-06
    pub const NIMLOC: Self = Self(32);
    /// Server Selection RFC2782
    pub const SRV: Self = Self(33);
    /// ATM Address [ ATM Forum Technical Committee, "ATM Name System, V2.0", Doc ID: AF-DANS-0152.000, July 2000. Available from and held in escrow by IANA.]
    pub const ATMA: Self = Self(34);
    /// Naming Authority Pointer RFC2915 RFC2168 RFC3403
    pub const NAPTR: Self = Self(35);
    /// Key Exchanger RFC2230
    pub const KX: Self = Self(36);
    /// DNAME RFC6672
    pub const DNAME: Self = Self(39);
    /// SINK (Donald_E_Eastlake)[http://tools.ietf.org/html/draft-eastlake-kitchen-sink] 1997-11
    pub const SINK: Self = Self(40);
    /// OPT RFC6891 RFC3225
    pub const OPT: Self = Self(41);
    /// APL RFC3123
    pub const APL: Self = Self(42);
    /// Delegation Signer RFC4034 RFC3658
    pub const DS: Self = Self(43);
    /// SSH Key Fingerprint RFC4255
    pub const SSHFP: Self = Self(44);
    /// IPSECKEY RFC4025
    pub const IPSECKEY: Self = Self(45);
    /// RRSIG RFC4034 RFC3755
    pub const RRSIG: Self = Self(46);
    /// NSEC RFC4034 RFC3755
    pub const NSEC: Self = Self(47);
    /// DNSKEY RFC4034 RFC3755
    pub const DNSKEY: Self = Self(48);
    /// DHCID RFC4701
    pub const DHCID: Self = Self(49);
    /// NSEC3 RFC5155
    pub const NSEC3: Self = Self(50);
    /// NSEC3PARAM RFC5155
    pub const NSEC3PARAM: Self = Self(51);
    /// TLSA RFC6698
    pub const TLSA: Self = Self(52);
    /// S/MIME cert association RFC8162
    pub const SMIMEA: Self = Self(53);
    /// Host Identity Protocol RFC8005
    pub const HIP: Self = Self(55);
    /// NINFO (Jim_Reid) NINFO/ninfo-completed-template 2008-01-21
    pub const NINFO: Self = Self(56);
    /// RKEY (Jim_Reid) RKEY/rkey-completed-template 2008-01-21
    pub const RKEY: Self = Self(57);
    /// Trust Anchor LINK (Wouter_Wijngaards) TALINK/talink-completed-template 2010-02-17
    pub const TALINK: Self = Self(58);
    /// Child DS RFC7344 CDS/cds-completed-template 2011-06-06
    pub const CDS: Self = Self(59);
    /// DNSKEY(s) the Child wants reflected in DS RFC7344 2014-06-16
    pub const CDNSKEY: Self = Self(60);
    /// OpenPGP Key RFC7929 OPENPGPKEY/openpgpkey-completed-template 2014-08-12
    pub const OPENPGPKEY: Self = Self(61);
    /// Child-To-Parent Synchronization RFC7477 2015-01-27
    pub const CSYNC: Self = Self(62);
    /// message digest for DNS zone (draft-wessels-dns-zone-digest) ZONEMD/zonemd-completed-template 2018-12-12
    pub const ZONEMD: Self = Self(63);
    /// RFC7208
    pub const SPF: Self = Self(99);
    /// [IANA-Reserved]
    pub const UINFO: Self = Self(100);
    /// [IANA-Reserved]
    pub const UID: Self = Self(101);
    /// [IANA-Reserved]
    pub const GID: Self = Self(102);
    /// [IANA-Reserved]
    pub const UNSPEC: Self = Self(103);
    /// RFC6742 ILNP/nid-completed-template
    pub const NID: Self = Self(104);
    /// RFC6742 ILNP/l32-completed-template
    pub const L32: Self = Self(105);
    /// RFC6742 ILNP/l64-completed-template
    pub const L64: Self = Self(106);
    /// RFC6742 ILNP/lp-completed-template
    pub const LP: Self = Self(107);
    /// an EUI-48 address RFC7043 EUI48/eui48-completed-template 2013-03-27
    pub const EUI48: Self = Self(108);
    /// an EUI-64 address RFC7043 EUI64/eui64-completed-template 2013-03-27
    pub const EUI64: Self = Self(109);
    /// Transaction Key RFC2930
    pub const TKEY: Self = Self(249);
    /// Transaction Signature RFC2845
    pub const TSIG: Self = Self(250);
    /// incremental transfer RFC1995
    pub const IXFR: Self = Self(251);
    // QTYPE values
    /// A request for a transfer of an entire zone
    pub const AXFR: Self = Self(252);
    /// A request for mailbox-related records (MB, MG or MR)
    pub const MAILB: Self = Self(253);
    /// A request for mail agent RRs (Obsolete - see MX)
    pub const MAILA: Self = Self(254);
    /// A request for all records (*)
    pub const ALL: Self = Self(255);
    /// URI RFC7553 URI/uri-completed-template 2011-02-22
    pub const URI: Self = Self(256);
    /// Certification Authority Restriction [RFC-ietf-lamps-rfc6844bis-07] CAA/caa-completed-template 2011-04-07
    pub const CAA: Self = Self(257);
    /// Application Visibility and Control (Wolfgang_Riedel) AVC/avc-completed-template 2016-02-26
    pub const AVC: Self = Self(258);
    /// Digital Object Architecture [draft-durand-doa-over-dns] DOA/doa-completed-template 2017-08-30
    pub const DOA: Self = Self(259);
    /// Automatic Multicast Tunneling Relay [draft-ietf-mboned-driad-amt-discovery] AMTRELAY/amtrelay-completed-template 2019-02-06
    pub const AMTRELAY: Self = Self(260);
    /// DNSSEC Trust Authorities (Sam_Weiler)[http://cameo.library.cmu.edu/][ Deploying DNSSEC Without a Signed Root. Technical Report 1999-19, Information Networking Institute, Carnegie Mellon University, April 2004.] 2005-12-13
    pub const TA: Self = Self(32768);
    /// DNSSEC Lookaside Validation RFC4431
    pub const DLV: Self = Self(32769);
    /// Returns true for QTYPEs and transport-level pseudo-RRs that never
    /// appear as ordinary zone data.
    #[inline]
    pub fn is_pseudo_record_kind(&self) -> bool {
        // Other types and pseudo resource records
        // https://en.wikipedia.org/wiki/List_of_DNS_record_types#Other_types_and_pseudo_resource_records
        //
        // *    255 RFC 1035[1] All cached records
        // AXFR 252 RFC 1035[1] Authoritative Zone Transfer
        // IXFR 251 RFC 1996    Incremental Zone Transfer
        // OPT   41 RFC 6891    Option
        matches!(*self, Self::ALL | Self::AXFR | Self::IXFR | Self::OPT)
    }
    /// Returns true if the code falls in an IANA-unassigned range.
    #[inline]
    pub fn is_unassigned(&self) -> bool {
        // Unassigned	54
        // Unassigned	64-98
        // Unassigned	110-248
        // Unassigned	261-32767
        // Unassigned	32770-65279
        matches!(self.0, 54 | 64..=98 | 110..=248 | 261..=32767 | 32770..=65279)
    }
    /// Returns true if the code is in the private-use range.
    #[inline]
    pub fn is_private_use(&self) -> bool {
        // Private use	65280-65534
        matches!(self.0, 65280..=65534)
    }
    /// Returns true for the single reserved code, 65535.
    #[inline]
    pub fn is_reserved(&self) -> bool {
        // Reserved	65535
        self.0 == 65535
    }
}
impl std::fmt::Debug for Kind {
    /// Formats the record type as its RFC mnemonic (e.g. "A", "MX"),
    /// falling back to a categorized numeric form for unknown codes.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match *self {
            Kind::A => "A",
            Kind::NS => "NS",
            Kind::MD => "MD",
            Kind::MF => "MF",
            Kind::CNAME => "CNAME",
            Kind::SOA => "SOA",
            Kind::MB => "MB",
            Kind::MG => "MG",
            Kind::MR => "MR",
            Kind::NULL => "NULL",
            Kind::WKS => "WKS",
            Kind::PTR => "PTR",
            Kind::HINFO => "HINFO",
            Kind::MINFO => "MINFO",
            Kind::MX => "MX",
            Kind::TXT => "TXT",
            Kind::RP => "RP",
            Kind::AFSDB => "AFSDB",
            Kind::X25 => "X25",
            Kind::ISDN => "ISDN",
            Kind::RT => "RT",
            Kind::NSAP => "NSAP",
            Kind::NSAP_PTR => "NSAP-PTR",
            Kind::SIG => "SIG",
            Kind::KEY => "KEY",
            Kind::PX => "PX",
            Kind::GPOS => "GPOS",
            Kind::AAAA => "AAAA",
            Kind::LOC => "LOC",
            Kind::NXT => "NXT",
            Kind::EID => "EID",
            Kind::NIMLOC => "NIMLOC",
            Kind::SRV => "SRV",
            Kind::ATMA => "ATMA",
            Kind::NAPTR => "NAPTR",
            Kind::KX => "KX",
            Kind::DNAME => "DNAME",
            Kind::SINK => "SINK",
            Kind::OPT => "OPT",
            Kind::APL => "APL",
            Kind::DS => "DS",
            Kind::SSHFP => "SSHFP",
            Kind::IPSECKEY => "IPSECKEY",
            Kind::RRSIG => "RRSIG",
            Kind::NSEC => "NSEC",
            Kind::DNSKEY => "DNSKEY",
            Kind::DHCID => "DHCID",
            Kind::NSEC3 => "NSEC3",
            Kind::NSEC3PARAM => "NSEC3PARAM",
            Kind::TLSA => "TLSA",
            Kind::SMIMEA => "SMIMEA",
            Kind::HIP => "HIP",
            Kind::NINFO => "NINFO",
            Kind::RKEY => "RKEY",
            Kind::TALINK => "TALINK",
            Kind::CDS => "CDS",
            Kind::CDNSKEY => "CDNSKEY",
            Kind::OPENPGPKEY => "OPENPGPKEY",
            Kind::CSYNC => "CSYNC",
            Kind::ZONEMD => "ZONEMD",
            Kind::SPF => "SPF",
            Kind::UINFO => "UINFO",
            Kind::UID => "UID",
            Kind::GID => "GID",
            Kind::UNSPEC => "UNSPEC",
            Kind::NID => "NID",
            Kind::L32 => "L32",
            Kind::L64 => "L64",
            Kind::LP => "LP",
            Kind::EUI48 => "EUI48",
            Kind::EUI64 => "EUI64",
            Kind::TKEY => "TKEY",
            Kind::TSIG => "TSIG",
            Kind::IXFR => "IXFR",
            Kind::AXFR => "AXFR",
            Kind::MAILB => "MAILB",
            Kind::MAILA => "MAILA",
            Kind::ALL => "ALL",
            Kind::URI => "URI",
            Kind::CAA => "CAA",
            Kind::AVC => "AVC",
            Kind::DOA => "DOA",
            Kind::AMTRELAY => "AMTRELAY",
            Kind::TA => "TA",
            Kind::DLV => "DLV",
            _ => {
                // Codes without a mnemonic are printed by category.
                return if self.is_unassigned() {
                    write!(f, "Unassigned({})", self.0)
                } else if self.is_private_use() {
                    write!(f, "PrivateUse({})", self.0)
                } else if self.is_reserved() {
                    write!(f, "Reserved({})", self.0)
                } else {
                    // Fixed typo: previously printed "Unknow(...)".
                    write!(f, "Unknown({})", self.0)
                };
            }
        };
        f.write_str(name)
    }
}
impl std::fmt::Display for Kind {
    /// Delegates to the `Debug` representation (the RFC mnemonic).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Debug::fmt(self, f)
    }
}
impl std::str::FromStr for Kind {
    type Err = Error;
    /// Parses an RFC mnemonic (e.g. "AAAA") into its `Kind`.
    /// Any name not in the table below yields `ErrorKind::FormatError`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "A" => Ok(Kind::A),
            "NS" => Ok(Kind::NS),
            "MD" => Ok(Kind::MD),
            "MF" => Ok(Kind::MF),
            "CNAME" => Ok(Kind::CNAME),
            "SOA" => Ok(Kind::SOA),
            "MB" => Ok(Kind::MB),
            "MG" => Ok(Kind::MG),
            "MR" => Ok(Kind::MR),
            "NULL" => Ok(Kind::NULL),
            "WKS" => Ok(Kind::WKS),
            "PTR" => Ok(Kind::PTR),
            "HINFO" => Ok(Kind::HINFO),
            "MINFO" => Ok(Kind::MINFO),
            "MX" => Ok(Kind::MX),
            "TXT" => Ok(Kind::TXT),
            "RP" => Ok(Kind::RP),
            "AFSDB" => Ok(Kind::AFSDB),
            "X25" => Ok(Kind::X25),
            "ISDN" => Ok(Kind::ISDN),
            "RT" => Ok(Kind::RT),
            "NSAP" => Ok(Kind::NSAP),
            // Note the hyphenated presentation form for NSAP-PTR.
            "NSAP-PTR" => Ok(Kind::NSAP_PTR),
            "SIG" => Ok(Kind::SIG),
            "KEY" => Ok(Kind::KEY),
            "PX" => Ok(Kind::PX),
            "GPOS" => Ok(Kind::GPOS),
            "AAAA" => Ok(Kind::AAAA),
            "LOC" => Ok(Kind::LOC),
            "NXT" => Ok(Kind::NXT),
            "EID" => Ok(Kind::EID),
            "NIMLOC" => Ok(Kind::NIMLOC),
            "SRV" => Ok(Kind::SRV),
            "ATMA" => Ok(Kind::ATMA),
            "NAPTR" => Ok(Kind::NAPTR),
            "KX" => Ok(Kind::KX),
            "DNAME" => Ok(Kind::DNAME),
            "SINK" => Ok(Kind::SINK),
            "OPT" => Ok(Kind::OPT),
            "APL" => Ok(Kind::APL),
            "DS" => Ok(Kind::DS),
            "SSHFP" => Ok(Kind::SSHFP),
            "IPSECKEY" => Ok(Kind::IPSECKEY),
            "RRSIG" => Ok(Kind::RRSIG),
            "NSEC" => Ok(Kind::NSEC),
            "DNSKEY" => Ok(Kind::DNSKEY),
            "DHCID" => Ok(Kind::DHCID),
            "NSEC3" => Ok(Kind::NSEC3),
            "NSEC3PARAM" => Ok(Kind::NSEC3PARAM),
            "TLSA" => Ok(Kind::TLSA),
            "SMIMEA" => Ok(Kind::SMIMEA),
            "HIP" => Ok(Kind::HIP),
            "NINFO" => Ok(Kind::NINFO),
            "RKEY" => Ok(Kind::RKEY),
            "TALINK" => Ok(Kind::TALINK),
            "CDS" => Ok(Kind::CDS),
            "CDNSKEY" => Ok(Kind::CDNSKEY),
            "OPENPGPKEY" => Ok(Kind::OPENPGPKEY),
            "CSYNC" => Ok(Kind::CSYNC),
            "ZONEMD" => Ok(Kind::ZONEMD),
            "SPF" => Ok(Kind::SPF),
            "UINFO" => Ok(Kind::UINFO),
            "UID" => Ok(Kind::UID),
            "GID" => Ok(Kind::GID),
            "UNSPEC" => Ok(Kind::UNSPEC),
            "NID" => Ok(Kind::NID),
            "L32" => Ok(Kind::L32),
            "L64" => Ok(Kind::L64),
            "LP" => Ok(Kind::LP),
            "EUI48" => Ok(Kind::EUI48),
            "EUI64" => Ok(Kind::EUI64),
            "TKEY" => Ok(Kind::TKEY),
            "TSIG" => Ok(Kind::TSIG),
            "IXFR" => Ok(Kind::IXFR),
            "AXFR" => Ok(Kind::AXFR),
            "MAILB" => Ok(Kind::MAILB),
            "MAILA" => Ok(Kind::MAILA),
            "ALL" => Ok(Kind::ALL),
            "URI" => Ok(Kind::URI),
            "CAA" => Ok(Kind::CAA),
            "AVC" => Ok(Kind::AVC),
            "DOA" => Ok(Kind::DOA),
            "AMTRELAY" => Ok(Kind::AMTRELAY),
            "TA" => Ok(Kind::TA),
            "DLV" => Ok(Kind::DLV),
            // Numeric "TYPEnnn" forms (RFC 3597) are not handled here.
            _ => Err(Error::from(ErrorKind::FormatError)),
        }
    }
}
pub mod url;
pub mod http;
|
extern crate rusqlite;
extern crate rustc_serialize;
extern crate time;
extern crate torrent;
use rusqlite::Connection;
use torrent::Metainfo;
use rustc_serialize::hex::ToHex;
use std::error::Error;
use std::io::Read;
use std::fs::File;
use std::io;
use std::env;
// SQLite database file this importer writes into.
const DATABASE_FILE: &'static str = "lib.rus.ec.db";
// Archive metadata row: name, creation date, info-hash, sizes, piece count.
const INSERT_ARCHIVE: &'static str = "INSERT INTO archives (name, created, hash, total_length, piece_length, pieces_count)
VALUES (?, ?, ?, ?, ?, ?)";
// One row per 20-byte piece hash, keyed by the owning archive's rowid.
const INSERT_PIECE: &'static str = "INSERT INTO pieces (archive_id, piece_idx, hash) VALUES (?, ?, ?)";
/// Reads the whole torrent file either from the path given as the first
/// CLI argument or, when no argument is present, from stdin.
fn load() -> Result<Vec<u8>, io::Error> {
    let args = env::args().collect::<Vec<_>>();
    let mut buffer = Vec::new();
    if args.len() == 1 {
        // No path supplied: slurp stdin.
        let stdin = io::stdin();
        let mut handle = stdin.lock();
        handle.read_to_end(&mut buffer)?;
    } else {
        File::open(&args[1])?.read_to_end(&mut buffer)?;
    }
    Ok(buffer)
}
/// Inserts the archive-level metadata row and returns its rowid, which
/// callers use as the foreign key for the per-piece rows.
fn insert_metainfo(metainfo: &Metainfo, conn: &Connection) -> Result<i64, rusqlite::Error> {
    conn.execute(
        INSERT_ARCHIVE,
        &[
            &metainfo.get_file_name(),
            &metainfo.get_creation_date(),
            &metainfo.get_info_hash(),
            // SQLite integers are signed; store sizes/counts as i64.
            &(metainfo.get_length() as i64),
            &(metainfo.get_piece_length() as i64),
            &(metainfo.get_piece_count() as i64),
        ],
    )?;
    Ok(conn.last_insert_rowid())
}
/// Inserts one row per 20-byte piece hash, all inside a single
/// transaction so a partial import is rolled back on error.
fn insert_pieces(
    metainfo: &Metainfo,
    archive_id: i64,
    conn: &mut Connection,
) -> Result<(), rusqlite::Error> {
    let tx = conn.transaction()?;
    {
        let mut stmt = tx.prepare(INSERT_PIECE)?;
        let pieces: &[u8] = metainfo.info.pieces.as_ref();
        // `chunks(20)` yields one SHA-1 digest per piece; `enumerate`
        // replaces the previously hand-maintained counter variable.
        for (i, hash) in pieces.chunks(20).enumerate() {
            let index = i as i64;
            stmt.execute(&[&archive_id, &index, &hash.to_hex()])?;
        }
    }
    tx.commit()?;
    Ok(())
}
/// Prints a summary of the torrent, then writes the archive row and all
/// of its piece hashes into the SQLite database.
fn upload(metainfo: Metainfo) -> Result<(), rusqlite::Error> {
    println!("file name: {}", &metainfo.get_file_name());
    println!("creation date: {}", &metainfo.get_creation_date());
    println!("info hash: {}", &metainfo.get_info_hash());
    println!("total length: {}", &metainfo.get_length());
    println!("piece length: {}", &metainfo.get_piece_length());
    println!("piece count: {}", &metainfo.get_piece_count());
    let mut conn = Connection::open(DATABASE_FILE)?;
    let archive_id = insert_metainfo(&metainfo, &conn)?;
    insert_pieces(&metainfo, archive_id, &mut conn)
}
fn insert(data: Metainfo) -> Result<(), io::Error> {
upload(data).map_err(|e| io::Error::new(io::ErrorKind::Other, e.description()))
}
fn parse(data: Vec<u8>) -> Result<Metainfo, io::Error> {
Metainfo::from(&data).map_err(|e| io::Error::new(io::ErrorKind::Other, e.description()))
}
fn main() {
    // load -> parse -> insert; the Debug print surfaces either Ok or the error.
    println!("{:?}", load().and_then(parse).and_then(insert));
}
|
use heck::*;
use proc_macro2::Span;
use quote::{quote, ToTokens};
use syn::parse::{Parse, ParseStream, Result};
use syn::punctuated::Punctuated;
use syn::{parenthesized, parse_macro_input, token, Ident, Token, Type};
/// Entry point of the callback macro: parses a callback declaration
/// (`On`) and expands it into the generated wrapper type.
pub fn make(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let parsed = parse_macro_input!(item as On);
    let t = quote!(#parsed);
    // Removed leftover `dbg!(...)` that printed the expanded tokens to
    // stderr on every macro invocation.
    proc_macro::TokenStream::from(t)
}
/// Return-type specification of a callback signature.
#[derive(Clone)]
pub enum OnReturnParams {
    /// No `->` clause.
    None,
    /// `-> Type`
    Single(token::RArrow, Type),
    /// `-> (Type, ...)`
    Multi(token::RArrow, token::Paren, Punctuated<Type, Token![,]>),
}
/// Parsed form of a callback declaration: `name(Params...) [-> Ret]`.
pub(crate) struct On {
    // Callback base name; expanded into an `On`-prefixed CamelCase type.
    pub name: Ident,
    // Parenthesis token surrounding the parameter list.
    pub paren: token::Paren,
    // Parameter types of the callback.
    pub params: Punctuated<Type, Token![,]>,
    // Optional return type(s).
    pub ret: OnReturnParams,
    // Always `None` today; reserved for a future default-return feature.
    pub default_ret: Option<Ident>,
}
impl Parse for On {
    /// Parses `name(Type, ...)` optionally followed by `-> Type` or
    /// `-> (Type, ...)`.
    fn parse(input: ParseStream) -> Result<Self> {
        let content;
        Ok(Self {
            name: input.parse()?,
            paren: parenthesized!(content in input),
            params: content.parse_terminated(Type::parse)?,
            ret: {
                // Return spec is optional: absent, single type, or tuple.
                let lookahead = input.lookahead1();
                if lookahead.peek(token::RArrow) {
                    let arrow = input.parse()?;
                    let lookahead = input.lookahead1();
                    if lookahead.peek(token::Paren) {
                        let content;
                        OnReturnParams::Multi(arrow, parenthesized!(content in input), content.parse_terminated(Type::parse)?)
                    } else {
                        OnReturnParams::Single(arrow, input.parse()?)
                    }
                } else {
                    OnReturnParams::None
                }
            },
            default_ret: None,
        })
    }
}
impl ToTokens for On {
    /// Expands one callback declaration into the `On<Name>` newtype plus
    /// its `Callback`, conversion, `Debug` and `PartialEq` impls.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let ident = &self.name.to_string().to_camel_case();
        let params = &self.params;
        // e.g. `clicked` becomes the type name `OnClicked`.
        let on_ident = Ident::new(&format!("On{}", ident).to_camel_case(), Span::call_site());
        // Re-emit the return spec exactly as parsed (none / single / tuple).
        let ret = match self.ret {
            OnReturnParams::None => quote!{},
            OnReturnParams::Single(arrow, ref ty) => quote!{
                #arrow #ty
            },
            OnReturnParams::Multi(arrow, _, ref params) => quote!{
                #arrow (#params)
            }
        };
        /* // waits for https://github.com/rust-lang/rfcs/issues/1672
        let param_names = &(0..params.len()).map(|i| Ident::new(&format!("arg{}", i), Span::call_site())).collect::<Vec<_>>();
        let default_ret = match self.default_ret {
            Some(ref value) => quote! {
                impl <T> From<T> for #on_ident where T: FnMut(#(#params,)*) + Sized + 'static {
                    fn from(t: T) -> #on_ident {
                        #on_ident(CallbackId::next(), Box::new(move |#(#param_names,)*| {
                            t(#(#param_names,)*);
                            #value
                        }))
                    }
                }
            },
            None => quote! {}
        };*/
        let expr = quote! {
            pub struct #on_ident(CallbackId, Box<dyn FnMut(#(#params,)* ) #ret>);
            impl Callback for #on_ident {
                fn name(&self) -> &'static str {
                    stringify!(#on_ident)
                }
                fn id(&self) -> CallbackId {
                    self.0
                }
            }
            //#default_ret
            impl <T> From<T> for #on_ident where T: FnMut(#(#params,)*) #ret + Sized + 'static {
                fn from(t: T) -> #on_ident {
                    #on_ident(CallbackId::next(), Box::new(t))
                }
            }
            impl AsRef<dyn FnMut(#(#params,)*) #ret> for #on_ident {
                fn as_ref(&self) -> &(dyn FnMut(#(#params,)*) #ret + 'static) {
                    self.1.as_ref()
                }
            }
            impl AsMut<dyn FnMut(#(#params,)*) #ret> for #on_ident {
                fn as_mut(&mut self) -> &mut (dyn FnMut(#(#params,)*) #ret + 'static) {
                    self.1.as_mut()
                }
            }
            impl From<#on_ident> for (CallbackId, Box<dyn FnMut(#(#params,)*) #ret>) {
                fn from(a: #on_ident) -> Self {
                    (a.0, a.1)
                }
            }
            impl From<(CallbackId, Box<dyn FnMut(#(#params,)*) #ret>)> for #on_ident {
                fn from(a: (CallbackId, Box<dyn FnMut(#(#params,)*) #ret>)) -> Self {
                    #on_ident(a.0, a.1)
                }
            }
            impl ::std::fmt::Debug for #on_ident {
                fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
                    write!(f, "{}({})", self.name(), self.id())
                }
            }
            impl ::std::cmp::PartialEq for #on_ident {
                fn eq(&self, other: &#on_ident) -> bool {
                    self.id().eq(&other.id())
                }
            }
        };
        expr.to_tokens(tokens)
    }
}
|
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
/// Derive macro that embeds a folder of assets (via `rust_embed`),
/// dumps them to a temp directory at runtime, and launches the first
/// `.exe` found among them.
#[proc_macro_derive(Bundle, attributes(folder))]
pub fn bundle(input: TokenStream) -> TokenStream {
    let input = syn::parse_macro_input!(input as syn::DeriveInput);
    let name = &input.ident;
    // Locate the mandatory #[folder = "..."] attribute.
    let mut folder = None;
    for attr in input.attrs {
        if let Ok(syn::Meta::NameValue(meta_name_value)) = attr.parse_meta() {
            let syn::MetaNameValue { path, lit, .. } = meta_name_value;
            // Skip unrelated name-value attributes. Previously any other
            // attribute (including `#[doc = "..."]`, i.e. a doc comment on
            // the deriving type) produced a spurious error, and
            // `path.get_ident().unwrap()` could panic on multi-segment
            // paths; `is_ident` handles both safely.
            if !path.is_ident("folder") {
                continue;
            }
            if let syn::Lit::Str(lit) = lit {
                folder = Some(lit.value());
            } else {
                let msg = "folder path provided was not a string literal!";
                return syn::Error::new_spanned(&attr, msg)
                    .to_compile_error()
                    .into();
            }
        }
    }
    let folder = folder.expect(r#"No path provided, should be similar to #[folder = "dist/"]"#);
    // With the "nowindow" feature, suppress the console window on Windows.
    let window_mode = if cfg!(feature = "nowindow") {
        quote! {
            const CREATE_NO_WINDOW: u32 = 0x08000000;
            cmd.creation_flags(CREATE_NO_WINDOW);
        }
    } else {
        quote! {}
    };
    let expanded = quote! {
        #[derive(rust_embed::RustEmbed)]
        #[folder = #folder]
        struct __Asset;
        struct __Packed {
            temp_dir: tempfile::TempDir,
            exe_path: ::std::option::Option<::std::path::PathBuf>,
        }
        impl __Packed {
            fn new() -> Self {
                Self {
                    temp_dir: tempfile::TempDir::new().expect("Could not create temp dir"),
                    exe_path: None,
                }
            }
            // Writes every embedded asset into the temp dir, remembering
            // the path of the first file with an "exe" extension.
            fn dump(&mut self) {
                for file in __Asset::iter() {
                    let file_path = ::std::path::Path::new(file.as_ref());
                    if file_path.extension().expect("Could not get filename") == "exe" {
                        self.exe_path = Some(file_path.to_path_buf())
                    };
                    let path = self.temp_dir.path().join(file_path);
                    let folders = path.parent().expect("Could not get parent");
                    if !folders.to_str().expect("Not unicode!").is_empty() {
                        ::std::fs::create_dir_all(folders)
                            .expect("Could not create dirs recursively for embedded files");
                    }
                    let data = __Asset::get(file_path.to_str().expect("File path is not unicode"))
                        .expect("Could not get the asset");
                    ::std::fs::write(path, data).expect("Writing in temp directory failed");
                }
            }
            fn launch(&self) {
                if let Some(exe_path) = &self.exe_path {
                    __execute(self.temp_dir.path(), exe_path);
                } else {
                    eprintln!("No executable found!");
                }
            }
        }
        impl #name {
            fn run() {
                let mut packed = __Packed::new();
                packed.dump();
                packed.launch();
            }
        }
        fn __execute(temp_dir: &::std::path::Path, exe_path: &::std::path::Path) {
            use ::std::os::windows::process::CommandExt;
            ::std::env::set_current_dir(temp_dir).expect("Could not change directory");
            let mut cmd = ::std::process::Command::new(exe_path);
            #window_mode
            let mut child = cmd.spawn().expect("Could not spawn command");
            child.wait().expect("command wasn't running");
        }
    };
    expanded.into()
}
|
use crate::config::{AndroidBuildTarget, AndroidConfig};
use anyhow::format_err;
use cargo::core::{Target, TargetKind, Workspace};
use cargo::util::{process, CargoResult, ProcessBuilder};
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
/// Returns the directory in which all cargo apk artifacts for the current
/// debug/release configuration should be produced.
/// Returns the directory in which all cargo apk artifacts for the current
/// debug/release configuration should be produced
/// (`<target-dir>/android-artifacts/{debug,release}`).
pub fn get_root_build_directory(workspace: &Workspace, config: &AndroidConfig) -> PathBuf {
    let profile = if config.release { "release" } else { "debug" };
    workspace
        .target_dir()
        .join("android-artifacts")
        .into_path_unlocked()
        .join(profile)
}
/// Returns the sub directory within the root build directory for the specified target.
pub fn get_target_directory(root_build_dir: &PathBuf, target: &Target) -> CargoResult<PathBuf> {
let target_directory = match target.kind() {
TargetKind::Bin => root_build_dir.join("bin"),
TargetKind::ExampleBin => root_build_dir.join("examples"),
_ => unreachable!("Unexpected target kind"),
};
let target_directory = target_directory.join(target.name());
Ok(target_directory)
}
/// Returns path to NDK provided make
pub fn make_path(config: &AndroidConfig) -> PathBuf {
config.ndk_path.join("prebuild").join(HOST_TAG).join("make")
}
/// Returns the path to the LLVM toolchain provided by the NDK
pub fn llvm_toolchain_root(config: &AndroidConfig) -> PathBuf {
config
.ndk_path
.join("toolchains")
.join("llvm")
.join("prebuilt")
.join(HOST_TAG)
}
// Helper function for looking for a path based on the platform version
// Calls a closure for each attempt and then return the PathBuf for the first file that exists.
// Uses approach that NDK build tools use which is described at:
// https://developer.android.com/ndk/guides/application_mk
// " - The platform version matching APP_PLATFORM.
// - The next available API level below APP_PLATFORM. For example, android-19 will be used when
// APP_PLATFORM is android-20, since there were no new native APIs in android-20.
// - The minimum API level supported by the NDK."
// Helper function for looking for a path based on the platform version.
// Calls `path_builder` for each candidate API level and returns the first
// path that exists. Mirrors the lookup the NDK build tools use, described at
// https://developer.android.com/ndk/guides/application_mk
// " - The platform version matching APP_PLATFORM.
//   - The next available API level below APP_PLATFORM. For example, android-19 will be used when
//     APP_PLATFORM is android-20, since there were no new native APIs in android-20.
//   - The minimum API level supported by the NDK."
pub fn find_ndk_path<F>(platform: u32, path_builder: F) -> CargoResult<PathBuf>
where
    F: Fn(u32) -> PathBuf,
{
    // Probe `platform` down to 2, then `platform` up to 99 — the same order
    // as two explicit while-loops would visit (the upward pass re-checks
    // `platform` itself, which is harmless).
    (2..=platform)
        .rev()
        .chain(platform..100)
        .map(path_builder)
        .find(|candidate| candidate.exists())
        .ok_or_else(|| format_err!("Unable to find NDK file"))
}
// Returns path to clang executable/script that should be used to build the target
// Returns path to the clang executable/script that should be used to build
// the target: `<llvm root>/bin/<llvm-triple><api>-clang`, using the API-level
// fallback rules of `find_ndk_path`.
pub fn find_clang(
    config: &AndroidConfig,
    build_target: AndroidBuildTarget,
) -> CargoResult<PathBuf> {
    let bin_dir = llvm_toolchain_root(config).join("bin");
    find_ndk_path(config.min_sdk_version, |api_level| {
        let file_name = format!(
            "{}{}-clang{}",
            build_target.ndk_llvm_triple(),
            api_level,
            EXECUTABLE_SUFFIX_CMD
        );
        bin_dir.join(file_name)
    })
    .map_err(|_| format_err!("Unable to find NDK clang"))
}
// Returns path to clang++ executable/script that should be used to build the target
// Returns path to the clang++ executable/script that should be used to build
// the target: `<llvm root>/bin/<llvm-triple><api>-clang++`, using the
// API-level fallback rules of `find_ndk_path`.
pub fn find_clang_cpp(
    config: &AndroidConfig,
    build_target: AndroidBuildTarget,
) -> CargoResult<PathBuf> {
    let bin_dir = llvm_toolchain_root(config).join("bin");
    find_ndk_path(config.min_sdk_version, |api_level| {
        let file_name = format!(
            "{}{}-clang++{}",
            build_target.ndk_llvm_triple(),
            api_level,
            EXECUTABLE_SUFFIX_CMD
        );
        bin_dir.join(file_name)
    })
    .map_err(|_| format_err!("Unable to find NDK clang++"))
}
// Returns path to ar.
// Returns path to the triple-prefixed `ar` archiver in the NDK's LLVM
// toolchain, erroring with the attempted path if it does not exist.
pub fn find_ar(config: &AndroidConfig, build_target: AndroidBuildTarget) -> CargoResult<PathBuf> {
    let file_name = format!("{}-ar{}", build_target.ndk_triple(), EXECUTABLE_SUFFIX_EXE);
    let candidate = llvm_toolchain_root(config).join("bin").join(file_name);
    if !candidate.exists() {
        return Err(format_err!(
            "Unable to find ar at `{}`",
            candidate.to_string_lossy()
        ));
    }
    Ok(candidate)
}
// Returns path to readelf
// Returns path to the triple-prefixed `readelf` tool in the NDK's LLVM
// toolchain, erroring with the attempted path if it does not exist.
pub fn find_readelf(
    config: &AndroidConfig,
    build_target: AndroidBuildTarget,
) -> CargoResult<PathBuf> {
    let file_name = format!("{}-readelf{}", build_target.ndk_triple(), EXECUTABLE_SUFFIX_EXE);
    let candidate = llvm_toolchain_root(config).join("bin").join(file_name);
    if !candidate.exists() {
        return Err(format_err!(
            "Unable to find readelf at `{}`",
            candidate.to_string_lossy()
        ));
    }
    Ok(candidate)
}
/// Returns a ProcessBuilder which runs the specified command. Uses "cmd" on windows in order to
/// allow execution of batch files.
/// Returns a ProcessBuilder which runs the specified command. Uses "cmd /C"
/// on Windows so that batch files can be executed; elsewhere the command is
/// run directly.
pub fn script_process(cmd: impl AsRef<OsStr>) -> ProcessBuilder {
    if !cfg!(target_os = "windows") {
        return process(cmd);
    }
    let mut builder = process("cmd");
    builder.arg("/C").arg(cmd);
    builder
}
// HOST_TAG names the NDK's host-prebuilt toolchain directory
// (`toolchains/llvm/prebuilt/<HOST_TAG>`) for the platform this tool is
// compiled on.
#[cfg(all(target_os = "windows", target_pointer_width = "64"))]
const HOST_TAG: &str = "windows-x86_64";
#[cfg(all(target_os = "windows", target_pointer_width = "32"))]
const HOST_TAG: &str = "windows";
#[cfg(target_os = "linux")]
const HOST_TAG: &str = "linux-x86_64";
#[cfg(target_os = "macos")]
const HOST_TAG: &str = "darwin-x86_64";
// These are executable suffixes used to simplify building commands.
// On non-windows platforms they are empty.
// `.exe` — native binaries (ar, readelf).
#[cfg(target_os = "windows")]
const EXECUTABLE_SUFFIX_EXE: &str = ".exe";
#[cfg(not(target_os = "windows"))]
const EXECUTABLE_SUFFIX_EXE: &str = "";
// `.cmd` — wrapper scripts such as the clang API-level wrappers.
#[cfg(target_os = "windows")]
const EXECUTABLE_SUFFIX_CMD: &str = ".cmd";
#[cfg(not(target_os = "windows"))]
const EXECUTABLE_SUFFIX_CMD: &str = "";
// `.bat` — batch files; public because callers outside this module use it.
#[cfg(target_os = "windows")]
pub const EXECUTABLE_SUFFIX_BAT: &str = ".bat";
#[cfg(not(target_os = "windows"))]
pub const EXECUTABLE_SUFFIX_BAT: &str = "";
|
use crate::{BlockNumber, Capacity, CellOutput, Uint64};
use ckb_types::H256;
use serde::{Deserialize, Serialize};
// This is used as return value of get_live_cells_by_lock_hash RPC
#[derive(Debug, Serialize, Deserialize)]
pub struct LiveCell {
    // Transaction/output point that created this cell.
    pub created_by: TransactionPoint,
    // The cell's output (capacity, lock, type script).
    pub cell_output: CellOutput,
    // Byte length of the cell's output data.
    pub output_data_len: Uint64,
    // True if this cell was created by a cellbase (coinbase) transaction.
    pub cellbase: bool,
}
// This is used as return value of get_transactions_by_lock_hash RPC
#[derive(Debug, Serialize, Deserialize)]
pub struct CellTransaction {
    // Where the cell was created.
    pub created_by: TransactionPoint,
    // Where the cell was consumed; None while the cell is still live.
    pub consumed_by: Option<TransactionPoint>,
}
// Locates a specific transaction output: block, transaction hash, and
// output index within that transaction.
#[derive(Debug, Serialize, Deserialize)]
pub struct TransactionPoint {
    pub block_number: BlockNumber,
    pub tx_hash: H256,
    pub index: Uint64,
}
// Indexing progress for a lock hash: the block (number + hash) up to which
// cells for this lock script have been indexed.
#[derive(Serialize, Deserialize, Debug)]
pub struct LockHashIndexState {
    pub lock_hash: H256,
    pub block_number: BlockNumber,
    pub block_hash: H256,
}
// Aggregate capacity held by a lock hash: total capacity, live-cell count,
// and the block number at which the totals were computed.
#[derive(Serialize, Deserialize, Debug)]
pub struct LockHashCapacity {
    pub capacity: Capacity,
    pub cells_count: Uint64,
    pub block_number: BlockNumber,
}
|
#![feature(box_syntax, box_patterns, slice_patterns, advanced_slice_patterns, core)]
#[macro_use] extern crate ast;
extern crate rbtree;
extern crate ivar;
// pub use middle::{Universe, Package, PackageRef};
pub use ast::{name, span, error};
// Returns `true` iff expression `$e` matches pattern `$p`.
// (Crate-local helper; presumably predates the standard library's
// `matches!` macro — this file targets an old nightly toolchain.)
macro_rules! matches {
($p: pat, $e: expr) => (if let $p = $e { true } else { false });
}
pub mod arena;
pub mod middle;
pub mod lang_items;
pub mod name_resolve;
pub mod collect_types;
pub mod collect_members;
pub mod tycheck;
pub mod typed_walker;
pub mod ordering;
pub mod reachability;
pub mod uses;
pub mod eval;
|
// Copyright 2021 IPSE Developer.
// This file is part of IPSE
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![cfg_attr(not(feature = "std"), no_std)]
extern crate frame_system as system;
extern crate pallet_timestamp as timestamp;
use crate::constants::time::HOURS;
use crate::poc_staking as staking;
use crate::poc_staking::AccountIdOfPid;
use crate::poc_staking::DeclaredCapacity;
use num_traits::Zero;
use sp_std::collections::btree_set::BTreeSet;
use sp_std::convert::{Into, TryFrom, TryInto};
use codec::{Decode, Encode};
use frame_support::{
debug, decl_error, decl_event, decl_module, decl_storage,
dispatch::{DispatchError, DispatchResult},
ensure,
traits::{Currency, Get, Imbalance, OnUnbalanced, ReservableCurrency},
weights::Weight,
IterableStorageMap, StorageMap, StorageValue,
};
use pallet_treasury as treasury;
use sp_runtime::{
traits::{CheckedAdd, CheckedDiv, CheckedSub, SaturatedConversion, Saturating},
Percent,
};
use sp_std::result;
use sp_std::vec;
use sp_std::vec::Vec;
use system::{ensure_root, ensure_signed};
use crate::ipse_traits::PocHandler;
use conjugate_poc::{
nonce::noncegen_rust,
poc_hashing::{calculate_scoop, find_best_deadline_rust},
};
use crate::constants::{
currency::DOLLARS,
time::{DAYS, MILLISECS_PER_BLOCK},
};
/// block numbers of a year
pub const YEAR: u32 = 365 * DAYS;
// Bytes per GiB; used to convert plot sizes/difficulty into capacity units.
pub const GIB: u64 = 1024 * 1024 * 1024;
/// you should not modify the SPEED and the MiningExpire
// SPEED/10 is the multiplicative difficulty-adjustment factor (1.1x).
pub const SPEED: u64 = 11;
// Length (in blocks) of one mining cycle.
// NOTE(review): non-UPPER_CASE const name kept; renaming would break every
// use site in this module.
pub const MiningExpire: u64 = 2;
// Balance type of the staking currency configured on the staking pallet.
type BalanceOf<T> =
<<T as staking::Trait>::StakingCurrency as Currency<<T as system::Trait>::AccountId>>::Balance;
// Imbalance produced when new funds are minted as mining rewards.
type PositiveImbalanceOf<T> = <<T as staking::Trait>::StakingCurrency as Currency<
<T as system::Trait>::AccountId,
>>::PositiveImbalance;
// Pallet configuration trait for the PoC (proof-of-capacity) mining module.
pub trait Trait: system::Trait + timestamp::Trait + treasury::Trait + staking::Trait {
type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
// Handler that absorbs the positive imbalance created when rewards are minted.
type PocAddOrigin: OnUnbalanced<PositiveImbalanceOf<Self>>;
/// GENESIS_BASE_TARGET
type GENESIS_BASE_TARGET: Get<u64>;
// Total mining reward ever to be paid out by this pallet.
type TotalMiningReward: Get<BalanceOf<Self>>;
// Allowed deviation between a miner's win rate and its staked share.
type ProbabilityDeviationValue: Get<Percent>;
// Upper bound for a submitted deadline.
type MaxDeadlineValue: Get<u64>;
}
// Best deadline recorded for one mining cycle.
#[derive(Encode, Decode, Clone, Debug, Default, PartialEq, Eq)]
pub struct MiningInfo<AccountId> {
// when miner is None, it means Treasury
pub miner: Option<AccountId>,
// Best (lowest) deadline submitted in this cycle.
pub best_dl: u64,
// Block height at which this entry was recorded.
pub block: u64,
}
// Per-miner record: how many times the miner has won, and a bounded list of
// (block, reward) entries.
#[derive(Encode, Decode, Clone, Debug, Default, PartialEq, Eq)]
pub struct MiningHistory<Balance, BlockNumber> {
total_num: u64,
history: Vec<(BlockNumber, Balance)>,
}
// One difficulty-adjustment record. Invariant kept by the writers in this
// module: base_target * net_difficulty == GENESIS_BASE_TARGET (integer div).
#[derive(Encode, Decode, Clone, Debug, Default, PartialEq, Eq)]
pub struct Difficulty {
pub base_target: u64,
pub net_difficulty: u64,
// the block height of adjust difficulty
pub block: u64,
}
// Storage items. The `///` lines below are emitted as on-chain metadata and
// are therefore left untouched; reviewer notes are `//` comments.
decl_storage! {
trait Store for Module<T: Trait> as PoC {
/// difficulties of some duration(50 blocks).
// Bounded to ~50 entries by append_target_info.
pub TargetInfo get(fn target_info): Vec<Difficulty>;
/// deadlines of the mining.
// Bounded to ~2000 entries by append_dl_info.
pub DlInfo get(fn dl_info): Vec<MiningInfo<T::AccountId>>;
/// the mining history of miners.
pub History get(fn history): map hasher(twox_64_concat) T::AccountId => Option<MiningHistory<BalanceOf<T>, T::BlockNumber>>;
/// the reward history of users.
pub UserRewardHistory get(fn user_reward_history): map hasher(twox_64_concat) T::AccountId => Vec<(T::BlockNumber, BalanceOf<T>)>;
/// the net power(how much capacity)
pub NetPower get(fn net_power): u64;
/// how much capacity that one difficulty.
// NOTE(review): getter name has a typo ("difficult"); renaming would break callers.
pub CapacityOfPerDifficulty get(fn capacity_of_per_difficult): u64 = 5;
/// how often to adjust difficulty.
pub AdjustDifficultyDuration get(fn adjust_difficulty_duration): u64 = 50;
/// how much IPSE that one Gib should staking.
pub CapacityPrice get(fn capacity_price): BalanceOf<T> = 10.saturated_into::<BalanceOf<T>>() * DOLLARS.saturated_into::<BalanceOf<T>>();
/// active miners (now_count, [account_id..], last_count, [account_id..])
// Rotated every 8 hours in on_finalize: current counters move to the "last" slots.
pub ActiveMiners get(fn active_miners): (u32, BTreeSet<T::AccountId>, u32, BTreeSet<T::AccountId>);
}
}
// Pallet events. Variant names are part of the chain's public interface and
// cannot be renamed here (note the "Minning" spelling).
decl_event! {
pub enum Event<T>
where
AccountId = <T as system::Trait>::AccountId,
Balance = <<T as staking::Trait>::StakingCurrency as Currency<<T as system::Trait>::AccountId>>::Balance,
{
// A miner submitted the (so far) best deadline for a cycle.
Minning(AccountId, u64),
Verify(AccountId, bool),
// RewardTreasury(AccountId, Balance),
SetDifficulty(u64),
SetCapacityOfPerDifficulty(u64),
SetAdjustDifficultyDuration(u64),
SetCapacityPrice(Balance),
}
}
// Dispatchables and block hooks. The `///` lines are on-chain metadata and
// left untouched; reviewer notes are `//` comments only, since the macro's
// grammar and the statement order here are behavior-critical.
decl_module! {
pub struct Module<T: Trait> for enum Call where origin: T::Origin {
type Error = Error<T>;
fn deposit_event() = default;
const GENESIS_BASE_TARGET: u64 = T::GENESIS_BASE_TARGET::get();
/// poc mining total reward.
const TotalMiningReward: BalanceOf<T> = T::TotalMiningReward::get();
/// the max deviation value of the mining probability。
const ProbabilityDeviationValue: Percent = T::ProbabilityDeviationValue::get();
/// max deadine(you should not submit the value up this value).
const MaxDeadlineValue: u64 = T::MaxDeadlineValue::get();
/// set the difficulty
// Root-only. `difficulty` is clamped to (0, GENESIS_BASE_TARGET]; the stored
// base_target is GENESIS_BASE_TARGET / difficulty.
#[weight = 10_000]
fn set_difficulty(origin, difficulty: u64) {
ensure_root(origin)?;
ensure!(difficulty != 0u64, Error::<T>::DifficultyIsZero);
ensure!(difficulty <= T::GENESIS_BASE_TARGET::get(), Error::<T>::DifficultyIsTooLarge);
let base_target = T::GENESIS_BASE_TARGET::get() / difficulty;
let now = <staking::Module<T>>::now().saturated_into::<u64>();
Self::append_target_info(Difficulty{
block: now,
base_target: base_target,
net_difficulty: T::GENESIS_BASE_TARGET::get() / base_target,
});
// Note: the event carries the derived base_target, not the input difficulty.
Self::deposit_event(RawEvent::SetDifficulty(base_target));
}
/// how often to adjust the difficulty.
// Root-only; must be non-zero (used as a modulus in on_finalize).
#[weight = 10_000]
fn set_adjust_difficulty_duration(origin, block_num: u64) {
ensure_root(origin)?;
ensure!(block_num > 0u64, Error::<T>::DurationIsZero);
<AdjustDifficultyDuration>::put(block_num);
Self::deposit_event(RawEvent::SetAdjustDifficultyDuration(block_num));
}
/// how much IPSE that one Gib should staking.
// Root-only.
#[weight = 10_000]
fn set_capacity_price(origin, price: BalanceOf<T>) {
ensure_root(origin)?;
<CapacityPrice<T>>::put(price);
Self::deposit_event(RawEvent::SetCapacityPrice(price));
}
/// how much capacity that one difficulty.
// Root-only; must be non-zero.
#[weight = 10_000]
fn set_capacity_of_per_difficulty(origin, capacity: u64) {
ensure_root(origin)?;
ensure!(capacity != 0u64, Error::<T>::CapacityIsZero);
<CapacityOfPerDifficulty>::put(capacity);
Self::deposit_event(RawEvent::SetCapacityOfPerDifficulty(capacity));
}
/// submit deadline.
// Signed. Records the caller as active, checks registration/plot id, then
// accepts the deadline only if it (a) belongs to the current MiningExpire
// cycle, (b) beats the cycle's best deadline so far, and (c) passes PoC
// verification against the current base_target.
#[weight = 50_000_000 as Weight + T::DbWeight::get().reads(8 as Weight).saturating_add(T::DbWeight::get().writes(3 as Weight))]
fn mining(origin, account_id: u64, height: u64, sig: [u8; 32], nonce: u64, deadline: u64) -> DispatchResult {
let miner = ensure_signed(origin)?;
// Count each distinct miner once per rotation window.
<ActiveMiners<T>>::mutate(|h| if h.1.insert(miner.clone()) {
h.0 += 1;
});
debug::info!("miner: {:?}, submit deadline!, height = {}, deadline = {}", miner.clone(), height, deadline);
ensure!(deadline <= T::MaxDeadlineValue::get(), Error::<T>::DeadlineTooLarge);
ensure!(<staking::Module<T>>::is_can_mining(miner.clone())?, Error::<T>::NotRegister);
let real_pid = <staking::Module<T>>::disk_of(&miner).unwrap().numeric_id;
ensure!(real_pid == account_id.into(), Error::<T>::PidErr);
let current_block = <system::Module<T>>::block_number().saturated_into::<u64>();
debug::info!("starting Verify Deadline !!!");
// The submitted height must fall in the same MiningExpire cycle as the
// current block and must not be in the future.
if !(current_block / MiningExpire == height / MiningExpire && current_block >= height)
{
debug::info!("expire! :{:?}, off chain get info block: {:?}, deadline is: {:?}", height, current_block, deadline);
return Err(Error::<T>::HeightNotInDuration)?;
}
let dl = Self::dl_info();
let (block, best_dl) = if let Some(dl_info) = dl.iter().last() {
(dl_info.clone().block, dl_info.best_dl)
} else {
(0, core::u64::MAX)
};
// Someone(miner) has mined a better deadline at this mining cycle before.
if best_dl <= deadline && current_block / MiningExpire == block / MiningExpire {
debug::info!("not best deadline! best_dl = {}, submit deadline = {}!", best_dl, deadline);
return Err(Error::<T>::NotBestDeadline)?;
}
let verify_ok = Self::verify_dl(account_id, height, sig, nonce, deadline);
if verify_ok.0 {
debug::info!("verify is ok!, deadline = {}", deadline);
// Replace (pop) the cycle's previous best entry before appending ours.
if current_block / MiningExpire == block / MiningExpire {
DlInfo::<T>::mutate(|dl| dl.pop());
}
Self::append_dl_info(MiningInfo{
miner: Some(miner.clone()),
best_dl: deadline,
block: current_block,
});
Self::deposit_event(RawEvent::Minning(miner, deadline));
}
else {
debug::info!("verify failed! deadline = {:?}, target = {:?}, base_target = {:?}", verify_ok.1 / verify_ok.2, verify_ok.1, verify_ok.2);
return Err(Error::<T>::VerifyFaile)?;
}
Ok(())
}
// Seed the difficulty table with the genesis base target on block 1.
fn on_initialize(n: T::BlockNumber) -> Weight{
if n == T::BlockNumber::from(1u32) {
Self::append_target_info(Difficulty{
base_target: T::GENESIS_BASE_TARGET::get(),
net_difficulty: 1,
block: 1,
});
}
0
}
// Per-block bookkeeping: rotate the active-miner window every 8 hours, pay
// out at the end of each MiningExpire cycle (to the winning miner, or to the
// treasury if nobody mined this cycle), periodically re-adjust difficulty,
// and refresh the cached net capacity.
fn on_finalize(n: T::BlockNumber) {
if (n % ( 8 * HOURS).saturated_into::<T::BlockNumber>()).is_zero() {
<ActiveMiners<T>>::mutate(|h| {
h.2 = h.0.clone();
h.3 = h.1.clone();
h.1.clear();
h.0 = 0u32;
});
}
let current_block = n.saturated_into::<u64>();
let last_mining_block = Self::get_last_mining_block();
debug::info!("current-block = {}, last-mining-block = {}", current_block, last_mining_block);
let reward_result = Self::get_reward_amount();
// `reward` is definitely assigned on every path that reaches its use:
// the else-branch returns early.
let mut reward: BalanceOf<T>;
if reward_result.is_ok() {
reward = reward_result.unwrap();
}
else {
return
}
if (current_block + 1) % MiningExpire == 0 {
if current_block / MiningExpire == last_mining_block / MiningExpire {
if let Some(miner_info) = Self::dl_info().last() {
let miner: Option<T::AccountId> = miner_info.clone().miner;
if miner.is_some() {
Self::reward(miner.unwrap(), reward);
debug::info!("<<REWARD>> miner on block {}, last_mining_block {}", current_block, last_mining_block);
}
}
}
else {
Self::treasury_minning(current_block);
Self::reward_treasury(reward);
}
}
if current_block % <AdjustDifficultyDuration>::get() == 0 {
Self::adjust_difficulty(current_block);
}
Self::get_total_capacity();
}
}
}
impl<T: Trait> Module<T> {
// Re-target difficulty from the recent average deadline: make mining harder
// (base_target * 10 / SPEED) when the average deadline is short, easier
// (base_target * SPEED / 10) when long, else keep it.
// NOTE(review): the 2000/3000 thresholds' unit is the raw deadline value;
// its relation to wall-clock time is not established in this file.
fn adjust_difficulty(block: u64) {
debug::info!("[ADJUST] difficulty on block {}", block);
let last_base_target = Self::get_last_base_target().0;
let last_net_difficulty = Self::get_last_base_target().1;
let ave_deadline = Self::get_ave_deadline().1;
let mining_count = Self::get_ave_deadline().0;
if (ave_deadline < 2000 && mining_count > 0) {
let mut new = last_base_target.saturating_mul(10) / SPEED;
// Clamp to 1 so the division below (GENESIS_BASE_TARGET / new) is safe.
if new == 0 {
new = 1;
}
debug::info!("[DIFFICULTY] make more difficult, base_target = {:?}", new);
Self::append_target_info(Difficulty {
block,
base_target: new,
net_difficulty: T::GENESIS_BASE_TARGET::get() / new,
});
} else if ave_deadline > 3000 {
let new = last_base_target.saturating_mul(SPEED) / 10;
Self::append_target_info(Difficulty {
block,
base_target: new,
net_difficulty: T::GENESIS_BASE_TARGET::get() / new,
});
debug::info!("[DIFFICULTY] make easier, base_target = {}", new);
} else {
let new = last_base_target;
debug::info!("[DIFFICULTY] use avg, base_target = {}", new);
Self::append_target_info(Difficulty {
block,
base_target: new,
net_difficulty: T::GENESIS_BASE_TARGET::get() / new,
});
}
}
// Record a "treasury wins" entry (miner = None) with the worst possible
// accepted deadline for a cycle in which no miner submitted.
fn treasury_minning(current_block: u64) {
Self::append_dl_info(MiningInfo {
miner: None,
best_dl: T::MaxDeadlineValue::get(),
block: current_block,
});
debug::info!("<<REWARD>> treasury on block {}", current_block);
}
// Latest base_target. Panics if TargetInfo is empty; on_initialize seeds it
// at block 1, so this holds after genesis.
fn get_current_base_target() -> u64 {
let ti = Self::target_info();
ti.iter().last().unwrap().base_target
}
// Block height of the most recent deadline entry, or 0 if none exists.
fn get_last_mining_block() -> u64 {
let dl = Self::dl_info();
if let Some(dl) = dl.iter().last() {
dl.block
} else {
0
}
}
// Latest (base_target, net_difficulty), falling back to genesis values.
fn get_last_base_target() -> (u64, u64) {
let ti = Self::target_info();
if let Some(info) = ti.iter().last() {
(info.base_target, info.net_difficulty)
} else {
(T::GENESIS_BASE_TARGET::get(), 1u64)
}
}
// Scan the newest deadline entries (up to one adjustment window's worth of
// cycles) and return (number of miner-won cycles, average best deadline of
// those cycles); (0, 0) when no miner-won entry is in the window.
fn get_ave_deadline() -> (u64, u64) {
let dl = Self::dl_info();
let mut iter = dl.iter().rev();
let mut count = 0_u64;
let mut real_count = 0_u64;
let mut deadline = 0_u64;
while let Some(dl) = iter.next() {
if count == <AdjustDifficultyDuration>::get() / MiningExpire {
break
}
// miner == None entries are treasury cycles; they don't count.
if dl.miner.is_some() {
real_count += 1;
deadline += dl.best_dl;
}
count += 1;
}
if real_count == 0 {
(0, 0u64)
} else {
(real_count, deadline / real_count)
}
}
// Recompute the PoC deadline for the submission and compare it with the
// claimed one. Returns (matches, target, base_target).
fn verify_dl(
account_id: u64,
height: u64,
sig: [u8; 32],
nonce: u64,
deadline: u64,
) -> (bool, u64, u64) {
let scoop_data = calculate_scoop(height, &sig) as u64;
debug::info!("scoop_data: {:?}", scoop_data);
debug::info!("sig: {:?}", sig);
// 262144 bytes = one full nonce (4096 scoops x 64 bytes).
let mut cache = vec![0_u8; 262144];
noncegen_rust(&mut cache[..], account_id, nonce, 1);
let mirror_scoop_data = Self::gen_mirror_scoop_data(scoop_data, cache);
let (target, _) = find_best_deadline_rust(mirror_scoop_data.as_ref(), 1, &sig);
debug::info!("target: {:?}", target);
let base_target = Self::get_current_base_target();
let deadline_ = target / base_target;
debug::info!("deadline: {:?}", deadline_);
(deadline == target / base_target, target, base_target)
}
// Build a 64-byte scoop from the plot cache: first half from scoop
// `scoop_data`, second half from the mirror scoop (4095 - scoop_data).
fn gen_mirror_scoop_data(scoop_data: u64, cache: Vec<u8>) -> Vec<u8> {
let addr = 64 * scoop_data as usize;
let mirror_scoop = 4095 - scoop_data as usize;
let mirror_addr = 64 * mirror_scoop as usize;
let mut mirror_scoop_data = vec![0_u8; 64];
mirror_scoop_data[0..32].clone_from_slice(&cache[addr..addr + 32]);
mirror_scoop_data[32..64].clone_from_slice(&cache[mirror_addr + 32..mirror_addr + 64]);
mirror_scoop_data
}
// Treasury pallet's pot account.
fn get_treasury_id() -> T::AccountId {
<treasury::Module<T>>::account_id()
}
// Per-cycle reward: TotalMiningReward halved every 2 years, spread over a
// year of blocks, scaled by the cycle length. Returns 0 once the halving
// exponent overflows u32::checked_pow.
fn get_reward_amount() -> result::Result<BalanceOf<T>, DispatchError> {
let now = <staking::Module<T>>::now();
let sub_half_reward_time = 2u32;
let year = now.checked_div(&T::BlockNumber::from(YEAR)).ok_or(Error::<T>::DivZero)?;
let duration = year / T::BlockNumber::from(sub_half_reward_time);
let duration = <<T as system::Trait>::BlockNumber as TryInto<u32>>::try_into(duration)
.map_err(|_| Error::<T>::ConvertErr)? +
1u32;
let n_opt = sub_half_reward_time.checked_pow(duration);
let reward: BalanceOf<T>;
if n_opt.is_some() {
let n = <BalanceOf<T>>::from(n_opt.unwrap());
reward = T::TotalMiningReward::get() /
n / 2.saturated_into::<BalanceOf<T>>() /
Self::block_convert_to_balance(T::BlockNumber::from(YEAR))?;
Ok(reward * MiningExpire.saturated_into::<BalanceOf<T>>())
} else {
Ok(<BalanceOf<T>>::from(0u32))
}
}
// BlockNumber -> Balance via u32; errors if either conversion fails.
fn block_convert_to_balance(n: T::BlockNumber) -> result::Result<BalanceOf<T>, DispatchError> {
let n_u = <<T as system::Trait>::BlockNumber as TryInto<u32>>::try_into(n)
.map_err(|_| Error::<T>::ConvertErr)?;
let n_b =
<BalanceOf<T> as TryFrom<u32>>::try_from(n_u).map_err(|_| Error::<T>::ConvertErr)?;
Ok(n_b)
}
// Mint `reward` into the treasury pot.
fn reward_treasury(reward: BalanceOf<T>) {
let account_id = Self::get_treasury_id();
T::PocAddOrigin::on_unbalanced(T::StakingCurrency::deposit_creating(&account_id, reward));
// Self::deposit_event(RawEvent::RewardTreasury(account_id, reward));
}
// Pay the cycle winner. The miner gets the full reward only when (a) its
// stake covers its declared plot size at the current capacity price AND
// (b) its win rate stays within ProbabilityDeviationValue of its staked
// share of the network; otherwise it gets 10% and the treasury gets 90%.
// The (possibly reduced) reward is then appended to the miner's bounded
// history (trimmed to ~300 entries).
fn reward(miner: T::AccountId, mut reward: BalanceOf<T>) -> DispatchResult {
let all_reward = reward.clone();
let machine_info = <staking::Module<T>>::disk_of(&miner).ok_or(Error::<T>::NotRegister)?;
let disk = machine_info.clone().plot_size;
let update_time = machine_info.clone().update_time;
let mut miner_mining_num = match <History<T>>::get(&miner) {
Some(h) => h.total_num + 1u64,
None => 1u64,
};
let now = <staking::Module<T>>::now();
let staking_info_opt = <staking::Module<T>>::staking_info_of(&miner);
if staking_info_opt.is_some() {
let total_staking = staking_info_opt.unwrap().total_staking;
let miner_should_staking_amount =
disk.saturated_into::<BalanceOf<T>>().saturating_mul(<CapacityPrice<T>>::get()) /
GIB.saturated_into::<BalanceOf<T>>();
if miner_should_staking_amount <= total_staking {
debug::info!("miner's staking enough!staking enough = {:?} ", total_staking);
let mut net_mining_num = (now - update_time).saturated_into::<u64>() / MiningExpire;
if net_mining_num < miner_mining_num {
net_mining_num = miner_mining_num
}
debug::info!(
"miner: {:?}, mining probability: {:?} / {:?}",
miner.clone(),
miner_mining_num,
net_mining_num
);
let net_should_staking_total_amount = Self::get_total_capacity()
.saturated_into::<BalanceOf<T>>()
.saturating_mul(<CapacityPrice<T>>::get()) /
GIB.saturated_into::<BalanceOf<T>>();
// Cross-multiplied comparison of miner_mining_num/net_mining_num vs
// miner_stake/net_stake, with a +/- ProbabilityDeviationValue band.
if (miner_mining_num
.saturated_into::<BalanceOf<T>>()
.saturating_mul(net_should_staking_total_amount) >
(net_mining_num.saturated_into::<BalanceOf<T>>() *
miner_should_staking_amount)
.saturating_add(
T::ProbabilityDeviationValue::get() *
(net_mining_num.saturated_into::<BalanceOf<T>>() *
miner_should_staking_amount),
)) || ((net_mining_num.saturated_into::<BalanceOf<T>>() *
miner_should_staking_amount)
.saturating_sub(
T::ProbabilityDeviationValue::get() *
net_mining_num.saturated_into::<BalanceOf<T>>() *
miner_should_staking_amount,
) > miner_mining_num
.saturated_into::<BalanceOf<T>>()
.saturating_mul(net_should_staking_total_amount))
{
debug::info!("Miners: {:?} have a high probability of mining, and you should increase the disk space", miner.clone());
reward = Percent::from_percent(10) * reward;
Self::reward_staker(miner.clone(), reward);
Self::reward_treasury(Percent::from_percent(90) * all_reward);
} else {
debug::info!("Get all reward.");
// No-op assignment kept for byte-identity: full reward case.
reward = reward;
Self::reward_staker(miner.clone(), reward);
}
} else {
debug::info!("Get 10% reward.");
reward = Percent::from_percent(10) * reward;
Self::reward_staker(miner.clone(), reward);
Self::reward_treasury(Percent::from_percent(90) * all_reward);
}
} else {
debug::info!("miner have no staking info.");
reward = Percent::from_percent(10) * reward;
Self::reward_staker(miner.clone(), reward);
Self::reward_treasury(Percent::from_percent(90) * all_reward);
}
let history_opt = <History<T>>::get(&miner);
if history_opt.is_some() {
let mut his = history_opt.unwrap();
his.total_num = miner_mining_num;
his.history.push((now, reward));
// Drop the oldest entry once the history reaches 300.
if his.history.len() >= 300 {
let mut old_history = his.history.clone();
let new_history = old_history.split_off(1);
his.history = new_history;
}
<History<T>>::insert(miner.clone(), his);
} else {
let history = vec![(now, reward)];
<History<T>>::insert(
miner.clone(),
MiningHistory { total_num: miner_mining_num, history },
);
}
Ok(())
}
// Split `reward` between the miner and its stakers proportionally to stake
// (miner_proportion goes to the miner first); a staker entry equal to the
// miner is paid through reward_miner as well.
fn reward_staker(miner: T::AccountId, reward: BalanceOf<T>) -> DispatchResult {
let now = <staking::Module<T>>::now();
let staking_info =
<staking::Module<T>>::staking_info_of(&miner).ok_or(Error::<T>::NotRegister)?;
let stakers = staking_info.clone().others;
if stakers.len() == 0 {
Self::reward_miner(miner.clone(), reward, now);
} else {
let miner_reward = staking_info.clone().miner_proportion * reward;
Self::reward_miner(miner.clone(), miner_reward, now);
let stakers_reward = reward - miner_reward;
let total_staking = staking_info.clone().total_staking;
for staker_info in stakers.iter() {
let staker_reward = stakers_reward
.saturating_mul(staker_info.clone().1)
.checked_div(&total_staking)
.ok_or(Error::<T>::DivZero)?;
if staker_info.clone().0 == miner.clone() {
Self::reward_miner(miner.clone(), staker_reward, now);
} else {
T::PocAddOrigin::on_unbalanced(T::StakingCurrency::deposit_creating(
&staker_info.clone().0,
staker_reward,
));
Self::update_reword_history(staker_info.clone().0, staker_reward, now);
}
}
}
Ok(())
}
// Estimate network capacity: average net_difficulty over the newest (up to)
// 6 TargetInfo entries, times GIB * CapacityOfPerDifficulty. Also caches
// the result in NetPower.
fn get_total_capacity() -> u64 {
let mut old_target_info_vec = <TargetInfo>::get();
let len = old_target_info_vec.len();
if len > 6 {
let new_target_info = old_target_info_vec.split_off(len - 6);
old_target_info_vec = new_target_info;
}
let len = old_target_info_vec.len() as u64;
let mut total_difficulty = 0u64;
for i in old_target_info_vec.iter() {
total_difficulty += i.net_difficulty;
}
let mut avg_difficulty = 0;
if len == 0 {
avg_difficulty = 0;
} else {
avg_difficulty = total_difficulty / len;
}
let capacity = avg_difficulty.saturating_mul(GIB * <CapacityOfPerDifficulty>::get());
<NetPower>::put(capacity);
return capacity
}
// Pay `amount` to the miner's configured reward destination. If the
// destination differs from the miner itself, the miner keeps 10% and the
// destination receives the remainder.
fn reward_miner(miner: T::AccountId, amount: BalanceOf<T>, now: T::BlockNumber) {
let disk = <staking::Module<T>>::disk_of(&miner).unwrap();
let reward_dest = disk.reward_dest;
if reward_dest == miner.clone() {
T::PocAddOrigin::on_unbalanced(T::StakingCurrency::deposit_creating(
&reward_dest,
amount,
));
Self::update_reword_history(reward_dest.clone(), amount, now);
} else {
let miner_reward = Percent::from_percent(10) * amount;
T::PocAddOrigin::on_unbalanced(T::StakingCurrency::deposit_creating(
&miner,
miner_reward,
));
Self::update_reword_history(miner, miner_reward, now);
let dest_reward = amount.saturating_sub(miner_reward);
T::PocAddOrigin::on_unbalanced(T::StakingCurrency::deposit_creating(
&reward_dest,
dest_reward,
));
Self::update_reword_history(reward_dest, dest_reward, now);
}
}
// Append one (block, amount) entry to an account's reward history, trimmed
// to ~300 entries. (Name keeps the existing "reword" spelling; renaming
// would touch every caller.)
fn update_reword_history(
account_id: T::AccountId,
amount: BalanceOf<T>,
block_num: T::BlockNumber,
) {
let mut reward_history = <UserRewardHistory<T>>::get(account_id.clone());
reward_history.push((block_num, amount));
if reward_history.len() >= 300 {
let mut old_history = reward_history.clone();
let new_history = old_history.split_off(1);
<UserRewardHistory<T>>::insert(account_id, new_history);
} else {
<UserRewardHistory<T>>::insert(account_id, reward_history);
}
}
// Append a deadline entry, bounding DlInfo to roughly 2000 entries.
// (The length is read before the push, so the vec can briefly hold 2001.)
fn append_dl_info(dl_info: MiningInfo<T::AccountId>) {
let mut old_dl_info_vec = <DlInfo<T>>::get();
let len = old_dl_info_vec.len();
old_dl_info_vec.push(dl_info);
if len >= 2000 {
let new_dl_info = old_dl_info_vec.split_off(len - 2000);
old_dl_info_vec = new_dl_info;
}
<DlInfo<T>>::put(old_dl_info_vec);
}
// Append a difficulty entry, bounding TargetInfo to roughly 50 entries
// (same pre-push length read as append_dl_info).
fn append_target_info(difficulty: Difficulty) {
let mut old_target_info_vec = <TargetInfo>::get();
let len = old_target_info_vec.len();
old_target_info_vec.push(difficulty);
if len >= 50 {
let new_target_info = old_target_info_vec.split_off(len - 50);
old_target_info_vec = new_target_info;
}
<TargetInfo>::put(old_target_info_vec);
}
}
// PocHandler hook: lets other pallets (via the trait) erase a miner's
// accumulated mining history.
impl<T: Trait> PocHandler<T::AccountId> for Module<T> {
fn remove_history(miner: T::AccountId) {
<History<T>>::remove(miner);
}
}
decl_error! {
/// Error for the ipse module.
// Variant names are part of the public interface and cannot be fixed here
// (note the "VerifyFaile" spelling).
pub enum Error for Module<T: Trait> {
/// 0 can't be a divisor.
DivZero,
/// not register.
NotRegister,
/// not your plot id.
PidErr,
/// data type conversion error
ConvertErr,
/// submit deadline too delay.
HeightNotInDuration,
/// not best deadline
NotBestDeadline,
/// deadline verify failed.
VerifyFaile,
/// the capacity should not empty.
CapacityIsZero,
/// submit deadline up max value.
DeadlineTooLarge,
/// the block number should not zero.
DurationIsZero,
/// the difficulty should not zero.
DifficultyIsZero,
/// the difficulty up max value.
DifficultyIsTooLarge,
}
}
|
use crate::{
commands::osu::MatchResult,
embeds::Footer,
util::constants::{DESCRIPTION_SIZE, OSU_BASE},
};
use rosu_v2::model::matches::OsuMatch;
use std::{borrow::Cow, fmt::Write};
// Embed data for an osu! multiplayer match-cost summary: a pre-rendered
// description plus thumbnail/title/url/footer fields.
pub struct MatchCostEmbed {
    // Rendered body text (team scores and per-player cost lines).
    description: String,
    // MVP avatar URL (may be empty when no match result was supplied).
    thumbnail: String,
    title: String,
    url: String,
    footer: Footer,
}
impl MatchCostEmbed {
    /// Builds the embed from a match and an optional precomputed result.
    ///
    /// If `description` is given it is used verbatim; otherwise one is
    /// generated from `match_result` (which must then be `Some`, see the
    /// `unreachable!` below). Returns `None` only when the *generated*
    /// description exceeds `DESCRIPTION_SIZE`.
    ///
    /// NOTE(review): a caller-provided description is not length-checked —
    /// confirm callers guarantee it fits.
    pub fn new(
        osu_match: &mut OsuMatch,
        description: Option<String>,
        match_result: Option<MatchResult>,
    ) -> Option<Self> {
        let mut thumbnail = String::new();
        let description = if let Some(description) = description {
            description
        } else {
            // Medals for the top three costs; removed as they are awarded.
            let mut medals = vec!["🥇", "🥈", "🥉"];
            let mut description = String::with_capacity(256);
            match match_result {
                Some(MatchResult::TeamVS {
                    match_scores,
                    blue,
                    red,
                    mvp_avatar_url,
                }) => {
                    // Header: team score line, leading team's score in bold.
                    let _ = writeln!(description,
                        "**{word} score:** :blue_circle: {blue_stars}{blue_score}{blue_stars} - {red_stars}{red_score}{red_stars} :red_circle:\n",
                        word = if osu_match.end_time.is_some() { "Final" } else { "Current" },
                        blue_score = match_scores.blue(),
                        red_score = match_scores.red(),
                        blue_stars = if match_scores.blue() > match_scores.red() { "**" } else { "" },
                        red_stars = if match_scores.blue() < match_scores.red() { "**" } else { "" },
                    );
                    // Blue team
                    for (i, (id, cost)) in blue.into_iter().enumerate() {
                        // Fall back to the raw user id when the user lookup misses.
                        let name = match osu_match.users.get(&id) {
                            Some(user) => Cow::Borrowed(user.username.as_str()),
                            None => Cow::Owned(format!("User id {id}")),
                        };
                        // Award the next medal only if this blue player's cost beats
                        // the red player at the same overall standing; otherwise the
                        // medal is left for the red team loop below.
                        let medal = {
                            let mut idx = 0;
                            while idx < medals.len() {
                                let red_cost = red.get(idx).map(|(.., cost)| *cost).unwrap_or(0.0);
                                if cost > red_cost {
                                    break;
                                }
                                idx += 1;
                            }
                            if idx < medals.len() {
                                medals.remove(idx)
                            } else {
                                ""
                            }
                        };
                        let _ = writeln!(
                            description,
                            "**{idx}**: [{name}]({base}users/{user_id}) - **{cost:.2}** {medal}",
                            idx = i + 1,
                            name = name,
                            base = OSU_BASE,
                            user_id = id,
                            cost = cost,
                            medal = medal,
                        );
                    }
                    // Red team
                    let _ = writeln!(description, "\n:red_circle: **Red Team** :red_circle:");
                    for (i, (id, cost)) in red.into_iter().enumerate() {
                        let name = match osu_match.users.get(&id) {
                            Some(user) => Cow::Borrowed(user.username.as_str()),
                            None => Cow::Owned(format!("User id {id}")),
                        };
                        // Red players take whatever medals the blue loop left over,
                        // in order.
                        let medal = if !medals.is_empty() {
                            medals.remove(0)
                        } else {
                            ""
                        };
                        let _ = writeln!(
                            description,
                            "**{idx}**: [{name}]({base}users/{user_id}) - **{cost:.2}** {medal}",
                            idx = i + 1,
                            name = name,
                            base = OSU_BASE,
                            user_id = id,
                            cost = cost,
                            medal = medal,
                        );
                    }
                    thumbnail = mvp_avatar_url;
                }
                Some(MatchResult::HeadToHead {
                    players,
                    mvp_avatar_url,
                }) => {
                    // Players are assumed pre-sorted by cost; medals go to the
                    // first three by position.
                    for (i, (id, cost)) in players.into_iter().enumerate() {
                        let name = match osu_match.users.get(&id) {
                            Some(user) => Cow::Borrowed(user.username.as_str()),
                            None => Cow::Owned(format!("User id {id}")),
                        };
                        let _ = writeln!(
                            description,
                            "**{idx}**: [{name}]({base}users/{user_id}) - **{cost:.2}** {medal}",
                            idx = i + 1,
                            name = name,
                            base = OSU_BASE,
                            user_id = id,
                            cost = cost,
                            medal = medals.get(i).unwrap_or(&""),
                        );
                    }
                    thumbnail = mvp_avatar_url;
                }
                // Callers must pass a match_result when no description is given.
                None => unreachable!(),
            }
            // Discord rejects descriptions over the size limit; bail out.
            if description.len() >= DESCRIPTION_SIZE {
                return None;
            }
            description
        };
        let match_id = osu_match.match_id;
        // Take ownership of the match name without cloning.
        let mut title = String::new();
        std::mem::swap(&mut title, &mut osu_match.name);
        title.retain(|c| c != '(' && c != ')');
        let footer = Footer::new("Note: Formula is subject to change; values are volatile");
        Some(Self {
            title,
            footer,
            thumbnail,
            description,
            url: format!("{OSU_BASE}community/matches/{match_id}"),
        })
    }
}
// Generates the accessor/builder boilerplate for the listed fields
// (project-local `impl_builder!` macro).
impl_builder!(MatchCostEmbed {
    description,
    footer,
    thumbnail,
    title,
    url,
});
|
pub mod config;
pub mod factories;
pub mod assembly;
pub mod launch;
pub mod robot; |
use general::FreeMonoid;
use general::SemiGroup;
use general::TryReader;
use token::Token;
use sourcecode::Code;
use sourcecode::Span;
use parse::SyntaxTree;
use parse::Func;
/// Root of the syntax tree: the ordered list of parsed function definitions.
pub struct Root {
    pub funcs: Vec<Func>,
}
impl SyntaxTree for Root {
fn parse(mut token_reader: &mut TryReader<Code<Token>>)
-> Result<Root, (Option<Span>, String)> {
let mut funcs = Vec::new();
while token_reader.has_next() {
match Func::parse(&mut token_reader) {
Ok(func) => funcs.push(func),
Err(err) => return Err(err),
}
token_reader.drop_while(|token| token.value == Token::LineBreak);
}
Ok(Root{funcs})
}
fn span(&self) -> Span {
self.funcs
.iter()
.map(|stmt| stmt.span())
.map(FreeMonoid::Some)
.fold(FreeMonoid::Zero, |acc, x| acc.plus(&x))
.get()
.unwrap()
.clone()
}
} |
extern crate clap;
extern crate egsphsp;
extern crate rand;
extern crate cpu_time;
use std::path::Path;
use std::error::Error;
use std::process::exit;
use std::f32;
use std::fs::File;
use clap::{App, AppSettings, SubCommand, Arg};
use egsphsp::PHSPReader;
use egsphsp::{transform, Transform, combine,sample};
use rand::Rng;
use cpu_time::ProcessTime;
use std::time::Duration;
/// Parses an `f32` from `s`, tolerating surrounding whitespace and wrapping
/// parentheses, e.g. `" (1.5) "` -> `1.5`.
///
/// # Panics
/// Panics with a message naming the offending input when parsing fails
/// (the original bare `unwrap` gave no context for bad CLI arguments).
fn floatify(s: &str) -> f32 {
    let trimmed = s
        .trim()
        .trim_start_matches('(')
        .trim_end_matches(')')
        .trim();
    trimmed
        .parse::<f32>()
        .unwrap_or_else(|e| panic!("invalid float argument {:?}: {}", s, e))
}
/// CLI entry point: builds the clap command tree, dispatches on the chosen
/// subcommand, and exits 0 on success / 1 with the error message on failure.
fn main() {
    // One subcommand per operation; clap enforces that one is present
    // (SubcommandRequiredElseHelp), so the unwrap below is safe.
    let matches = App::new("phasespace")
        .version("0.0.1")
        .author("Stevan Pecic <stevan.pecic@icloud.com>")
        .about("Transform and inspect .egsphsp \
               files")
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .subcommand(SubCommand::with_name("print")
                    .about("Print the specified fields in the specified order for n (or all) records")
                    .arg(Arg::with_name("fields")
                         .long("field")
                         .short("f")
                         .takes_value(true)
                         .required(true)
                         .multiple(true))
                    .arg(Arg::with_name("number")
                         .long("number")
                         .short("n")
                         .takes_value(true)
                         .default_value("10"))
                    .arg(Arg::with_name("input")
                         .takes_value(true)
                         .required(true)))
        .subcommand(SubCommand::with_name("twist")
                    .about("Rotate r times by a random increment")
                    .arg(Arg::with_name("input")
                         .takes_value(true)
                         .required(true)
                         .help("Input phsp file"))
                    .arg(Arg::with_name("iterations")
                         .short("r")
                         .takes_value(true)
                         .long("iterations")
                         .required(true)
                         .help("Number of iterations")))
        .subcommand(SubCommand::with_name("sample")
                    .about("Sample particles from phase space - does not \
                            adjust weights")
                    .arg(Arg::with_name("input")
                         .required(true)
                         .multiple(true))
                    .arg(Arg::with_name("output")
                         .short("o")
                         .long("output")
                         .takes_value(true)
                         .required(true))
                    .arg(Arg::with_name("seed")
                         .long("seed")
                         .help("Seed as an unsigned integer")
                         .default_value("0")
                         .required(false))
                    .arg(Arg::with_name("rate")
                         .default_value("10")
                         .required(false)
                         .long("rate")
                         .takes_value(true)
                         .help("Inverse sample rate - 10 means take rougly 1 out of every 10 particles")))
        .subcommand(SubCommand::with_name("info")
                    .about("Basic information on phase space file")
                    .arg(Arg::with_name("input").required(true))
                    .arg(Arg::with_name("format")
                         .default_value("human")
                         .possible_values(&["human", "json"])
                         .long("format")
                         .takes_value(true)
                         .help("Output information in json or human format")))
        .subcommand(SubCommand::with_name("combine")
                    .about("Combine phase space from one or more input files into outputfile")
                    .arg(Arg::with_name("input")
                         .required(true)
                         .multiple(true))
                    .arg(Arg::with_name("output")
                         .short("o")
                         .long("output")
                         .takes_value(true)
                         .required(true))
                    .arg(Arg::with_name("delete")
                         .short("d")
                         .long("delete")
                         .help("Delete input files as they are used (no going back!)")))
        .subcommand(SubCommand::with_name("shout")
                    .about("Combine phase space files from twist algorithm")
                    .arg(Arg::with_name("input")
                         .takes_value(true)
                         .multiple(true))
                    .arg(Arg::with_name("output")
                         .default_value("tns.egsphsp1")
                         .short("o")
                         .long("output")
                         .takes_value(true)))
        .subcommand(SubCommand::with_name("rotate")
                    .about("Rotate by --angle radians counter clockwise around z axis")
                    .arg(Arg::with_name("in-place")
                         .short("i")
                         .long("in-place")
                         .help("Transform input file in-place"))
                    .arg(Arg::with_name("angle")
                         .short("a")
                         .long("angle")
                         .takes_value(true)
                         .required(true)
                         .help("Counter clockwise angle in radians to rotate around Z axis"))
                    .arg(Arg::with_name("input")
                         .help("Phase space file")
                         .required(true))
                    .arg(Arg::with_name("output")
                         .help("Output file")
                         .required_unless("in-place")))
        .get_matches();
    let subcommand = matches.subcommand_name().unwrap();
    // Every branch yields a Result that is matched once at the bottom.
    let result = if subcommand == "combine" {
        // Merge several phase-space files into one output file.
        // println!("combine");
        let sub_matches = matches.subcommand_matches("combine").unwrap();
        let input_paths: Vec<&Path> = sub_matches.values_of("input")
            .unwrap()
            .map(|s| Path::new(s))
            .collect();
        let output_path = Path::new(sub_matches.value_of("output").unwrap());
        println!("combine {} files into {}",
                 input_paths.len(),
                 output_path.display());
        combine(&input_paths, output_path, sub_matches.is_present("delete"))
    } else if subcommand == "print" {
        // Dump the requested record fields as fixed-width columns.
        let sub_matches = matches.subcommand_matches("print").unwrap();
        let input_path = Path::new(sub_matches.value_of("input").unwrap());
        let number = sub_matches.value_of("number").unwrap().parse::<usize>().unwrap();
        let fields: Vec<&str> = sub_matches.values_of("fields").unwrap().collect();
        let file = File::open(input_path).unwrap();
        let reader = PHSPReader::from(file).unwrap();
        // Header row.
        for field in fields.iter() {
            print!("{:<16}", field);
        }
        println!("");
        for record in reader.take(number).map(|r| r.unwrap()) {
            for field in fields.iter() {
                match field {
                    &"weight" => print!("{:<16}", record.get_weight()),
                    &"energy" => print!("{:<16}", record.total_energy()),
                    &"x" => print!("{:<16}", record.x_cm),
                    &"y" => print!("{:<16}", record.y_cm),
                    &"x_cos" => print!("{:<16}", record.x_cos),
                    &"y_cos" => print!("{:<16}", record.y_cos),
                    &"produced" => print!("{:<16}", record.bremsstrahlung_or_annihilation()),
                    &"charged" => print!("{:<16}", record.charged()),
                    &"r" => print!("{:<16}", (record.x_cm * record.x_cm + record.y_cm * record.y_cm).sqrt()),
                    _ => panic!("Unknown field {}", field)
                };
            }
            println!("");
        }
        Ok(())
    } else if subcommand == "shout" {
        // Combine twist outputs into a fixed file name, deleting inputs.
        let sub_matches = matches.subcommand_matches("shout").unwrap();
        let input_paths: Vec<&Path> = sub_matches.values_of("input")
            .unwrap()
            .map(|s| Path::new(s))
            .collect();
        let shout_output: String = "tns_output.egsphsp1".to_string();
        let shout_output_path = Path::new(&shout_output);
        println!("combining {} files into {}",
                 input_paths.len(),
                 shout_output_path.display());
        combine(&input_paths, shout_output_path, true)
    }
    else if subcommand == "sample" {
        // Downsample particles at roughly 1-in-`rate`, seeded for reproducibility.
        let sub_matches = matches.subcommand_matches("sample").unwrap();
        let input_paths: Vec<&Path> = sub_matches.values_of("input")
            .unwrap()
            .map(|s| Path::new(s))
            .collect();
        let output_path = Path::new(sub_matches.value_of("output").unwrap());
        let rate = sub_matches.value_of("rate").unwrap().parse::<u32>().unwrap();
        let seed: &[_] = &[sub_matches.value_of("seed").unwrap().parse::<usize>().unwrap()];
        println!("sample {} file into {} at 1 in {}",
                 input_paths.len(),
                 output_path.display(),
                 rate);
        sample(&input_paths, output_path, rate, seed)
    }
    else if subcommand == "info" {
        // Print header statistics in human or JSON form.
        let sub_matches = matches.subcommand_matches("info").unwrap();
        let path = Path::new(sub_matches.value_of("input").unwrap());
        let reader = PHSPReader::from(File::open(path).unwrap()).unwrap();
        let header = reader.header;
        if sub_matches.value_of("format").unwrap() == "json" {
            println!("{{");
            println!("\t\"total_particles\": {},", header.total_particles);
            println!("\t\"total_photons\": {},", header.total_photons);
            println!("\t\"maximum_energy\": {},", header.max_energy);
            println!("\t\"minimum_energy\": {},", header.min_energy);
            println!("\t\"total_particles_in_source\": {}",
                     header.total_particles_in_source);
            println!("}}");
        } else {
            println!("Total particles: {}", header.total_particles);
            println!("Total photons: {}", header.total_photons);
            println!("Total electrons/positrons: {}",
                     header.total_particles - header.total_photons);
            println!("Maximum energy: {:.*} MeV", 4, header.max_energy);
            println!("Minimum energy: {:.*} MeV", 4, header.min_energy);
            println!("Incident particles from source: {:.*}",
                     1,
                     header.total_particles_in_source);
        }
        Ok(())
    } else {
        // Remaining subcommands build a 3x3 rotation matrix first.
        let mut matrix = [[0.0; 3]; 3];
        match subcommand {
            "rotate" =>
            {
                let sub_matches = matches.subcommand_matches("rotate").unwrap();
                let angle = floatify(sub_matches.value_of("angle").unwrap());
                Transform::rotation(&mut matrix, angle);
                let input_path = Path::new(sub_matches.value_of("input").unwrap());
                if sub_matches.is_present("in-place") {
                    println!("rotate {} by {} radians", input_path.display(), angle);
                    transform(input_path, input_path, &matrix)
                } else {
                    let output_path = Path::new(sub_matches.value_of("output").unwrap());
                    println!("rotate {} by {} radians and write to {}",
                             input_path.display(),
                             angle,
                             output_path.display());
                    transform(input_path, output_path, &matrix)
                }
            }
            "twist" =>
            {
                // Produce `iterations` copies of the input, each rotated by a
                // fresh random angle, named 1.egsphsp, 2.egsphsp, …
                let start = ProcessTime::now();
                let sub_matches = matches.subcommand_matches("twist").unwrap();
                let mut rng = rand::thread_rng();
                let iteration = floatify(sub_matches.value_of("iterations").unwrap()) as i32;
                let mut count = 1 as i32;
                let input_path = Path::new(sub_matches.value_of("input").unwrap());
                loop
                {
                    // Uniform angle in [0, 2*pi) (6.28318 ~= 2*pi).
                    let rand_seed: f32 = rng.gen();
                    let rand_angle: f32 = 6.28318 * rand_seed;
                    Transform::rotation(&mut matrix, rand_angle);
                    println!("");
                    println!("✦ Random angle is {} radians", rand_angle);
                    let mut rotation_output: String = count.to_string();
                    rotation_output.push_str(".egsphsp");
                    let rotation_output_path = Path::new(&rotation_output);
                    transform(input_path, rotation_output_path, &matrix); // Rotate file by random angle in radians & write to single_output_path
                    if count == iteration
                    {
                        println!("");
                        break
                    }
                    count = count + 1;
                }
                let cpu_time: Duration = start.elapsed();
                println!("CPU time: {:?}", cpu_time);
                Ok(())
            }
            _ => panic!("Invalid command"),
        }
    };
    // NOTE(review): `Error::description()` is deprecated in newer Rust;
    // `{}` (Display) would be preferable when bumping toolchains.
    match result {
        Ok(()) => exit(0),
        Err(err) => {
            println!("Error: {}", err.description());
            exit(1);
        }
    };
}
|
/*
Tests for operand 0x18 (Push and Pop)
*/
#[cfg(test)]
use super::super::Dcpu;
#[test]
fn push() {
    // Executes `set push, a` and checks the operand value lands at the
    // stack top; the first push writes to 0xffff (stack grows downward,
    // wrapping from 0 — see the `pop` test, which starts SP at 0xffff).
    let mut dcpu = Dcpu::new();
    let instruction: u16 = (0 << 10) | (0x18 << 5) | 0x1; // set push, a
    dcpu.memory[0] = instruction;
    dcpu.registers[0] = 5;
    dcpu.step();
    // The instruction word itself must not be clobbered.
    assert_eq!(dcpu.memory[0], instruction);
    assert_eq!(dcpu.memory[0xffff], 5);
}
#[test]
fn pop() {
    // Executes `set a, pop` with a value pre-seeded at the stack top and
    // verifies it is transferred into register A.
    let mut dcpu = Dcpu::new();
    let instruction: u16 = (0x18 << 10) | (0 << 5) | 0x1; // set a, pop
    dcpu.memory[0] = instruction;
    dcpu.memory[0xffff] = 15;
    dcpu.stack_pointer = 0xffff;
    dcpu.step();
    // Instruction word untouched; popped value in A.
    assert_eq!(dcpu.memory[0], instruction);
    assert_eq!(dcpu.registers[0], 15);
}
#[test]
fn push_pop() {
    // Executes `set push, pop`: the pop and push cancel out, so memory and
    // the stack pointer must be unchanged afterwards.
    let mut dcpu = Dcpu::new();
    let instruction: u16 = (0x18 << 10) | (0x18 << 5) | 0x1; // set push, pop
    dcpu.memory[0] = instruction;
    dcpu.memory[0xffff] = 0xbaad;
    dcpu.memory[1] = 0xf00d;
    dcpu.step();
    assert_eq!(dcpu.memory[0], instruction);
    assert_eq!(dcpu.memory[1], 0xf00d);
    assert_eq!(dcpu.memory[0xffff], 0xbaad);
    // Net stack movement is zero.
    assert_eq!(dcpu.stack_pointer, 0);
}
use num_traits::Zero;
use core::marker::PhantomData;
use crate::solver::{Cone, LinAlg};
//
/// Zero cone
///
/// <script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script>
/// <script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-svg.js"></script>
///
/// \\[
/// \lbrace 0 \rbrace^n = \lbrace 0 \rbrace \times \cdots \times \lbrace 0 \rbrace =
/// \left\lbrace x \in \mathbb{R}^n
/// \ \middle|\ x=0
/// \right\rbrace
/// \\]
pub struct ConeZero<L: LinAlg>
{
    // Marker tying the cone to a linear-algebra backend; no runtime state.
    ph_l: PhantomData<L>,
}
impl<L: LinAlg> ConeZero<L>
{
    /// Creates an instance.
    ///
    /// Returns the [`ConeZero`] instance. The struct carries no data, so
    /// construction is free.
    pub fn new() -> Self
    {
        Self { ph_l: PhantomData }
    }
}
impl<L: LinAlg> Cone<L> for ConeZero<L>
{
    /// Projects `x` onto the zero cone by zeroing every entry; when
    /// `dual_cone` is set, `x` is left untouched (the dual of \\(\lbrace 0
    /// \rbrace^n\\) is the whole space, so projection is the identity).
    fn proj(&mut self, dual_cone: bool, x: &mut L::Sl) -> Result<(), ()>
    {
        if !dual_cone {
            // Scale by zero == set every component to 0.
            L::scale(L::F::zero(), x);
        }
        Ok(())
    }
    /// No-op: the zero cone has no per-group structure to scale.
    fn product_group<G: Fn(&mut L::Sl) + Copy>(&self, _dp_tau: &mut L::Sl, _group: G)
    {
        // do nothing
    }
}
|
//! [](https://travis-ci.org/cdumay/rust-cdumay_http_client)
//! [](https://crates.io/crates/cdumay_http_client)
//! [](https://docs.rs/cdumay_http_client)
//! 
//!
//! cdumay_http_client is a basic library used to standardize result and serialize them using [serde](https://docs.serde.rs/serde/).
//!
//! ## Quickstart
//!
//! _Cargo.toml_:
//! ```toml
//! [dependencies]
//! cdumay_error = { git = "https://github.com/cdumay/rust-cdumay_error" }
//! cdumay_result = { git = "https://github.com/cdumay/rust-cdumay_result" }
//! ```
//!
//! _main.rs_:
//!
//! ```rust
//! extern crate cdumay_error;
//! extern crate cdumay_http_client;
//!
//! use cdumay_error::ErrorRepr;
//! use cdumay_http_client::authentication::NoAuth;
//! use cdumay_http_client::{ClientBuilder, HttpClient};
//!
//! fn main() {
//! let cli = HttpClient::<NoAuth>::new("https://www.rust-lang.org").unwrap();
//! let result = cli.get("/learn/get-started".into(), None, None, None);
//!
//! match result {
//! Ok(data) => println!("{}", data),
//! Err(err) => println!("{}", serde_json::to_string_pretty(&ErrorRepr::from(err)).unwrap()),
//! }
//! }
//! ```
//! _Output_:
//! ```html
//! <!doctype html>
//! <html lang="en-US">
//! <head>
//! <meta charset="utf-8">
//! <title>
//! [...]
//! ```
//! ## Errors
//!
//! Errors can be displayed using [cdumay_error](https://docs.rs/cdumay_error/):
//!
//! ```json
//! {
//! "code": 500,
//! "message": "error trying to connect",
//! "msgid": "Err-05192"
//! }
//! ```
//!
//! ## Project Links
//!
//! - Issues: https://github.com/cdumay/rust-cdumay_http_client/issues
//! - Documentation: https://docs.rs/cdumay_http_client
#![feature(try_trait)]
extern crate base64;
extern crate cdumay_error;
extern crate cdumay_result;
extern crate chrono;
extern crate http;
extern crate humantime;
#[macro_use]
extern crate log;
extern crate reqwest;
extern crate serde;
extern crate serde_value;
pub use client::{BaseClient, CallContext, ClientBuilder, HttpClient};
pub use errors::{ClientError, HttpStatusCodeErrors, ResponseErrorWithContext};
mod utils;
mod client;
pub mod authentication;
mod errors; |
use necsim_core::{
cogs::{
GloballyCoherentLineageStore, Habitat, LineageReference, RngCore,
SeparableDispersalSampler, SpeciationProbability, TurnoverRate,
},
landscape::Location,
};
use necsim_core_bond::ClosedUnitF64;
use crate::cogs::{
coalescence_sampler::conditional::ConditionalCoalescenceSampler,
event_sampler::gillespie::GillespiePartialSimulation,
};
/// Precomputed, mutually exclusive event probabilities at one location.
#[allow(clippy::module_name_repetitions)]
pub struct ProbabilityAtLocation {
    // Probability of a speciation event.
    speciation: ClosedUnitF64,
    // Probability of (no speciation and) dispersing away from the location.
    out_dispersal: ClosedUnitF64,
    // Probability of (no speciation, self-dispersal and) coalescing here.
    self_coalescence: ClosedUnitF64,
}
impl ProbabilityAtLocation {
    /// Computes the three disjoint event probabilities at `location` from the
    /// partial simulation's speciation, dispersal and coalescence samplers.
    #[allow(clippy::type_complexity)]
    pub fn new<
        H: Habitat,
        G: RngCore,
        R: LineageReference<H>,
        S: GloballyCoherentLineageStore<H, R>,
        D: SeparableDispersalSampler<H, G>,
        T: TurnoverRate<H>,
        N: SpeciationProbability<H>,
    >(
        location: &Location,
        simulation: &GillespiePartialSimulation<
            H,
            G,
            R,
            S,
            D,
            ConditionalCoalescenceSampler<H, R, S>,
            T,
            N,
        >,
        lineage_store_includes_self: bool,
    ) -> Self {
        let speciation_probability = simulation
            .speciation_probability
            .get_speciation_probability_at_location(location, &simulation.habitat);
        let self_dispersal_probability = simulation
            .dispersal_sampler
            .get_self_dispersal_probability_at_location(location, &simulation.habitat);
        let coalescence_probability_at_location =
            ConditionalCoalescenceSampler::get_coalescence_probability_at_location(
                location,
                &simulation.habitat,
                &simulation.lineage_store,
                lineage_store_includes_self,
            );
        Self {
            speciation: speciation_probability,
            // P(no speciation) * P(dispersing away)
            out_dispersal: speciation_probability.one_minus()
                * self_dispersal_probability.one_minus(),
            // P(no speciation) * P(self-dispersal) * P(coalescence here)
            self_coalescence: speciation_probability.one_minus()
                * self_dispersal_probability
                * coalescence_probability_at_location,
        }
    }
    /// Probability of a speciation event.
    pub fn speciation(&self) -> ClosedUnitF64 {
        self.speciation
    }
    /// Probability of dispersing away from the location (without speciation).
    pub fn out_dispersal(&self) -> ClosedUnitF64 {
        self.out_dispersal
    }
    /// Probability of self-coalescence at the location (without speciation).
    pub fn self_coalescence(&self) -> ClosedUnitF64 {
        self.self_coalescence
    }
    /// Sum of the three disjoint event probabilities.
    pub fn total(&self) -> ClosedUnitF64 {
        let total =
            self.speciation().get() + self.out_dispersal().get() + self.self_coalescence().get();
        // Safety: Sum of disjoint event probabilities is in [0.0; 1.0]
        unsafe { ClosedUnitF64::new_unchecked(total) }
    }
}
|
use super::subscriber::EventSubscriber;
use crate::test::spec::unified_runner::{test_file::TestCase, TestFileEntity};
use serde::Serialize;
use std::{
collections::HashMap,
convert::TryInto,
sync::{Arc, RwLock},
};
use tokio::sync::broadcast;
use tracing::{field::Field, span, subscriber::Interest, Level, Metadata};
/// Models the data reported in a tracing event.
///
/// Instances are built by [`TracingHandler`] from `tracing` events and
/// broadcast to test subscribers.
#[derive(Debug, Clone)]
pub struct TracingEvent {
    /// The verbosity level.
    pub level: Level,
    /// The target, i.e. component the event corresponds to.
    pub target: String,
    /// Map of key/value pairs attached to the event.
    pub fields: HashMap<String, TracingEventValue>,
}
impl TracingEvent {
    /// Creates an event with the given level/target and no fields; fields are
    /// filled in later by `TracingEventVisitor`.
    fn new(level: Level, target: String) -> TracingEvent {
        TracingEvent {
            level,
            target,
            fields: Default::default(),
        }
    }
    /// Retrieves the topology_id value for the event. Panics if there is no topology_id or if the
    /// topology_id is not a string.
    pub(crate) fn topology_id(&self) -> String {
        self.get_value_as_string("topologyId")
    }
    /// Retrieves the field with the specified name as a string. Panics if the
    /// name is missing or is not a string.
    pub fn get_value_as_string(&self, field: &'static str) -> String {
        match self.fields.get(field) {
            Some(TracingEventValue::String(s)) => s.to_string(),
            Some(v) => panic!("field {} was unexpectedly not a string: got {:?}", field, v),
            None => panic!("field {} was unexpectedly None", field),
        }
    }
}
/// Models the value of a field in a tracing event.
///
/// One variant per primitive type the `tracing` visitor can record; strings
/// also capture `Debug`-formatted values (see `record_debug`).
#[derive(Debug, Clone)]
pub enum TracingEventValue {
    F64(f64),
    I64(i64),
    U64(u64),
    I128(i128),
    U128(u128),
    Bool(bool),
    String(String),
}
/// Used for serializing tracing event data to BSON for the purpose of matching against expected
/// values.
impl Serialize for TracingEventValue {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        match self {
            TracingEventValue::F64(v) => serializer.serialize_f64(*v),
            TracingEventValue::I64(v) => serializer.serialize_i64(*v),
            TracingEventValue::U64(v) => serializer.serialize_u64(*v),
            // 128-bit values are narrowed to 64 bits (BSON has no 128-bit
            // integer type); out-of-range values become a custom ser error.
            TracingEventValue::I128(v) => match (*v).try_into() {
                Ok(i) => serializer.serialize_i64(i),
                Err(e) => Err(serde::ser::Error::custom(format!(
                    "Failed to serialize i128 as i64: {}",
                    e
                ))),
            },
            TracingEventValue::U128(v) => match (*v).try_into() {
                Ok(i) => serializer.serialize_u64(i),
                Err(e) => Err(serde::ser::Error::custom(format!(
                    "Failed to serialize u128 as u64: {}",
                    e
                ))),
            },
            TracingEventValue::Bool(v) => serializer.serialize_bool(*v),
            TracingEventValue::String(v) => serializer.serialize_str(v.as_str()),
        }
    }
}
/// A type for use in tests that need to consume tracing events. For single-threaded tests,
/// this type may be created directly in the test using the desired severity levels via
/// [`TracingHandler::new_with_levels`] and used as a local default via
/// [`tracing::subscriber::with_default`] or [`tracing::subscriber::set_default`].
///
/// For multi-threaded tests, a global default tracing handler must be used in order to
/// capture messages emitted by all threads. For that use case, acquire the test lock and
/// then use DEFAULT_GLOBAL_TRACING_HANDLER. You can configure the desired verbosity levels
/// for the scope of a test using [`TracingHandler::set_levels`].
///
/// The handler will listen for tracing events for its associated components/levels and
/// publish them to a broadcast channel. To receive the broadcasted events, call
/// [`TracingHandler::subscribe`] to create a new [`TracingSubscriber`].
// Cloning shares the same broadcast channel sender and the same Arc'd level
// map, so clones observe and affect the same configuration.
#[derive(Clone, Debug)]
pub(crate) struct TracingHandler {
    /// Sender for the channel where events will be broadcast.
    event_broadcaster: broadcast::Sender<TracingEvent>,
    /// Contains a map of (tracing component name, maximum verbosity level) which this handler
    /// will subscribe to messages for. This is stored in an Arc<RwLock> so that we are able
    /// to mutate the global default tracing handler's levels.
    levels: Arc<RwLock<HashMap<String, Level>>>,
}
impl TracingHandler {
    /// Initializes a new tracing handler with no configured components/levels.
    pub(crate) fn new() -> TracingHandler {
        Self::new_with_levels(HashMap::default())
    }
    /// Initializes a new tracing handler with the specified components and corresponding maximum
    /// verbosity levels.
    pub(crate) fn new_with_levels(levels: HashMap<String, Level>) -> TracingHandler {
        // Large capacity (10k events) so slow test subscribers don't lag out.
        let (event_broadcaster, _) = tokio::sync::broadcast::channel(10_000);
        Self {
            event_broadcaster,
            levels: Arc::new(RwLock::new(levels)),
        }
    }
    /// Sets the levels for this handler to the provided levels, and returns a
    /// [`TracingLevelsGuard`] which, when dropped, will clear the levels set on this handler.
    /// This can be used to temporarily configure the levels on the global default handler for
    /// the duration of a test.
    pub(crate) fn set_levels(&self, new_levels: HashMap<String, Level>) -> TracingLevelsGuard {
        let mut levels = self.levels.write().unwrap();
        *levels = new_levels;
        TracingLevelsGuard { handler: self }
    }
    /// Returns a `TracingSubscriber` that will listen for tracing events broadcast by this handler.
    pub(crate) fn subscribe(&self) -> EventSubscriber<TracingHandler, TracingEvent> {
        EventSubscriber::new(self, self.event_broadcaster.subscribe())
    }
}
/// Convenience type for configuring max verbosity levels on a tracing handler.
/// When dropped the levels for the corresponding handler will be cleared out.
/// The borrow keeps the handler alive for the guard's lifetime.
pub(crate) struct TracingLevelsGuard<'a> {
    handler: &'a TracingHandler,
}
impl Drop for TracingLevelsGuard<'_> {
    /// Clears all configured levels so the handler stops matching any events
    /// once the guard's scope (typically one test) ends.
    fn drop(&mut self) {
        self.handler.levels.write().unwrap().clear();
    }
}
/// Merges together the max verbosity levels for components across all test file client entities as
/// well as per-test client entities, so that a single tracing handler can observe all events
/// any client in the test will expect.
pub(crate) fn max_verbosity_levels_for_test_case(
    entities: &Option<Vec<TestFileEntity>>,
    test_case: &TestCase,
) -> HashMap<String, Level> {
    let mut merged_levels = HashMap::new();
    // For each client entity, fold its observed components into the map,
    // keeping the most verbose (maximum) level per component.
    let mut update_merged_levels = |entity: &TestFileEntity| {
        let client_entity = match entity {
            TestFileEntity::Client(client) => client,
            _ => return,
        };
        if let Some(ref log_levels) = client_entity.observe_log_messages {
            for (component, max_level) in log_levels.iter() {
                match merged_levels.get_mut(component) {
                    Some(current_max) => {
                        *current_max = Ord::max(*current_max, *max_level);
                    }
                    None => {
                        merged_levels.insert(component.clone(), *max_level);
                    }
                }
            }
        }
    };
    // entities that are created in this test via "createEntities" operations
    test_case
        .operations
        .iter()
        .filter(|o| o.name == "createEntities")
        .for_each(|o| {
            o.test_file_entities()
                .unwrap()
                .iter()
                .for_each(|e| update_merged_levels(e))
        });
    // test-file level entities. these might not all actually be used by this particular
    // test case but we include them for simplicity.
    if let Some(ref entities) = entities {
        entities.iter().for_each(|e| update_merged_levels(e));
    };
    merged_levels
}
/// Implementation allowing `TracingHandler` to subscribe to `tracing` events.
impl tracing::Subscriber for TracingHandler {
    /// An event is enabled iff its target has a configured level and the
    /// event's level is at or below that maximum verbosity.
    fn enabled(&self, metadata: &Metadata<'_>) -> bool {
        let levels = self.levels.read().unwrap();
        match levels.get(metadata.target()) {
            Some(level) => metadata.level() <= level,
            None => false,
        }
    }
    /// The default implementation of this method calls `enabled` and returns either
    /// `never` (if `enabled` returns false) or `always` (if`enabled` returns true).
    /// When `always` or `never` is returned, that value is cached for the callsite.
    /// In practice, this prevents us from dynamically changing the interest of the
    /// global tracing handler in particular tracing messages over time, since whatever
    /// value is returned the first time a callsite is hit will always be the value.
    /// By overriding this method to return `sometimes` for all events emitted
    /// by the driver, we tell tracing to dynamically check `enabled` each time the
    /// callsite for the event is hit.
    fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest {
        if metadata.target().starts_with("mongodb") {
            Interest::sometimes()
        } else {
            // at this time, we are only ever interested in tracing events emitted
            // by the driver, so we can safely return `never` for all others.
            Interest::never()
        }
    }
    /// Converts the raw `tracing` event into a `TracingEvent` and broadcasts it.
    fn event(&self, event: &tracing::Event<'_>) {
        let mut test_event = TracingEvent::new(
            *event.metadata().level(),
            event.metadata().target().to_string(),
        );
        let mut visitor = TracingEventVisitor::new(&mut test_event);
        event.record(&mut visitor);
        // this only errors if no receivers are listening; we don't care if that is the case.
        let _: std::result::Result<usize, broadcast::error::SendError<TracingEvent>> =
            self.event_broadcaster.send(test_event);
    }
    /// These methods all relate to spans. Since we don't create any spans ourselves or need
    /// to make any assertions about them, we do not need real implementations.
    fn new_span(&self, _span: &span::Attributes<'_>) -> span::Id {
        span::Id::from_u64(1)
    }
    fn record(&self, _span: &span::Id, _values: &span::Record<'_>) {}
    fn record_follows_from(&self, _span: &span::Id, _follows: &span::Id) {}
    fn enter(&self, _span: &span::Id) {}
    fn exit(&self, _span: &span::Id) {}
}
/// A visitor which traverses each value in a tracing event and stores it in the underlying
/// `TracingEvent`.
struct TracingEventVisitor<'a> {
    // Destination event whose `fields` map the visitor populates.
    event: &'a mut TracingEvent,
}
impl TracingEventVisitor<'_> {
    /// Wraps a mutable borrow of the event to be populated.
    fn new(event: &mut TracingEvent) -> TracingEventVisitor {
        TracingEventVisitor { event }
    }
}
impl tracing::field::Visit for TracingEventVisitor<'_> {
fn record_f64(&mut self, field: &Field, value: f64) {
self.event
.fields
.insert(field.name().to_string(), TracingEventValue::F64(value));
}
fn record_i64(&mut self, field: &Field, value: i64) {
self.event
.fields
.insert(field.name().to_string(), TracingEventValue::I64(value));
}
fn record_u64(&mut self, field: &Field, value: u64) {
self.event
.fields
.insert(field.name().to_string(), TracingEventValue::U64(value));
}
fn record_i128(&mut self, field: &Field, value: i128) {
self.event
.fields
.insert(field.name().to_string(), TracingEventValue::I128(value));
}
fn record_u128(&mut self, field: &Field, value: u128) {
self.event
.fields
.insert(field.name().to_string(), TracingEventValue::U128(value));
}
fn record_bool(&mut self, field: &Field, value: bool) {
self.event
.fields
.insert(field.name().to_string(), TracingEventValue::Bool(value));
}
fn record_str(&mut self, field: &Field, value: &str) {
self.event.fields.insert(
field.name().to_string(),
TracingEventValue::String(value.to_string()),
);
}
fn record_debug(&mut self, field: &Field, value: &dyn std::fmt::Debug) {
self.event.fields.insert(
field.name().to_string(),
TracingEventValue::String(format!("{:?}", value)),
);
}
}
|
//! Cuckoo filter probabilistic data structure for membership testing and cardinality counting.
//!
//! # Usage
//!
//! This crate is [on crates.io](https://crates.io/crates/cuckoofilter) and can be
//! used by adding `cuckoofilter` to the dependencies in your project's `Cargo.toml`.
//!
//! ```toml
//! [dependencies]
//! cuckoofilter = "0.1"
//! ```
//!
//! And this in your crate root:
//!
//! ```rust
//! extern crate cuckoofilter;
//! ```
#![cfg_attr(feature = "dev", feature(plugin))]
#![cfg_attr(feature = "dev", plugin(clippy))]
mod bucket;
mod util;
extern crate rand;
extern crate byteorder;
use bucket::{Bucket, Fingerprint, BUCKET_SIZE};
use util::{get_fai, get_alt_index, FaI};
use rand::Rng;
use std::iter::repeat;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hasher, Hash};
use std::marker::PhantomData;
use std::mem;
/// If insertion fails, we will retry this many times.
pub const MAX_REBUCKET: u32 = 500;
/// The default number of buckets.
/// (`(1 << 20) - 1`; `with_capacity` rounds this up to the next power of two.)
pub const DEFAULT_CAPACITY: u64 = (1 << 20) - 1;
/// A cuckoo filter class exposes a Bloomier filter interface,
/// providing methods of add, delete, contains.
///
/// # Examples
///
/// ```
/// extern crate cuckoofilter;
///
/// let words = vec!["foo", "bar", "xylophone", "milagro"];
/// let mut cf = cuckoofilter::CuckooFilter::new();
///
/// let mut insertions = 0;
/// for s in &words {
/// if cf.test_and_add(s) {
/// insertions += 1;
/// }
/// }
///
/// assert_eq!(insertions, words.len());
/// assert_eq!(cf.len(), words.len() as u64);
///
/// // Re-add the first element.
/// cf.add(words[0]);
///
/// assert_eq!(cf.len(), words.len() as u64 + 1);
///
/// for s in &words {
/// cf.delete(s);
/// }
///
/// assert_eq!(cf.len(), 1);
/// assert!(!cf.is_empty());
///
/// cf.delete(words[0]);
///
/// assert_eq!(cf.len(), 0);
/// assert!(cf.is_empty());
///
/// ```
pub struct CuckooFilter<H> {
    // Fixed-size bucket array; length is a power of two (see `with_capacity`).
    buckets: Box<[Bucket]>,
    // Number of fingerprints currently stored.
    len: u64,
    // Marker for the hasher type; no hasher state is stored.
    _hasher: std::marker::PhantomData<H>,
}
impl Default for CuckooFilter<DefaultHasher> {
    /// Equivalent to [`CuckooFilter::new`]: default capacity, default hasher.
    fn default() -> Self {
        CuckooFilter::new()
    }
}
impl CuckooFilter<DefaultHasher> {
    /// Construct a CuckooFilter with default capacity and hasher.
    pub fn new() -> CuckooFilter<DefaultHasher> {
        Self::with_capacity(DEFAULT_CAPACITY)
    }
}
impl<H> CuckooFilter<H>
where H: Hasher + Default
{
    /// Constructs a Cuckoo Filter with a given max capacity
    ///
    /// The bucket count is `cap` rounded up to the next power of two divided
    /// by `BUCKET_SIZE`, with a minimum of one bucket.
    pub fn with_capacity(cap: u64) -> CuckooFilter<H> {
        let capacity = match cap.next_power_of_two() / BUCKET_SIZE as u64 {
            0 => 1,
            cap => cap,
        };
        CuckooFilter {
            buckets: repeat(Bucket::new())
                .take(capacity as usize)
                .collect::<Vec<_>>()
                .into_boxed_slice(),
            len: 0,
            _hasher: PhantomData,
        }
    }
/// Checks if `data` is in the filter.
pub fn contains<T: ?Sized + Hash>(&self, data: &T) -> bool {
let FaI { fp, i1, i2 } = get_fai::<T, H>(data);
let len = self.buckets.len();
self.buckets[i1 % len]
.get_fingerprint_index(fp)
.or(self.buckets[i2 % len].get_fingerprint_index(fp))
.is_some()
}
/// Adds `data` to the filter. Returns true if the insertion was successful.
/// Note that while you can put any hashable type in the same filter, beware
/// for side effects like that the same number can have diferent hashes
/// depending on the type.
/// So for the filter, 4711i64 isn't the same as 4711u64.
pub fn add<T: ?Sized + Hash>(&mut self, data: &T) -> bool {
let fai = get_fai::<T, H>(data);
if self.put(fai.fp, fai.i1) || self.put(fai.fp, fai.i2) {
return true;
}
let len = self.buckets.len();
let mut rng = rand::thread_rng();
let mut i = fai.random_index(&mut rng);
let mut fp = fai.fp;
for _ in 0..MAX_REBUCKET {
let other_fp;
{
let loc = &mut self.buckets[i % len].buffer[rng.gen_range(0, BUCKET_SIZE)];
other_fp = *loc;
*loc = fp;
i = get_alt_index::<H>(other_fp, i);
}
if self.put(other_fp, i) {
return true;
}
fp = other_fp;
}
panic!("Map is full, could not insert item");
}
/// Adds `data` to the filter if it does not exist in the filter yet.
/// Returns `true` if `data` was not yet present in the filter and added
/// successfully.
pub fn test_and_add<T: ?Sized + Hash>(&mut self, data: &T) -> bool {
if self.contains(data) {
false
} else {
self.add(data)
}
}
/// Number of items in the filter.
pub fn len(&self) -> u64 {
self.len
}
/// Number of bytes the filter occupies in memory
pub fn memory_usage(&self) -> usize {
mem::size_of_val(self) + self.buckets.len() * mem::size_of::<Bucket>()
}
/// Check if filter is empty
pub fn is_empty(&self) -> bool {
self.len == 0
}
/// Deletes `data` from the filter. Returns true if `data` existed in the
/// filter before.
pub fn delete<T: ?Sized + Hash>(&mut self, data: &T) -> bool {
let FaI { fp, i1, i2 } = get_fai::<T, H>(data);
self.remove(fp, i1) || self.remove(fp, i2)
}
/// Removes the item with the given fingerprint from the bucket indexed by i.
fn remove(&mut self, fp: Fingerprint, i: usize) -> bool {
let len = self.buckets.len();
if self.buckets[i % len].delete(fp) {
self.len -= 1;
true
} else {
false
}
}
fn put(&mut self, fp: Fingerprint, i: usize) -> bool {
let len = self.buckets.len();
if self.buckets[i % len].insert(fp) {
self.len += 1;
true
} else {
false
}
}
}
|
#![cfg_attr(not(feature = "std"), no_std)]
pub use pallet::*;
#[frame_support::pallet]
pub mod pallet {
use frame_support::dispatch::DispatchResultWithPostInfo;
use frame_support::pallet_prelude::*;
use frame_system::pallet_prelude::*;
use super::*;
#[pallet::config]
pub trait Config: frame_system::Config {
type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
}
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
// NOTE: if the visibility of trait store is private but you want to make it available
// in super, then use `pub(super)` or `pub(crate)` to make it available in crate.
pub struct Pallet<T>(_);
// pub struct Pallet<T, I = ()>(PhantomData<T>); // for instantiable pallet
#[pallet::hooks]
impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {
}
// Storage --------
#[pallet::storage]
#[pallet::getter(fn roles_by_id)]
pub type UserRoles<T> = StorageMap<_, Blake2_128Concat, <T as frame_system::Config>::AccountId, Vec<Role>>;
// Dispatchable Calls --
#[pallet::call]
impl<T: Config> Pallet<T> {
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
pub fn assign_role(origin: OriginFor<T>, role: super::Role) -> DispatchResultWithPostInfo {
let who = ensure_signed(origin)?;
match UserRoles::<T>::get(&who) {
None => {
let mut roles = Vec::new();
roles.push(role);
UserRoles::<T>::insert(&who, roles);
},
Some(mut roles) => {
let role_exists = roles.iter().any(|&r| r == role );
if !role_exists {
roles.push(role);
UserRoles::<T>::insert(&who, roles)
}
}
}
Self::deposit_event(Event::RoleAssigned(who.clone(), role));
Ok(().into())
}
// TODO:
// #[pallet::weight(10_000 + T::DbWeight::get().writes(1))]
// pub fn unassign_role(origin: OriginFor<T>, role: super::Role) -> DispatchResultWithPostInfo {
//
// }
}
#[pallet::error]
pub enum Error<T> {
// This pallet does not return error from here
}
#[pallet::event]
#[pallet::metadata()]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T: Config> {
// This pallet does not emit events
// [who, Role]
RoleAssigned(T::AccountId, Role)
}
}
#[macro_use]
extern crate lazy_static; // https://crates.io/crates/lazy_static
use sp_std::collections::btree_map::{BTreeMap, Entry};
use sp_std::marker::PhantomData;
use sp_std::fmt::Debug;
use sp_std::vec;
use sp_std::vec::Vec;
use frame_support::codec::{Encode, Decode};
use sp_runtime::traits::{
SignedExtension, DispatchInfoOf, Dispatchable
};
use sp_runtime::transaction_validity::{
ValidTransaction, TransactionValidityError,
InvalidTransaction, TransactionValidity,
TransactionPriority, TransactionLongevity,
};
use frame_support::weights::DispatchInfo;
use frame_support::traits::GetCallMetadata;
// For debugging --
use sp_runtime::print;
use frame_support::debug;
impl<T: Config> Pallet<T> {
    /// Returns true when `who` may call `pallet_name::function_name`:
    /// either the call is listed in `PUBLIC_ACCESS`, or one of the
    /// caller's stored roles grants access to it via `RBAC`.
    pub fn validate_access_to_call(who: &T::AccountId, pallet_name: &'static str, function_name: &'static str) -> bool {
        // Publicly accessible calls bypass the role check entirely.
        if PUBLIC_ACCESS.iter().any(|&x| x.0 == pallet_name && x.1 == function_name) {
            return true;
        }
        // An account with no roles has no access (idiomatic match replaces
        // the previous `== None` comparison plus `unwrap`).
        let user_roles = match UserRoles::<T>::get(who) {
            Some(roles) => roles,
            None => return false,
        };
        debug::info!("-- validate_access_to_call --> user_roles -> {:?}", user_roles);
        // FIXME: Make more efficient function
        // maybe change the RBAC structure to:
        // { key: "Palletname:function_name", value: Role }
        // Grant access if any of the user's roles lists this exact call.
        user_roles.iter().any(|&role| {
            RBAC.get(&role).map_or(false, |access_list| {
                access_list
                    .iter()
                    .any(|&(pallet, function)| pallet == pallet_name && function == function_name)
            })
        })
    }
}
/// The access-control roles an account can hold.
/// Ordered and hashable so a role can serve as a `BTreeMap` key (see `RBAC`).
#[derive(Eq, Ord, PartialOrd, PartialEq, Copy, Clone, Encode, Decode, Debug)]
pub enum Role {
    Customer,
    Lab,
    Hospital,
    Doctor,
}
// Pallet name should be spelled as in runtime's construct_runtime! macro declaration
lazy_static! {
    // List of roles and pallet functions it has access to
    #[derive(Debug)]
    pub static ref RBAC: BTreeMap<Role, Vec<(&'static str, &'static str)>> = {
        // Flat (role, (pallet, function)) pairs, folded into the map below.
        let role_access_list: Vec<(Role, (&str, &str))> = vec![
            // Role , pallet name, function_name
            // Customers --
            // (Role::Customer, ("Orders", "create_order")),
            // (Role::Customer, ("Orders", "pay_order")),
            // Labs --
            (Role::Lab, ("TemplateModule", "do_something")),
            (Role::Lab, ("TemplateModule", "cause_error"))
        ];
        let mut rbac = BTreeMap::new();
        // rbac.insert(Role::Lab, vec![("TemplateModule", "do_something")]);
        // Group the pairs by role: start a new Vec on first sight of a
        // role, append for roles already present.
        for role_access in role_access_list.iter() {
            match rbac.entry(role_access.0) {
                Entry::Vacant(role) => {
                    debug::info!("Entry::Vacant(role) -> role = {:?}", role);
                    role.insert(vec![role_access.1]);
                },
                Entry::Occupied(mut role) => {
                    debug::info!("Entry::Occupied(mut role) -> role = : {:?}", role);
                    role.get_mut().push(role_access.1);
                }
            }
        }
        //let _initialize = rbac.get(&Role::Customer);
        //debug::info!("rbac: {:?}", rbac);
        rbac
    };
    // List of public access pallet functions: anyone may call these,
    // regardless of roles (checked first in `validate_access_to_call`).
    #[derive(Debug)]
    pub static ref PUBLIC_ACCESS: Vec<(&'static str, &'static str)> = {
        let public_access_list: Vec<(&str, &str)> = vec![
            ("RBAC", "assign_role")
        ];
        public_access_list
    };
}
/// Signed extension that gates extrinsics through the RBAC rules.
/// Zero-sized; `PhantomData` only carries the runtime type parameter.
#[derive(Encode, Decode, Clone, Eq, PartialEq)]
pub struct Authorize<T: Config + Send + Sync>(PhantomData<T>);
// SignedExtension requires Debug; output is a fixed label (std builds only).
impl<T: Config + Send + Sync> Debug for Authorize<T> {
    #[cfg(feature = "std")]
    fn fmt(&self, f: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
        f.write_str("Authorize")
    }
    // On no_std builds, formatting is a no-op that reports success.
    #[cfg(not(feature = "std"))]
    fn fmt(&self, _: &mut sp_std::fmt::Formatter) -> sp_std::fmt::Result {
        Ok(())
    }
}
// Transaction-validity hook: rejects any extrinsic whose caller lacks a role
// granting access to the target pallet/function (see validate_access_to_call).
impl<T: Config + Send + Sync> SignedExtension for Authorize<T> where
    T::Call: Dispatchable<Info=DispatchInfo> + GetCallMetadata {
    type AccountId = T::AccountId;
    type Call = T::Call;
    type AdditionalSigned = ();
    type Pre = ();
    const IDENTIFIER: &'static str = "Authorize";
    // Nothing extra is folded into the signed payload.
    fn additional_signed(&self) -> sp_std::result::Result<(), TransactionValidityError> { Ok(()) }
    fn validate(
        &self,
        who: &Self::AccountId,
        call: &Self::Call,
        info: &DispatchInfoOf<Self::Call>,
        _len: usize,
    ) -> TransactionValidity {
        // The target pallet/function names come from the runtime call metadata.
        let metadata = call.get_call_metadata();
        print("---- ---- RBAC --- -----");
        debug::info!("{:?}", *RBAC);
        debug::info!("-- In rbac pallet --> Metadata --- {:?}", metadata);
        let pallet_name = metadata.pallet_name;
        let function_name = metadata.function_name;
        // Check if who has valid role for pallet_name and function_name
        // - check who's roles
        // - check if role has access to (pallet_name, function_name)
        let has_access = Pallet::<T>::validate_access_to_call(who, pallet_name, function_name);
        if has_access {
            Ok(ValidTransaction {
                // Reuse the dispatch weight directly as the tx priority.
                priority: info.weight as TransactionPriority,
                longevity: TransactionLongevity::max_value(),
                propagate: true,
                ..Default::default()
            })
        } else {
            print("Access Denied!");
            debug::info!("------ Access Denied -------");
            Err(InvalidTransaction::Call.into())
        }
    }
}
|
/*
Copyright 2019-2023 Didier Plaindoux
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Tests for the monadic parser combinators (fmap / bind).
#[cfg(test)]
mod tests_monadic {
    use celma_core::parser::and::AndOperation;
    use celma_core::parser::bind::BindOperation;
    use celma_core::parser::char::char;
    use celma_core::parser::core::eos;
    use celma_core::parser::fmap::FMapOperation;
    use celma_core::parser::literal::string;
    use celma_core::parser::parser::Parse;
    use celma_core::parser::repeat::RepeatOperation;
    use celma_core::stream::char_stream::CharStream;
    #[test]
    fn it_parse_a_str_and_fmap_it_to_u32() {
        // `fmap` maps the parsed "hello" to its length (5).
        let response = string("hello")
            .fmap(|a| a.len())
            .parse(CharStream::new("hello world!"));
        // fold(on_success, on_failure): succeed only when the mapped value is 5.
        assert_eq!(response.fold(|v, _, _| v == 5, |_, _| false), true);
    }
    #[test]
    fn it_parse_a_str_and_bind_it_a_str_parser() {
        // `bind` derives a second parser from the parsed "he": repeat its
        // first char ('h') up to end of stream — seven trailing 'h's here.
        let response = string("he")
            .bind(|a| char(a.chars().next().unwrap()).rep().and(eos()))
            .parse(CharStream::new("hehhhhhhh"));
        assert_eq!(response.fold(|v, _, _| v.0.len() == 7, |_, _| false), true);
    }
}
|
/// Returns `s` with its characters sorted ascending — the canonical
/// "anagram key" for a word.
fn sort(s: String) -> String {
    let mut chars: Vec<char> = s.chars().collect();
    // sort_unstable is faster and non-allocating; stability is irrelevant
    // for plain chars (equal elements are indistinguishable).
    chars.sort_unstable();
    // Collect chars straight into a String instead of building a
    // Vec<String> and joining it.
    chars.into_iter().collect()
}
/// Groups words that are anagrams of each other.
///
/// Each word is keyed by its sorted characters; words sharing a key land
/// in the same group. Group order is unspecified (HashMap iteration).
fn group_anagrams(strs: Vec<String>) -> Vec<Vec<String>> {
    use std::collections::HashMap;
    let mut groups: HashMap<String, Vec<String>> = HashMap::new();
    for s in strs {
        // Build the canonical key without cloning `s` (the previous
        // version cloned each word up to twice per iteration).
        let mut key: Vec<char> = s.chars().collect();
        key.sort_unstable();
        let key: String = key.into_iter().collect();
        // entry/or_default replaces and_modify + or_insert; the len == 1
        // special case was redundant (a single word forms its own group).
        groups.entry(key).or_default().push(s);
    }
    groups.into_values().collect()
}
/// Demo driver: groups the classic LeetCode example words and prints them.
fn main() {
    let words = ["eat", "tea", "tan", "ate", "nat", "bat"];
    let res = group_anagrams(words.iter().map(|w| w.to_string()).collect());
    println!("{res:?}");
}
|
pub mod progress;
|
/// One navigation instruction from the puzzle input: a direction letter
/// followed by a non-negative amount (degrees for L/R, units otherwise).
enum Instruction {
    Forward(u32),
    Left(u32),
    Right(u32),
    North(u32),
    South(u32),
    East(u32),
    West(u32)
}
impl From<&str> for Instruction {
    /// Parses strings like `"F10"` or `"N3"`.
    ///
    /// Panics on an empty string, a non-numeric amount, or an unknown
    /// direction letter — acceptable since the puzzle input is trusted.
    fn from(st: &str) -> Self {
        let amount = st[1..].parse().unwrap();
        // `next()` replaces the clippy-flagged `nth(0)`.
        match st.chars().next().unwrap() {
            'N' => Instruction::North(amount),
            'S' => Instruction::South(amount),
            'E' => Instruction::East(amount),
            'W' => Instruction::West(amount),
            'L' => Instruction::Left(amount),
            'R' => Instruction::Right(amount),
            'F' => Instruction::Forward(amount),
            _ => panic!("unknown instruction: {}", st)
        }
    }
}
/// Part 1 state: compass heading plus position.
/// Heading convention (see `Ship::apply`): 0 = east, 90 = north,
/// 180 = west, 270 = south.
struct Ship {
    heading: u16, // degrees; stays a multiple of 90 via turn_degrees
    x: i32, // east-west position (east positive)
    y: i32 // north-south position (north positive)
}
/// Part 2 state: the waypoint's offset relative to the ship.
struct Waypoint {
    x: i32, // east offset (west negative)
    y: i32 // north offset (south negative)
}
impl Waypoint {
    /// Starting waypoint per the puzzle: 10 east, 1 north of the ship.
    fn new() -> Self {
        Waypoint { x: 10, y: 1 }
    }
    /// Rotates the waypoint counter-clockwise about the ship in 90° steps.
    fn rotate_ccw(&mut self, amount: u32) {
        let mut remaining = amount;
        while remaining > 0 {
            // One 90° CCW step: (x, y) -> (-y, x).
            let prev_x = self.x;
            self.x = -self.y;
            self.y = prev_x;
            remaining -= 90;
        }
    }
    /// Rotates the waypoint clockwise about the ship in 90° steps.
    fn rotate_cw(&mut self, amount: u32) {
        let mut remaining = amount;
        while remaining > 0 {
            // One 90° CW step: (x, y) -> (y, -x).
            let prev_x = self.x;
            self.x = self.y;
            self.y = -prev_x;
            remaining -= 90;
        }
    }
    /// Applies one instruction: cardinal moves shift the waypoint, L/R
    /// rotate it, and Forward moves the ship toward it `n` times.
    fn apply(&mut self, ship: &mut Ship, instruction: &Instruction) {
        match *instruction {
            Instruction::Forward(n) => {
                ship.x += self.x * (n as i32);
                ship.y += self.y * (n as i32);
            }
            Instruction::Left(n) => self.rotate_ccw(n),
            Instruction::Right(n) => self.rotate_cw(n),
            Instruction::North(n) => self.y += n as i32,
            Instruction::South(n) => self.y -= n as i32,
            Instruction::East(n) => self.x += n as i32,
            Instruction::West(n) => self.x -= n as i32
        }
    }
    /// Parses `input` line by line and applies each instruction in order.
    fn execute(&mut self, ship: &mut Ship, input: &str) {
        let contents = std::fs::read_to_string(input).unwrap();
        for line in contents.lines() {
            self.apply(ship, &Instruction::from(line));
        }
    }
}
impl Ship {
    /// A ship at the origin facing east (heading 0).
    fn new() -> Self {
        Ship { heading: 0, x: 0, y: 0 }
    }
    /// Applies one instruction under part-1 rules: cardinal moves shift
    /// the ship, L/R adjust the heading, Forward moves along the heading.
    fn apply(&mut self, instruction: &Instruction) {
        match *instruction {
            Instruction::Forward(n) => {
                // Translate "forward" into the cardinal move for the
                // current heading, then recurse once.
                let cardinal = match self.heading {
                    0 => Instruction::East(n),
                    90 => Instruction::North(n),
                    180 => Instruction::West(n),
                    270 => Instruction::South(n),
                    _ => panic!()
                };
                self.apply(&cardinal);
            }
            Instruction::Left(n) => self.heading = turn_degrees(self.heading, n as i32),
            Instruction::Right(n) => self.heading = turn_degrees(self.heading, -(n as i32)),
            Instruction::North(n) => self.y += n as i32,
            Instruction::South(n) => self.y -= n as i32,
            Instruction::East(n) => self.x += n as i32,
            Instruction::West(n) => self.x -= n as i32
        }
    }
    /// Parses `input` line by line and applies each instruction in order.
    fn execute(&mut self, input: &str) {
        let contents = std::fs::read_to_string(input).unwrap();
        for line in contents.lines() {
            self.apply(&Instruction::from(line));
        }
    }
}
/// Normalizes `heading + turn` into [0, 360).
///
/// `turn` may be negative (clockwise) or positive (counter-clockwise);
/// `rem_euclid` replaces the manual `%` + fix-up loop and always yields
/// a non-negative result.
fn turn_degrees(heading: u16, turn: i32) -> u16 {
    (heading as i32 + turn).rem_euclid(360) as u16
}
/// Taxicab (L1) distance between (x1, y1) and (x2, y2).
fn manhattan_distance(x1: i32, y1: i32, x2: i32, y2: i32) -> i32 {
    let dx = x1 - x2;
    let dy = y1 - y2;
    dx.abs() + dy.abs()
}
fn main() {
    // Part 1: steer the ship directly by heading.
    let mut ship = Ship::new();
    ship.execute("input.txt");
    println!("Part one: {}", manhattan_distance(ship.x, ship.y, 0, 0));
    // Part 2: same input, but instructions drive the waypoint instead.
    let mut ship = Ship::new();
    let mut waypoint = Waypoint::new();
    waypoint.execute(&mut ship, "input.txt");
    println!("Part two: {}", manhattan_distance(ship.x, ship.y, 0, 0));
}
#[test]
fn test_part_1() {
    // Puzzle's worked part-1 example: final Manhattan distance is 25.
    let mut ship = Ship::new();
    ship.execute("example.txt");
    assert_eq!(manhattan_distance(ship.x, ship.y, 0, 0), 25);
}
#[test]
fn test_part_2() {
    // Puzzle's worked part-2 example: final Manhattan distance is 286.
    let mut ship = Ship::new();
    let mut waypoint = Waypoint::new();
    waypoint.execute(&mut ship, "example.txt");
    assert_eq!(manhattan_distance(ship.x, ship.y, 0, 0), 286);
}
use super::*;
/// A text formatter.
///
/// # Semantics
///
/// Formats text according to the marker used:
///
/// - `*bold*`
/// - `/italic/`
/// - `_underline_`
/// - `+strike through+`
/// - `~code~`
/// - `=verbatim=`
///
/// # Syntax
///
/// ```text
/// PRE MARKER BORDER BODY BORDER MARKER POST
/// ```
///
/// Not separated by any whitespace.
///
/// `PRE` is one of `-`, whitespace, `(`, `'`,`"`, `{` or beginning of line.
///
/// `BORDER` is anything but whitespace, `,`, `'` and `"`.
///
/// `MARKER` is one of the markers specified in [semantics](#semantics).
///
/// `BODY` can contain any character but may not span over more than 3 lines.
///
/// `POST` is one of `-`, whitespace, `.`, `,`, `:`, `!`, `?`, `;`, `'`, `"`, `)`, `}`, `[` or
/// end of line.
///
/// The part `BORDER BODY BORDER` is parsed as a [`SecondaryString`] and can contain the
/// standard set of objects when the markup is bold, italic, strike through or underline. The
/// content of verbatim and code is not parsed.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TextMarkup {
    pub kind: TextMarkupKind, // which markup variant, plus its parsed content
}
/// The kind and content of a [`TextMarkup`] object.
///
/// Only code and verbatim can't contain other objects.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TextMarkupKind {
    // TODO maybe make these actual different types instead of an enum
    Bold(SecondaryString<StandardSet>),
    Italic(SecondaryString<StandardSet>),
    Underline(SecondaryString<StandardSet>),
    StrikeThrough(SecondaryString<StandardSet>),
    Code(String), // raw, unparsed content
    Verbatim(String), // raw, unparsed content
}
|
#![feature(plugin)]
#![feature(decl_macro)]
#![feature(custom_derive)]
#![plugin(rocket_codegen)]
extern crate rocket;
extern crate rocket_contrib;
extern crate reqwest;
extern crate r2d2;
extern crate r2d2_postgres;
extern crate postgres;
extern crate jsonwebtoken as jwt;
#[macro_use] extern crate log;
#[macro_use] extern crate serde_derive;
#[macro_use] extern crate serde_json;
#[macro_use] extern crate hyper;
mod controller;
use r2d2::Pool;
use r2d2_postgres::{TlsMode, PostgresConnectionManager};
/// Runtime configuration handed to Rocket as managed state.
pub struct Config {
    jwt_secret: &'static str, // secret used to sign/verify JWTs
    api_root: &'static str // base URL of the external API
}
fn main() {
    // NOTE: option_env! resolves at *compile* time; set these env vars
    // when building, not when running.
    let db_url = option_env!("DB_URL").unwrap_or("postgres://postgres@localhost/pos_dev");
    let manager = PostgresConnectionManager::new(db_url, TlsMode::None).unwrap();
    let pool = Pool::new(manager).unwrap();
    let config = Config {
        // SECURITY: the fallback secret is baked into the binary; any
        // build without JWT_SECRET set shares this visible value.
        jwt_secret: option_env!("JWT_SECRET").unwrap_or("BbZJjyoXAdr8BUZuiKKARWimKfrSmQ6fv8kZ7Offf"),
        api_root: option_env!("EXTERNAL_API_ROOT").unwrap_or("http://api-stage.example.com/v1")
    };
    // Hand the pool and config to Rocket, mount the routes, and block on launch.
    rocket::ignite()
        .manage(pool)
        .manage(config)
        .mount("/", routes![
            controller::index::handler,
            controller::app_setup::handler,
            controller::logout::handler,
            controller::challenge::handler,
            controller::token::handler
        ])
        .launch();
}
|
fn main() {
    // A tuple can mix element types; here: u32, i64, f64, char, bool.
    let tup:(u32, i64, f64, char, bool) = (128, -127, 345.789, 'D', false);
    println!("(x, y, z, d) = {:?}", tup);
    let (_x,_y,_z,_d, _b) = tup; // destructure; the `_` prefixes silence unused-variable warnings
    println!("value for char data-type is : {}", _d);
    println!("value at 3rd position : {}", tup.3); // index access: tup.3 is the char element
    println!("value for bool is : {}", tup.4); // tup.4 is the bool element
    println!("Printing the whole tuple {:?}", tup);
}
use debugger::Debugger;
use egui_glow::EguiGlow;
use glutin::{
event::{ElementState, ModifiersState, VirtualKeyCode, WindowEvent},
PossiblyCurrent, WindowedContext,
};
use pyrite::GbaHandle;
use crate::pyrite_window::PyriteWindow;
/// The standalone debugger window: an egui UI rendered with glow on its
/// own glutin context.
pub struct DebuggerWindow {
    context: Option<WindowedContext<PossiblyCurrent>>, // presumably None only while contexts are swapped — see PyriteWindow
    gl: glow::Context, // GL function loader bound to this window's context
    modifiers: ModifiersState, // last-seen keyboard modifier state
    wants_close: bool, // set when the user requests close (Escape)
    gui: EguiGlow, // egui renderer/state for this window
    repaint: bool, // true when the UI needs another paint pass
    debugger_state: Debugger, // the debugger model rendered by the UI
    gba: GbaHandle, // handle to the emulator being debugged
}
impl DebuggerWindow {
    /// Builds the debugger window from an already-current GL context.
    pub fn new(
        gba: GbaHandle,
        context: WindowedContext<PossiblyCurrent>,
    ) -> anyhow::Result<DebuggerWindow> {
        // SAFETY: the context is current, so its proc-address loader is
        // valid for the lifetime of this call.
        let gl = unsafe {
            glow::Context::from_loader_function(|sym| context.get_proc_address(sym) as *const _)
        };
        let gui = EguiGlow::new(context.window(), &gl);
        Ok(DebuggerWindow {
            context: Some(context),
            gl,
            modifiers: ModifiersState::default(),
            wants_close: false,
            gui,
            repaint: false,
            debugger_state: Debugger::default(),
            gba,
        })
    }
    /// Flags the window for closing when Escape is pressed.
    fn on_keyboard_input(&mut self, input: glutin::event::KeyboardInput) {
        let escape_pressed = matches!(input.virtual_keycode, Some(VirtualKeyCode::Escape))
            && input.state == ElementState::Pressed;
        if escape_pressed {
            self.wants_close = true;
        }
    }
    /// True once the user has asked to close this window.
    pub fn wants_close(&self) -> bool {
        self.wants_close
    }
}
impl Drop for DebuggerWindow {
    fn drop(&mut self) {
        // Detach the debugger model from the emulator first.
        self.debugger_state.destroy(&self.gba);
        // GL resources must be released with this window's context current;
        // try_swap_context presumably makes it current (provided by
        // PyriteWindow — confirm). On failure we can only log and leak.
        if self.try_swap_context() {
            self.gui.destroy(&self.gl);
            log::debug!("destroyed debugger UI");
        } else {
            log::debug!("failed to swap to debugger window context for cleanup");
        }
    }
}
impl PyriteWindow for DebuggerWindow {
    fn on_window_event(&mut self, event: WindowEvent) {
        // Give egui first claim on the event; it returns true when consumed.
        if self.gui.on_event(&event) {
            return;
        }
        match event {
            WindowEvent::KeyboardInput { input, .. } => self.on_keyboard_input(input),
            WindowEvent::Resized(..) => self.repaint = true,
            _ => (),
        }
    }
    fn render(&mut self) {
        let window = self.context.as_ref().expect("no context").window();
        // Paint only when the last update (or a resize) requested it.
        if self.repaint {
            self.gui.paint(window, &self.gl);
            self.repaint = false;
        }
    }
    fn update(&mut self) -> bool {
        let window = self.context.as_ref().expect("no context").window();
        // Run one egui frame; the closure draws the debugger UI. The run
        // result reports whether egui wants a repaint.
        self.repaint = self.gui.run(window, |gui_context| {
            self.debugger_state.render(gui_context, &self.gba);
        });
        self.repaint
    }
    fn context_mut_opt(&mut self) -> &mut Option<glutin::WindowedContext<glutin::PossiblyCurrent>> {
        &mut self.context
    }
    fn context_opt(&self) -> &Option<glutin::WindowedContext<glutin::PossiblyCurrent>> {
        &self.context
    }
    fn modifiers_mut(&mut self) -> &mut ModifiersState {
        &mut self.modifiers
    }
    fn gl(&self) -> &glow::Context {
        &self.gl
    }
}
|
use common::event::EventPublisher;
use common::result::Result;
use crate::domain::collection::{CollectionId, CollectionRepository};
use crate::domain::publication::{PublicationId, PublicationRepository};
/// Use case: add an existing publication to an existing collection.
pub struct AddPublication<'a> {
    event_pub: &'a dyn EventPublisher, // sink for the collection's domain events
    collection_repo: &'a dyn CollectionRepository,
    publication_repo: &'a dyn PublicationRepository,
}
impl<'a> AddPublication<'a> {
    /// Wires the use case to its repositories and event publisher.
    pub fn new(
        event_pub: &'a dyn EventPublisher,
        collection_repo: &'a dyn CollectionRepository,
        publication_repo: &'a dyn PublicationRepository,
    ) -> Self {
        AddPublication {
            event_pub,
            collection_repo,
            publication_repo,
        }
    }
    /// Adds publication `publication_id` to collection `collection_id`.
    ///
    /// Propagates an error when either id is invalid, either entity cannot
    /// be loaded, the domain rejects the addition, the save fails, or
    /// event publication fails. Events are published only after a
    /// successful save.
    pub async fn exec(&self, collection_id: String, publication_id: String) -> Result<()> {
        let collection_id = CollectionId::new(collection_id)?;
        let mut collection = self.collection_repo.find_by_id(&collection_id).await?;
        let publication_id = PublicationId::new(publication_id)?;
        let publication = self.publication_repo.find_by_id(&publication_id).await?;
        collection.add_item(&publication)?;
        self.collection_repo.save(&mut collection).await?;
        self.event_pub
            .publish_all(collection.base().events()?)
            .await?;
        Ok(())
    }
}
|
use math::vec::*;
use math::scalar::*;
use math::traits::*;
use math::quat::*;
use math::pose::*;
use math::geom::*;
use std::ops::Neg;
use std::{default, fmt};
pub const DEFAULT_PLANE_WIDTH: f32 = 0.0008_f32;
/// A plane in implicit form: `dot(normal, x) + offset == 0`.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct Plane {
    pub normal: V3, // plane normal (unit length by convention — confirm at call sites)
    pub offset: f32, // constant term of the plane equation
}
impl default::Default for Plane {
#[inline] fn default() -> Plane { plane(0.0, 0.0, 1.0, 0.0) }
}
impl fmt::Display for Plane {
    /// Formats as `plane((nx, ny, nz), offset)`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let n = &self.normal;
        write!(f, "plane(({}, {}, {}), {})", n.x, n.y, n.z, self.offset)
    }
}
/// Result of classifying a point or vertex set against a `Plane`.
/// The discriminants form a bitmask: bit 0 = something under the plane,
/// bit 1 = something over; both bits set means split.
#[repr(u8)]
#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub enum PlaneTestResult {
    Coplanar = 0b00,
    Under = 0b01,
    Over = 0b10,
    Split = 0b11, // Under | Over, not possible for points
}
impl Neg for Plane {
type Output = Plane;
#[inline] fn neg(self) -> Plane { Plane::new(-self.normal, -self.offset) }
}
impl Plane {
#[inline] pub fn zero() -> Plane { Plane::new(V3::zero(), 0.0) }
#[inline]
pub fn to_v4(&self) -> V4 {
V4::expand(self.normal, self.offset)
}
#[inline]
pub fn from_v4(v: V4) -> Plane {
Plane::new(v.xyz(), v.w)
}
#[inline]
pub fn from_points(points: &[V3]) -> Plane {
assert_ge!(points.len(), 3);
let c = points.iter().fold(V3::zero(), |a, &b| a+b) / (points.len() as f32);
let mut n = V3::zero();
for i in 0..points.len() {
let i1 = (i + 1) % points.len();
n += cross(points[i] - c, points[i1] - c);
}
Plane::from_norm_and_point(n.norm_or(0.0, 0.0, 1.0), c)
}
#[inline]
pub fn new(normal: V3, offset: f32) -> Plane {
Plane{ normal: normal, offset: offset }
}
#[inline]
pub fn from_tri(v0: V3, v1: V3, v2: V3) -> Plane {
Plane::from_norm_and_point(tri_normal(v0, v1, v2), v0)
}
#[inline]
pub fn from_norm_and_point(n: V3, pt: V3) -> Plane {
Plane::new(n, -dot(n, pt))
}
#[inline]
pub fn intersect_with_line(&self, line_p0: V3, line_p1: V3) -> V3 {
let dif = line_p1 - line_p0;
let dn = self.normal.dot(dif);
let t = safe_div0(-(self.offset + dot(self.normal, line_p0)), dn);
line_p0 + dif*t
}
#[inline]
pub fn project(&self, pt: V3) -> V3 {
pt - self.normal * dot(self.to_v4(), V4::expand(pt, 1.0))
}
#[inline]
pub fn translate(&self, v: V3) -> Plane {
Plane::new(self.normal, self.offset - dot(self.normal, v))
}
#[inline]
pub fn rotate(&self, r: Quat) -> Plane {
Plane::new(r * self.normal, self.offset)
}
#[inline]
pub fn transform(self, p: Pose) -> Plane {
let norm = p.orientation * self.normal;
let offset = self.offset - dot(norm, p.position);
Plane::new(norm, offset)
}
#[inline]
pub fn scale3(&self, s: V3) -> Plane {
let new_normal = self.normal / s;
let len = new_normal.length();
debug_assert!(!len.approx_zero());
Plane::new(new_normal/len, self.offset/len)
}
#[inline]
pub fn scale(&self, s: f32) -> Plane {
Plane::new(self.normal, self.offset*s)
}
#[inline]
pub fn test_e(&self, pos: V3, e: f32) -> PlaneTestResult {
debug_assert_ge!(e, 0.0);
let a = dot(pos, self.normal) + self.offset;
if a > e { PlaneTestResult::Over }
else if a < -e { PlaneTestResult::Under }
else { PlaneTestResult::Coplanar }
}
#[inline]
pub fn split_test_e(&self, verts: &[V3], e: f32) -> PlaneTestResult {
let u = self.split_test_val_e(verts, e);
if u == PlaneTestResult::Coplanar as usize { PlaneTestResult::Coplanar }
else if u == PlaneTestResult::Under as usize { PlaneTestResult::Under }
else if u == PlaneTestResult::Over as usize { PlaneTestResult::Over }
else if u == PlaneTestResult::Split as usize { PlaneTestResult::Split }
else { unreachable!("bad plane test result: {}", u) }
}
#[inline]
pub fn split_test_val_e(&self, verts: &[V3], e: f32) -> usize {
let mut u = 0usize;
for &v in verts.iter() {
u |= self.test_e(v, e) as usize;
if u == PlaneTestResult::Split as usize {
break;
}
}
u
}
#[inline]
pub fn offset_by(&self, o: f32) -> Plane {
Plane::new(self.normal, self.offset+o)
}
#[inline]
pub fn split_test(&self, verts: &[V3]) -> PlaneTestResult {
self.split_test_e(verts, DEFAULT_PLANE_WIDTH)
}
#[inline]
pub fn split_test_val(&self, verts: &[V3]) -> usize {
self.split_test_val_e(verts, DEFAULT_PLANE_WIDTH)
}
#[inline]
pub fn test(&self, pos: V3) -> PlaneTestResult {
self.test_e(pos, DEFAULT_PLANE_WIDTH)
}
}
impl Dot for Plane {
    /// Four-component dot product of the planes' (normal, offset) vectors.
    #[inline]
    fn dot(self, o: Plane) -> f32 {
        self.to_v4().dot(o.to_v4())
    }
}
#[inline]
pub fn plane(nx: f32, ny: f32, nz: f32, o: f32) -> Plane {
Plane::new(vec3(nx, ny, nz), o)
}
|
use std::cmp;
use std::collections::HashSet;
use std::collections::VecDeque;
use std::env;
use std::fs;
/// Returns the first value after the preamble that is not the sum of two
/// distinct entries in the `preamble_len`-wide sliding window before it.
///
/// Accepts any `&[i64]` (a `&Vec<i64>` coerces). The previous HashSet
/// approach had two bugs: `val == 2 * x` matched with only a single `x`
/// in the window, and popping a duplicated value removed it from the set
/// while a copy remained in the window.
fn find_invalid(cipher: &[i64], preamble_len: usize) -> i64 {
    let mut window: VecDeque<i64> = cipher.iter().take(preamble_len).copied().collect();
    for &val in cipher.iter().skip(preamble_len) {
        // Pairwise scan over distinct window positions; O(k^2) with k = 25
        // is cheap and handles duplicate values correctly.
        let found = window
            .iter()
            .enumerate()
            .any(|(a, &x)| window.iter().skip(a + 1).any(|&y| x + y == val));
        if !found {
            return val;
        }
        // Slide the window forward by one.
        window.pop_front();
        window.push_back(val);
    }
    unreachable!("cipher contained no invalid value");
}
/// Finds the encryption weakness: min + max of a contiguous run of at
/// least two values in `cipher` that sums to `value`.
///
/// Fixes two defects in the previous version: the `end > start` guard
/// rejects single-element runs (which would have returned `2 * value`
/// whenever `value` itself appeared first), and `start..=end` lets the
/// run include the final element of `cipher`. A running total replaces
/// re-summing the slice for every end index (O(n^2) instead of O(n^3)).
fn find_sequence(cipher: Vec<i64>, value: i64) -> i64 {
    for start in 0..cipher.len() {
        let mut total = 0i64;
        for end in start..cipher.len() {
            total += cipher[end];
            if total == value && end > start {
                let window = &cipher[start..=end];
                let min = window.iter().min().unwrap();
                let max = window.iter().max().unwrap();
                return min + max;
            } else if total > value {
                // All values are positive in practice; a longer run from
                // this start can only grow.
                break;
            }
        }
    }
    unreachable!("no contiguous run sums to {}", value);
}
/// Reads the cipher from the file named by the first CLI argument, then
/// solves both puzzle parts (the printed "weakness" is the part-2 answer).
fn main() {
    let args: Vec<String> = env::args().collect();
    let cipher: Vec<i64> = fs::read_to_string(&args[1])
        .unwrap()
        .lines()
        .map(|line| line.parse::<i64>())
        .map(Result::unwrap)
        .collect();
    let invalid = find_invalid(&cipher, 25);
    let weakness = find_sequence(cipher, invalid);
    println!("weakness = {}", weakness);
}
|
use thiserror::Error;
use crate::indices::Room;
/// Validation failures produced by `RoomGenerationParamsBuilder::build`.
#[derive(Debug, Clone, Error)]
pub enum RoomGenerationParamsError {
    #[error("Tile probabilities must be in interval [0, 1.0) and their sum must be less than 1! {self:?}")]
    BadProbabilities { chance_plain: f32, chance_wall: f32 },
    // NOTE(review): the builder currently rejects only radius == 0, while
    // this message promises a minimum of 4 — reconcile message and check.
    #[error("Radius must be at least 4, got {radius}")]
    BadRadius { radius: u32 },
}
/// Validated parameters for generating a single room's terrain.
/// Construct via `RoomGenerationParams::builder()`.
#[derive(Debug, Clone)]
pub struct RoomGenerationParams {
    pub seed: u64, // RNG seed for deterministic generation
    pub room: Room, // which room these parameters target
    pub radius: u32, // validated non-zero by the builder
    pub plain_dilation: u32,
    pub chance_plain: f32, // plain-tile probability, in [0, 1)
    pub chance_wall: f32, // wall-tile probability, in [0, 1)
}
/// Mutable staging area for `RoomGenerationParams`; `build()` validates
/// the values before producing the immutable params.
#[derive(Debug, Clone, Default)]
pub struct RoomGenerationParamsBuilder {
    pub radius: u32,
    pub plain_dilation: u32,
    pub chance_plain: f32,
    pub chance_wall: f32,
    pub seed: u64,
    pub room: Room,
}
impl RoomGenerationParams {
    /// Builder pre-loaded with defaults: radius 4, dilation 1, 1/3 + 1/3
    /// tile probabilities, a fixed seed; the remaining fields take their
    /// `Default` values via struct update.
    pub fn builder() -> RoomGenerationParamsBuilder {
        RoomGenerationParamsBuilder {
            radius: 4,
            plain_dilation: 1,
            chance_plain: 1.0 / 3.0,
            chance_wall: 1.0 / 3.0,
            seed: 0xb00b135,
            ..Default::default()
        }
    }
}
impl RoomGenerationParamsBuilder {
    /// Validates the staged values and produces the immutable params.
    ///
    /// Each chance must be finite and non-negative, `chance_wall` must be
    /// below 1, and the two chances may not sum above 1 (this sum check
    /// also bounds `chance_plain` from above, since `chance_wall >= 0`).
    pub fn build(self) -> Result<RoomGenerationParams, RoomGenerationParamsError> {
        if !self.chance_wall.is_finite()
            || !self.chance_plain.is_finite()
            || self.chance_wall < 0.0
            || 1.0 <= self.chance_wall
            || self.chance_plain < 0.0
            || 1.0 < self.chance_wall + self.chance_plain
        {
            return Err(RoomGenerationParamsError::BadProbabilities {
                chance_plain: self.chance_plain,
                chance_wall: self.chance_wall,
            });
        }
        // NOTE(review): only radius == 0 is rejected, though BadRadius's
        // message claims a minimum of 4 — confirm the intended minimum.
        if self.radius == 0 {
            return Err(RoomGenerationParamsError::BadRadius {
                radius: self.radius,
            });
        }
        Ok(RoomGenerationParams {
            seed: self.seed,
            room: self.room,
            radius: self.radius,
            plain_dilation: self.plain_dilation,
            chance_plain: self.chance_plain,
            chance_wall: self.chance_wall,
        })
    }
    /// Sets the RNG seed.
    pub fn with_seed(mut self, seed: u64) -> Self {
        self.seed = seed;
        self
    }
    /// Sets the target room by its axial coordinate.
    pub fn with_room(mut self, room_id: crate::prelude::Axial) -> Self {
        self.room = Room(room_id);
        self
    }
    /// Sets the room radius (validated in `build`).
    pub fn with_radius(mut self, radius: u32) -> Self {
        self.radius = radius;
        self
    }
    /// Sets the plain-tile dilation factor.
    pub fn with_plain_dilation(mut self, plain_dilation: u32) -> Self {
        self.plain_dilation = plain_dilation;
        self
    }
    /// Sets the plain-tile probability (validated in `build`).
    pub fn with_chance_plain(mut self, chance_plain: f32) -> Self {
        self.chance_plain = chance_plain;
        self
    }
    /// Sets the wall-tile probability (validated in `build`).
    pub fn with_chance_wall(mut self, chance_wall: f32) -> Self {
        self.chance_wall = chance_wall;
        self
    }
}
|
use std::cmp;
use card::Card;
use types;
use calculator::utility;
/// Detects a three-of-a-kind among `cards`.
///
/// Returns the highest rank that appears exactly three times, or `None`.
/// A rank appearing four times is deliberately ignored here: that hand
/// belongs to the four-of-a-kind detector.
pub fn test(cards: Vec<Card>) -> Option<types::Combination> {
    if cards.len() < 3 {
        return None;
    }
    let hash_map = utility::get_count_hash_map(&cards[..]);
    // filter/map/max replaces the manual largest-set fold; `max` relies on
    // Rank: Ord (the original used cmp::max on ranks).
    hash_map
        .iter()
        .filter(|&(_, &count)| count == 3)
        .map(|(&rank, _)| rank)
        .max()
        .map(types::Combination::ThreeOfAKind)
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Shorthand constructor to keep the card fixtures readable.
    fn card(rank: types::Rank, suit: types::Suit) -> Card {
        Card { rank, suit }
    }
    #[test]
    fn none_for_empty() {
        assert_eq!(None, test(vec![]));
    }
    #[test]
    fn none_for_seven_cards_without_three_of_a_kind() {
        assert_eq!(
            None,
            test(vec![
                card(types::Rank::Four, types::Suit::Spades),
                card(types::Rank::Two, types::Suit::Hearts),
                card(types::Rank::Six, types::Suit::Spades),
                card(types::Rank::Two, types::Suit::Hearts),
                card(types::Rank::King, types::Suit::Clubs),
                card(types::Rank::Ace, types::Suit::Diamonds),
                card(types::Rank::Ace, types::Suit::Hearts),
            ])
        );
    }
    #[test]
    fn none_for_seven_cards_with_four_of_a_kind() {
        // Four of a kind is intentionally not reported as three of a kind.
        assert_eq!(
            None,
            test(vec![
                card(types::Rank::Four, types::Suit::Spades),
                card(types::Rank::Two, types::Suit::Hearts),
                card(types::Rank::Six, types::Suit::Spades),
                card(types::Rank::Four, types::Suit::Hearts),
                card(types::Rank::Four, types::Suit::Clubs),
                card(types::Rank::Four, types::Suit::Diamonds),
                card(types::Rank::Ace, types::Suit::Hearts),
            ])
        );
    }
    #[test]
    fn option_for_seven_cards_with_three_of_a_kind() {
        assert_eq!(
            Some(types::Combination::ThreeOfAKind(types::Rank::Four)),
            test(vec![
                card(types::Rank::Jack, types::Suit::Spades),
                card(types::Rank::Two, types::Suit::Hearts),
                card(types::Rank::Six, types::Suit::Spades),
                card(types::Rank::Four, types::Suit::Hearts),
                card(types::Rank::Four, types::Suit::Clubs),
                card(types::Rank::Four, types::Suit::Diamonds),
                card(types::Rank::Ace, types::Suit::Hearts),
            ])
        );
    }
    #[test]
    fn option_for_seven_cards_chooses_highest_with_three_of_a_kind() {
        // Jacks and Fours both appear three times; Jack must win.
        assert_eq!(
            Some(types::Combination::ThreeOfAKind(types::Rank::Jack)),
            test(vec![
                card(types::Rank::Four, types::Suit::Spades),
                card(types::Rank::Four, types::Suit::Hearts),
                card(types::Rank::Jack, types::Suit::Spades),
                card(types::Rank::Four, types::Suit::Hearts),
                card(types::Rank::Ace, types::Suit::Clubs),
                card(types::Rank::Jack, types::Suit::Diamonds),
                card(types::Rank::Jack, types::Suit::Hearts),
            ])
        );
    }
}
|
use failure::Fallible;
use reqwest;
use rodio::Sink;
use slog_scope::{info, warn};
use std::convert::From;
use std::env;
use std::fmt::{self, Display};
use std::io::BufReader;
use std::sync::Arc;
use std::thread::{Builder, JoinHandle};
use async_trait::async_trait;
use crossbeam_channel::{self, Receiver, Sender};
use tokio::runtime::Runtime;
use tokio::task::spawn_blocking;
pub use err::*;
use crate::components::finite_stream::FiniteStream;
use crate::player::{PauseState, PlaybackHandle};
/// Plays audio fetched over HTTP on the default rodio output device.
pub struct HttpPlayer {
    // Keeps an eventual background thread handle alive; currently always None.
    _handle: Option<JoinHandle<()>>,
    // Optional (username, password) pair for HTTP basic auth, read from env vars.
    basic_auth: Option<(String, String)>,
    // Shared client so playback handles can reuse connections.
    http_client: Arc<reqwest::Client>,
}
/// Handle to one in-progress HTTP playback; controls the underlying rodio sink.
pub struct HttpPlaybackHandle {
    // Cancellation channel; currently unused (see the commented-out code in `stop`).
    tx: Sender<()>,
    sink: Arc<Sink>,
    basic_auth: Option<(String, String)>,
    // Stream URL, kept so `replay` can re-queue the same resource.
    url: String,
    http_client: Arc<reqwest::Client>,
}
impl HttpPlaybackHandle {
    /// Fetches the stream at `self.url` (using basic auth when configured),
    /// decodes it on a blocking task and appends it to the rodio sink.
    ///
    /// # Errors
    /// Fails when the HTTP request cannot be sent or when a blocking task
    /// panics (surfaced as a `JoinError` by the `?` on `.await`).
    pub async fn queue(&self) -> Fallible<()> {
        let mut builder = self.http_client.get(&self.url);
        if let Some((ref username, ref password)) = &self.basic_auth {
            builder = builder.basic_auth(username, Some(password));
        }
        // Propagate network errors instead of panicking (was `.unwrap()`).
        let response = builder.send().await?;
        // NOTE(review): the unwraps inside the blocking closures panic in the
        // worker task; the panic is converted into an error by the `?` above
        // the JoinHandle rather than aborting the process.
        let stream = spawn_blocking(move || FiniteStream::from_response(response).unwrap()).await?;
        let source =
            spawn_blocking(move || rodio::Decoder::new(BufReader::new(stream)).unwrap()).await?;
        self.sink.append(source);
        Ok(())
    }
}
#[async_trait]
impl PlaybackHandle for HttpPlaybackHandle {
    /// Stops playback by clearing the sink.
    async fn stop(&self) -> Fallible<()> {
        // info!("Cancelling HTTP Player");
        // self.tx.send(()).unwrap();
        self.sink.stop();
        Ok(())
    }
    /// Playback is complete when the sink has no queued sources left.
    async fn is_complete(&self) -> Fallible<bool> {
        Ok(self.sink.empty())
    }
    async fn pause(&self) -> Fallible<()> {
        self.sink.pause();
        Ok(())
    }
    /// Resumes playback; the pause position is ignored for HTTP streams
    /// (hence the underscore — the old name triggered an unused warning).
    async fn cont(&self, _pause_state: PauseState) -> Fallible<()> {
        self.sink.play();
        Ok(())
    }
    /// Restarts from the beginning by re-queuing the stored URL.
    async fn replay(&self) -> Fallible<()> {
        self.sink.stop();
        self.queue().await?;
        self.sink.play();
        Ok(())
    }
}
impl HttpPlayer {
    /// Creates a player, reading optional basic-auth credentials from the
    /// `HTTP_PLAYER_USERNAME` / `HTTP_PLAYER_PASSWORD` environment variables.
    pub fn new() -> Fallible<Self> {
        info!("Creating new HttpPlayer...");
        // let (tx, rx) = crossbeam_channel::bounded(1);
        let http_client = Arc::new(reqwest::Client::new());
        // Basic auth is only used when both variables are present.
        let basic_auth = {
            // `.ok()` replaces the old `.map(|x| Some(x)).unwrap_or(None)`.
            let username = env::var("HTTP_PLAYER_USERNAME").ok();
            let password = env::var("HTTP_PLAYER_PASSWORD").ok();
            if let (Some(username), Some(password)) = (username, password) {
                Some((username, password))
            } else {
                None
            }
        };
        let player = HttpPlayer {
            _handle: None,
            basic_auth,
            http_client,
        };
        Ok(player)
    }

    /// Starts playback of `url` on the default output device and returns a
    /// handle that can pause, resume, replay or stop it.
    ///
    /// # Errors
    /// Fails when no audio output device exists or the stream cannot be
    /// fetched/decoded.
    pub async fn start_playback(
        &self,
        url: &str,
        pause_state: Option<PauseState>,
    ) -> Result<HttpPlaybackHandle, failure::Error> {
        if let Some(pause_state) = pause_state {
            warn!("Ignoring pause state: {:?}", pause_state);
        }
        // Return an error instead of panicking when no output device exists.
        let device = rodio::default_output_device()
            .ok_or_else(|| failure::err_msg("no default audio output device"))?;
        // `url` is a `&str`, so `to_string` alone copies it (the old
        // `.clone()` was a no-op on the reference).
        let url = url.to_string();
        let basic_auth = self.basic_auth.clone();
        let (tx, rx) = crossbeam_channel::bounded(1);
        let sink = Arc::new(Sink::new(&device));
        // Background thread parks until something is sent on `tx` (currently never).
        let _handle = Builder::new()
            .name("http-player".to_string())
            .spawn(move || {
                let mut rt = Runtime::new().unwrap();
                let f = async {
                    let _msg = rx.recv();
                };
                rt.block_on(f);
            })
            .unwrap();
        let handle = HttpPlaybackHandle {
            tx,
            sink,
            basic_auth,
            url,
            http_client: self.http_client.clone(),
        };
        handle.queue().await?;
        handle
            .cont(PauseState {
                pos: std::time::Duration::from_secs(0),
            })
            .await?;
        Ok(handle)
    }
}
pub mod err {
use super::*;
#[derive(Debug)]
pub enum Error {
IO(std::io::Error),
Http(reqwest::Error),
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::IO(err) => write!(f, "HTTP Player IO Error {}", err),
Error::Http(err) => write!(f, "HTTP Player HTTP Error {}", err),
}
}
}
impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self {
Error::IO(err)
}
}
impl From<reqwest::Error> for Error {
fn from(err: reqwest::Error) -> Self {
Error::Http(err)
}
}
impl std::error::Error for Error {}
}
|
use super::super::{ShowingContextmenu, ShowingContextmenuData};
use super::*;
use crate::arena::Untyped;
impl Room {
    /// Renders the full-screen mask plus the floating context menu at the
    /// page coordinates stored in `contextmenu`.
    pub(super) fn render_contextmenu(&self, contextmenu: &ShowingContextmenu) -> Html {
        Html::div(
            Attributes::new().class(Self::class("contextmenu-mask")),
            Events::new()
                // Clicking anywhere on the mask closes the menu.
                .on_click(self, |_| Msg::SetShowingContextmenu(None))
                .on("contextmenu", self, |e| {
                    let e = unwrap!(e.dyn_into::<web_sys::MouseEvent>().ok(); Msg::NoOp);
                    Msg::OnTableContextmenu(e)
                }),
            vec![Html::div(
                Attributes::new()
                    .class(Self::class("contextmenu"))
                    .style("left", format!("{}px", contextmenu.page_x))
                    .style("top", format!("{}px", contextmenu.page_y)),
                Events::new(),
                // Dispatch on the kind of block the menu was opened on.
                match &contextmenu.data {
                    ShowingContextmenuData::Boxblock(block) => {
                        self.render_contextmenu_boxblock(block)
                    }
                    ShowingContextmenuData::Character(block) => {
                        self.render_contextmenu_character(block)
                    }
                    ShowingContextmenuData::Craftboard(block) => {
                        self.render_contextmenu_craftboard(block)
                    }
                    ShowingContextmenuData::Textboard(block) => {
                        self.render_contextmenu_textboard(block)
                    }
                },
            )],
        )
    }

    /// Menu entries for a box block: name header, details, position/grid
    /// toggles, component registration, delete.
    fn render_contextmenu_boxblock(&self, boxblock: &BlockMut<block::Boxblock>) -> Vec<Html> {
        vec![
            Marker::light(
                Attributes::new(),
                Events::new(),
                vec![Html::text(
                    boxblock
                        .map(|boxblock| boxblock.name().clone())
                        .unwrap_or(String::from("")),
                )],
            ),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = boxblock.id();
                    move |_| Msg::OpenBoxblockModeless(block_id)
                }),
                vec![Html::text("詳細を表示")],
            ),
            self.render_is_fixed_position(
                boxblock
                    .map(|boxblock| boxblock.is_fixed_position())
                    .unwrap_or(false),
                BlockMut::clone(&boxblock).untyped(),
            ),
            self.render_is_bind_to_grid(
                boxblock
                    .map(|boxblock| boxblock.is_bind_to_grid())
                    .unwrap_or(false),
                BlockMut::clone(&boxblock).untyped(),
            ),
            self.render_create_component(BlockMut::clone(&boxblock).untyped()),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = boxblock.id();
                    move |_| Msg::RemoveBoxblock(block_id)
                }),
                vec![Html::text("削除")],
            ),
        ]
    }

    /// Menu entries for a character: like box blocks, plus a chat shortcut.
    fn render_contextmenu_character(&self, character: &BlockMut<block::Character>) -> Vec<Html> {
        vec![
            Marker::light(
                Attributes::new(),
                Events::new(),
                vec![Html::text(
                    character
                        .map(|character| character.name().clone())
                        .unwrap_or(String::from("")),
                )],
            ),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = character.id();
                    move |_| Msg::OpenCharacterModeless(block_id)
                }),
                vec![Html::text("詳細を表示")],
            ),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let user = ChatUser::Character(BlockMut::clone(&character));
                    move |_| Msg::OpenChatModeless(user)
                }),
                vec![Html::text("チャットを表示")],
            ),
            self.render_is_fixed_position(
                character
                    .map(|character| character.is_fixed_position())
                    .unwrap_or(false),
                BlockMut::clone(&character).untyped(),
            ),
            self.render_is_bind_to_grid(
                character
                    .map(|character| character.is_bind_to_grid())
                    .unwrap_or(false),
                BlockMut::clone(&character).untyped(),
            ),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = character.id();
                    move |_| Msg::RemoveCharacter(block_id)
                }),
                vec![Html::text("削除")],
            ),
        ]
    }

    /// Menu entries for a craftboard.
    fn render_contextmenu_craftboard(&self, craftboard: &BlockMut<block::Craftboard>) -> Vec<Html> {
        vec![
            Marker::light(
                Attributes::new(),
                Events::new(),
                vec![Html::text(
                    craftboard
                        .map(|craftboard| craftboard.name().clone())
                        .unwrap_or(String::from("")),
                )],
            ),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = craftboard.id();
                    move |_| Msg::OpenCraftboardModeless(block_id)
                }),
                vec![Html::text("詳細を表示")],
            ),
            self.render_is_fixed_position(
                craftboard
                    .map(|craftboard| craftboard.is_fixed_position())
                    .unwrap_or(false),
                BlockMut::clone(&craftboard).untyped(),
            ),
            self.render_is_bind_to_grid(
                craftboard
                    .map(|craftboard| craftboard.is_bind_to_grid())
                    .unwrap_or(false),
                BlockMut::clone(&craftboard).untyped(),
            ),
            self.render_create_component(BlockMut::clone(&craftboard).untyped()),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = craftboard.id();
                    move |_| Msg::RemoveCraftboard(block_id)
                }),
                vec![Html::text("削除")],
            ),
        ]
    }

    /// Menu entries for a textboard (uses `title()` instead of `name()`,
    /// and has no position/grid toggles).
    fn render_contextmenu_textboard(&self, textboard: &BlockMut<block::Textboard>) -> Vec<Html> {
        vec![
            Marker::light(
                Attributes::new(),
                Events::new(),
                vec![Html::text(
                    textboard
                        .map(|textboard| textboard.title().clone())
                        .unwrap_or(String::from("")),
                )],
            ),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = textboard.id();
                    move |_| Msg::OpenTextboardModeless(block_id)
                }),
                vec![Html::text("詳細を表示")],
            ),
            self.render_create_component(BlockMut::clone(&textboard).untyped()),
            Btn::menu(
                Attributes::new(),
                Events::new().on_click(self, {
                    let block_id = textboard.id();
                    move |_| Msg::RemoveTextboard(block_id)
                }),
                vec![Html::text("削除")],
            ),
        ]
    }

    /// Toggle button that flips the block's fixed-position flag.
    fn render_is_fixed_position(&self, is_fixed_position: bool, block: BlockMut<Untyped>) -> Html {
        Btn::menu(
            Attributes::new(),
            Events::new().on_click(self, move |_| {
                Msg::SetBlockIsFixedPosition(block, !is_fixed_position)
            }),
            vec![if is_fixed_position {
                Html::text("固定解除")
            } else {
                Html::text("場所を固定")
            }],
        )
    }

    /// Toggle button that flips the block's snap-to-grid flag.
    fn render_is_bind_to_grid(&self, is_bind_to_grid: bool, block: BlockMut<Untyped>) -> Html {
        Btn::menu(
            Attributes::new(),
            Events::new().on_click(self, move |_| {
                Msg::SetBlockIsBindToGrid(block, !is_bind_to_grid)
            }),
            vec![if is_bind_to_grid {
                Text::condense_75("グリッドにスナップしない")
            } else {
                Text::condense_75("グリッドにスナップする")
            }],
        )
    }

    /// Button that registers the block as a reusable component.
    fn render_create_component(&self, block: BlockMut<Untyped>) -> Html {
        Btn::menu(
            Attributes::new(),
            Events::new().on_click(self, move |_| Msg::CreateComponent(block)),
            vec![Text::condense_75("コンポーネントとして登録")],
        )
    }
}
|
/// Lexer token module (module name kept for compatibility with callers).
#[allow(non_snake_case)]
pub mod Token {
    /// A token holding its kind and literal text.
    pub struct Token {
        pub _type: String,
        pub _value: String,
    }

    /// Creates a placeholder token with sentinel "None ..." strings.
    pub fn new() -> Token {
        Token {
            _type: String::from("None type"),
            _value: String::from("None value"),
        }
    }

    // Standard-trait entry point so generic code can use `Token::default()`.
    impl Default for Token {
        fn default() -> Self {
            new()
        }
    }
}
/// AST node module (module name kept for compatibility with callers);
/// `_level` distinguishes Program / Function / Statement / Expression.
#[allow(non_snake_case)]
pub mod Node {
    /// A generic AST node with edges to child nodes by index.
    pub struct Node {
        pub _level: String,
        pub _type: String,
        pub _name: String,
        pub _value: String,
        // Indices of child nodes.
        pub to: Vec<usize>,
    }

    /// Creates a placeholder node with sentinel "None ..." strings.
    pub fn new() -> Node {
        Node {
            _level: "None _level".to_string(),
            _type: "None type".to_string(),
            _name: "None name".to_string(),
            _value: "None value".to_string(),
            to: Vec::new(),
        }
    }

    // Standard-trait entry point so generic code can use `Node::default()`.
    impl Default for Node {
        fn default() -> Self {
            new()
        }
    }
}
// Program:
// _level = "Program"
// _type = "FILE"
// _name = file name
// Function:
// _level = "Function"
// _type = TYPE_KEYWORD
// _name = function name
// Statement:
// _level = "Statement"
// _type = STAT_KEYWORD
// _name = stat name
// Expression:
// _level = "Expression"
// _type = KEY_WORD
// _value = value |
use crate::error::{Error, Result};
use quote::ToTokens;
use syn::spanned::Spanned;
use syn::{
punctuated::Punctuated, Expr, ExprBinary, ExprField, ExprLit, ExprMethodCall, ExprParen,
ExprPath, ExprUnary, Token,
};
/// When true, `visit_expr` prints every expression before and after visiting.
const TRACING : bool = false;
// use proc_macro2::Span;
/// A visitor trait for a subset of expressions.
/// Default behaviour is to clone the expression.
/// A visitor trait for a subset of expressions.
/// Default behaviour is to clone the expression, rebuilding any visited children.
pub trait Visitor {
    /// eg. `x.sin()` — visits the receiver and every argument.
    fn visit_method_call(&self, expr: &ExprMethodCall) -> Result<Expr> {
        let receiver = self.visit_expr(&expr.receiver)?;
        // Collecting into Result short-circuits on the first failing argument.
        let args: Punctuated<Expr, Token![,]> = expr
            .args
            .iter()
            .map(|a| self.visit_expr(a))
            .collect::<Result<Punctuated<Expr, Token![,]>>>()?;
        Ok(ExprMethodCall {
            attrs: expr.attrs.clone(),
            receiver: Box::new(receiver),
            // Punctuation tokens are `Copy`; no clone needed.
            dot_token: expr.dot_token,
            method: expr.method.clone(),
            turbofish: expr.turbofish.clone(),
            paren_token: expr.paren_token,
            args,
        }
        .into())
    }
    /// eg. "1.0" — literals are leaves; returned unchanged.
    fn visit_lit(&self, expr: &ExprLit) -> Result<Expr> {
        Ok(expr.clone().into())
    }
    /// eg. "x" or "f64::const::PI" — paths are leaves; returned unchanged.
    fn visit_path(&self, exprpath: &ExprPath) -> Result<Expr> {
        Ok(exprpath.clone().into())
    }
    /// eg. "x.y" — visits the base expression.
    fn visit_field(&self, exprfield: &ExprField) -> Result<Expr> {
        let base = self.visit_expr(&exprfield.base)?;
        Ok(ExprField {
            attrs: exprfield.attrs.clone(),
            base: Box::new(base),
            dot_token: exprfield.dot_token,
            member: exprfield.member.clone(),
        }
        .into())
    }
    /// eg. "(x)" — visits the inner expression.
    fn visit_paren(&self, exprparen: &ExprParen) -> Result<Expr> {
        let expr = self.visit_expr(&exprparen.expr)?;
        Ok(ExprParen {
            attrs: exprparen.attrs.clone(),
            paren_token: exprparen.paren_token,
            expr: Box::new(expr),
        }
        .into())
    }
    /// eg. "let x = 1;" "fn x();" "x * 2" or "x * 2;"
    // fn visit_stmt(&self, stmt: &Stmt) -> Result<Stmt, Error> {
    //     match stmt {
    //         Stmt::Local(local) => Ok(local.clone().into()),
    //         Stmt::Item(item) => Ok(item.clone().into()),
    //         Stmt::Expr(expr) => Ok(self.visit_expr(expr).into()),
    //         Stmt::Semi(expr, semi) => Ok(Stmt::Semi(self.visit_expr(expr), semi)),
    //     }
    // }
    // fn visit_block(&self, block: &Block) -> Result<Expr> {
    //     let stmts = block.stmts.iter()
    //         .map(|stmt| self.visit_stmt(stmt))
    //         .collect::<Result<Vec<Stmt>>>()?;
    //     Ok(Block {
    //         brace_token: block.brace_token,
    //         stmts
    //     }.into())
    // }
    /// eg. "x + y" — visits both operands.
    fn visit_binary(&self, exprbinary: &ExprBinary) -> Result<Expr> {
        let left = self.visit_expr(&exprbinary.left)?;
        let right = self.visit_expr(&exprbinary.right)?;
        Ok(ExprBinary {
            attrs: exprbinary.attrs.clone(),
            left: Box::new(left),
            // `BinOp` is `Copy`; no clone needed.
            op: exprbinary.op,
            right: Box::new(right),
        }
        .into())
    }
    /// eg. "-x" — visits the operand.
    fn visit_unary(&self, exprunary: &ExprUnary) -> Result<Expr> {
        let expr = self.visit_expr(&exprunary.expr)?;
        Ok(ExprUnary {
            attrs: exprunary.attrs.clone(),
            op: exprunary.op,
            expr: Box::new(expr),
        }
        .into())
    }
    /// Visit a generalised expression, optionally tracing before/after forms.
    fn visit_expr(&self, expr: &Expr) -> Result<Expr> {
        if TRACING {
            println!("visit_expr: {}", expr.to_token_stream());
            // Propagate errors instead of unwrapping — tracing must not
            // turn a recoverable visit failure into a panic.
            let expr = self.visit_expr_core(expr)?;
            println!("..visit_expr: {}", expr.to_token_stream());
            Ok(expr)
        } else {
            self.visit_expr_core(expr)
        }
    }
    /// Dispatches to the specific visit method; unsupported forms are errors.
    fn visit_expr_core(&self, expr: &Expr) -> Result<Expr> {
        use Expr::*;
        match expr {
            Unary(exprunary) => self.visit_unary(exprunary),
            Binary(exprbinary) => self.visit_binary(exprbinary),
            MethodCall(exprmethodcall) => self.visit_method_call(exprmethodcall),
            Paren(exprparen) => self.visit_paren(exprparen),
            Lit(exprlit) => self.visit_lit(exprlit),
            Path(exprpath) => self.visit_path(exprpath),
            Field(exprfield) => self.visit_field(exprfield),
            _ => Err(Error::UnsupportedExpr(expr.span())),
        }
    }
}
|
#[cfg(test)]
mod tests {
    use super::*;

    // Known multiplicative-persistence values: 39 -> 27 -> 14 -> 4 (3 steps).
    #[test]
    fn sample_tests() {
        assert_eq!(super::persistence(39), 3);
        assert_eq!(super::persistence(4), 0);
        assert_eq!(super::persistence(25), 2);
        assert_eq!(super::persistence(999), 4);
    }
}
/// Returns the multiplicative persistence of `num`: how many times its
/// digits must be multiplied together before a single digit remains.
fn persistence(num: u64) -> u64 {
    let mut steps = 0;
    multi_persist(num, &mut steps);
    steps
}

/// Recursive helper: multiplies the digits of `num`, bumps `index`, and
/// recurses until a single-digit value is reached.
fn multi_persist(num: u64, index: &mut u64) {
    // Idiomatic condition (the old `if(num < 10)` parenthesized the
    // condition C-style and triggered an unused-parens warning).
    if num < 10 {
        return;
    }
    // Product of the decimal digits of `num`; `to_digit(10)` cannot fail
    // on characters produced by `to_string` of an integer.
    let result: u64 = num
        .to_string()
        .chars()
        .fold(1, |acc, c| acc * u64::from(c.to_digit(10).unwrap()));
    *index += 1;
    multi_persist(result, index);
}
/*
// Some "Best practice"
// use String maybe performance-critical sometime
// In fact, it's a rem-product recursion.
pub fn persistence(num: u64) -> u64 {
let mut n = num;
let mut count = 0;
while n > 9 {
n = prod(n);
count +=1;
}
count
}
fn prod(n: u64) -> u64 {
let mut n = n;
let mut prod = 1;
while n > 0 {
prod *= n%10;
n /= 10;
}
prod
}
*/ |
use crate::blob::blob::responses::GetBlockListResponse;
use crate::blob::blob::BlockListType;
use crate::blob::prelude::*;
use azure_core::headers::{add_optional_header, add_optional_header_ref};
use azure_core::prelude::*;
/// Fluent builder for the blob "Get Block List" operation.
pub struct GetBlockListBuilder<'a> {
    blob_client: &'a BlobClient,
    // Which blocks to list; defaults to committed blocks only.
    block_list_type: BlockListType,
    blob_versioning: Option<&'a BlobVersioning>,
    lease_id: Option<&'a LeaseId>,
    client_request_id: Option<ClientRequestId<'a>>,
    timeout: Option<Timeout>,
}
impl<'a> GetBlockListBuilder<'a> {
    /// Starts a builder with default options (committed blocks only).
    pub(crate) fn new(blob_client: &'a BlobClient) -> Self {
        Self {
            blob_client,
            block_list_type: BlockListType::Committed,
            blob_versioning: None,
            lease_id: None,
            client_request_id: None,
            timeout: None,
        }
    }

    // Generates a fluent setter method per optional parameter.
    setters! {
        block_list_type: BlockListType => block_list_type,
        blob_versioning: &'a BlobVersioning => Some(blob_versioning),
        lease_id: &'a LeaseId => Some(lease_id),
        client_request_id: ClientRequestId<'a> => Some(client_request_id),
        timeout: Timeout => Some(timeout),
    }

    /// Issues `GET <blob>?comp=blocklist` and parses the response.
    ///
    /// # Errors
    /// Fails on URL/request construction, transport errors, a non-200
    /// status, or an unparsable response body.
    pub async fn execute(
        &self,
    ) -> Result<GetBlockListResponse, Box<dyn std::error::Error + Send + Sync>> {
        let mut url = self.blob_client.url_with_segments(None)?;
        url.query_pairs_mut().append_pair("comp", "blocklist");
        self.blob_versioning.append_to_url_query(&mut url);
        self.block_list_type.append_to_url_query(&mut url);
        self.timeout.append_to_url_query(&mut url);
        debug!("url == {:?}", url);
        let (request, _url) = self.blob_client.prepare_request(
            url.as_str(),
            &http::Method::GET,
            &|mut request| {
                // Optional headers are only added when set on the builder.
                request = add_optional_header_ref(&self.lease_id, request);
                request = add_optional_header(&self.client_request_id, request);
                request
            },
            None,
        )?;
        let response = self
            .blob_client
            .http_client()
            .execute_request_check_status(request, http::StatusCode::OK)
            .await?;
        debug!("response.headers() == {:#?}", response.headers());
        Ok(GetBlockListResponse::from_response(
            response.headers(),
            response.body(),
        )?)
    }
}
|
use std::thread;
use std::sync::{Mutex, Arc};
use std::collections::HashMap;
/// Counts alphabetic characters (lowercased) across `input`, splitting the
/// work over up to `worker_count` threads.
///
/// The previous version held the counter, result-map and input mutexes for
/// each worker's entire lifetime, so the first thread did all the work
/// serially. Workers now take the next index under a short-lived lock,
/// count into a thread-local map, and merge into the shared map once.
pub fn frequency(input: &[&str], worker_count: usize) -> HashMap<char, usize> {
    let results: Arc<Mutex<HashMap<char, usize>>> = Arc::new(Mutex::new(HashMap::new()));
    let next_index = Arc::new(Mutex::new(0usize));
    // Own the strings so worker threads don't borrow from the caller.
    let texts: Arc<Vec<String>> = Arc::new(input.iter().map(|item| item.to_string()).collect());
    let mut handles = Vec::new();
    // More threads than work items would just spin on an exhausted counter.
    for _ in 0..worker_count.min(input.len()) {
        let next_index = Arc::clone(&next_index);
        let results = Arc::clone(&results);
        let texts = Arc::clone(&texts);
        handles.push(thread::spawn(move || {
            let mut local: HashMap<char, usize> = HashMap::new();
            loop {
                // Claim the next unprocessed string; lock is released at
                // the end of this block so other workers can proceed.
                let index = {
                    let mut counter = next_index.lock().unwrap();
                    if *counter >= texts.len() {
                        break;
                    }
                    let claimed = *counter;
                    *counter += 1;
                    claimed
                };
                for ch in texts[index]
                    .to_lowercase()
                    .chars()
                    .filter(|c| c.is_alphabetic())
                {
                    *local.entry(ch).or_insert(0) += 1;
                }
            }
            // Merge this worker's counts into the shared result once.
            let mut shared = results.lock().unwrap();
            for (ch, count) in local {
                *shared.entry(ch).or_insert(0) += count;
            }
        }));
    }
    for handle in handles {
        handle.join().unwrap();
    }
    let final_counts = results.lock().unwrap();
    final_counts.clone()
}
|
/// One puzzle line: ten unique signal patterns and four output digits,
/// each stored as its raw segment string (e.g. "cfbegad").
#[derive(Clone, Debug)]
pub struct Signal {
    signal_pattern: Vec<String>,
    digits: Vec<String>,
}
/// Parses one `Signal` per input line; patterns and output digits are
/// separated by " | ", entries within each part by single spaces.
#[aoc_generator(day8)]
pub fn input_generator(input: &str) -> Vec<Signal> {
    input
        .lines()
        .map(|l| {
            let mut line = l.split(" | ");
            Signal {
                signal_pattern: line
                    .next()
                    .unwrap()
                    // char pattern instead of one-char &str (clippy::single_char_pattern)
                    .split(' ')
                    .map(|s| s.to_string())
                    .collect(),
                digits: line
                    .next()
                    .unwrap()
                    .split(' ')
                    .map(|d| d.to_string())
                    .collect(),
            }
        })
        .collect()
}
/// Counts output digits that are unambiguously 1, 4, 7 or 8: those use
/// 2, 4, 3 and 7 segments respectively, so the segment count alone
/// identifies them.
#[aoc(day8, part1)]
pub fn solve_part1(input: &Vec<Signal>) -> usize {
    // Iterator sum instead of the old clone + collect-into-Vec + len,
    // which allocated a throwaway vector per line.
    input
        .iter()
        .map(|signal| {
            signal
                .digits
                .iter()
                .filter(|d| matches!(d.len(), 2 | 3 | 4 | 7))
                .count()
        })
        .sum()
}
/// Part 2 is not implemented yet; always returns 0.
#[aoc(day8, part2)]
pub fn solve_part2(_input: &Vec<Signal>) -> i32 {
    let count: i32 = 0;
    // let mut digits: Vec<i32> = vec![];
    // let digit_list = input.iter().map();
    return count;
}
#[cfg(test)]
mod tests {
    use super::*;

    // Worked example from the day 8 puzzle statement.
    const INPUT: &str = r#"be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe
edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc
fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg
fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb
aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea
fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb
dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe
bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef
egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb
gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce"#;

    #[test]
    fn example1() {
        assert_eq!(solve_part1(&input_generator(INPUT)), 26);
    }

    // NOTE(review): `solve_part2` is still a stub returning 0, so this
    // test fails until part 2 is implemented.
    #[test]
    fn example2() {
        assert_eq!(solve_part2(&input_generator(INPUT)), 61229);
    }
}
|
extern crate clean_architecture_rs;
use std::{env};
use actix_web::{
error, get, Responder, guard, middleware, web, App, Error, HttpRequest, HttpResponse, HttpServer,
Result,
};
use clean_architecture_rs::adapter::controller::user_controller::show as user_show;
/// GET / — placeholder handler that always responds with "hoge".
/// The path payload is unused, hence the underscored name.
#[get("/")]
pub async fn home(_info: web::Path<()>) -> impl Responder {
    Some("hoge")
}
#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    // Force request logging on for actix unless overridden by the environment.
    env::set_var("RUST_LOG", "actix_web=debug,actix_server=info");
    env_logger::init();
    // Serve `/` and the user-show route on localhost:8080.
    HttpServer::new(|| App::new()
        .wrap(middleware::Logger::default())
        .service(home)
        .service(user_show))
        .bind("127.0.0.1:8080")?
        .run()
        .await
}
|
use crate::mod_demo::module1;
use crate::mod_demo::module2;
use crate::mod_demo::module3::blah;
/// Demonstrates calling into the sibling `mod_demo` modules.
pub fn doit() {
    // NOTE(review): `fmt!` is not a std macro — presumably exported by one
    // of the project modules via #[macro_use]; confirm it is in scope.
    let value = fmt!(400);
    println!("{:?}", value);
    println!("module4");
    module1::blah::doit();
    module2::blah::doit();
    blah::doit();
}
|
use std::fs;
use math::round;
/// Reads the day-one puzzle input; panics if the file cannot be read.
fn get_input() -> String {
    fs::read_to_string("input/day_one.txt")
        .expect("Something went wrong reading the file")
}
pub fn one() {
let contents = get_input();
let total_fuel: i32 = contents
.lines()
.map(|s| s.parse::<f64>().unwrap())
.map(|v| (round::floor(v / 3.0, 0) - 2.0) as i32)
.sum();
println!("Total fuel needed is {}", total_fuel);
}
/// Day 1 part 2 helper: total fuel for `mass`, counting the fuel's own
/// fuel, iterating until the requirement drops below zero.
fn calc_fuel_all(mass: f64) -> i32 {
    let mut current_mass = mass;
    let mut fuel_needed = 0;
    loop {
        // floor(mass / 3) - 2, via std `f64::floor` instead of the
        // external `math` crate's `round::floor(x, 0)`.
        current_mass = (current_mass / 3.0).floor() - 2.0;
        if current_mass >= 0.0 {
            fuel_needed += current_mass as i32;
        } else {
            break;
        }
    }
    fuel_needed
}
pub fn two() {
let contents = get_input();
let total_fuel: i32 = contents
.lines()
.map(|s| s.parse::<f64>().unwrap())
.map(|v| calc_fuel_all(v))
.sum();
println!("Total fuel needed is {}", total_fuel);
} |
/// Used for numeric types that can be clamped to a minimum
/// and maximum range.
pub trait Clampable {
    /// Returns the min_bound if self is less than min_bound,
    /// returns the max_bound if self is greater than max_bound,
    /// otherwise, returns self.
    fn clamp_range(self, min_bound: Self, max_bound: Self) -> Self;
}

impl Clampable for f32 {
    fn clamp_range(self, min_bound: Self, max_bound: Self) -> Self {
        // Raise to at least the lower bound, then cap at the upper bound.
        let at_least_min = self.max(min_bound);
        at_least_min.min(max_bound)
    }
}
use crate::assembunny::*;
use crate::prelude::*;
/// Part 1: runs the assembunny program on zeroed registers and returns
/// the final value of the first register ("a").
pub fn pt1(program: Vec<Instruction>) -> Result<i64> {
    let mut regs = Registers::default();
    let mut program = Program::new(program)?;
    // No output expected, so the output callback does nothing.
    program.run_to_end(&mut regs, |_| {});
    Ok(regs[0])
}
/// Part 2: same as part 1, but the third register (index 2, "c") starts at 1.
pub fn pt2(program: Vec<Instruction>) -> Result<i64> {
    let mut regs = Registers::default();
    regs[2] = 1;
    let mut program = Program::new(program)?;
    program.run_to_end(&mut regs, |_| {});
    Ok(regs[0])
}
/// Parses an assembunny program, rejecting `tgl`/`out` instructions,
/// which this day's puzzle does not support.
pub fn parse(s: &str) -> IResult<&str, Vec<Instruction>> {
    use parsers::*;
    map_res(parse_assembunny, |instrs| {
        for instr in &instrs {
            match instr {
                Instruction::Toggle(_) | Instruction::Out(_) => return Err(()),
                _ => {}
            }
        }
        Ok(instrs)
    })(s)
}
#[test]
fn day12() -> Result<()> {
    // Example program from the puzzle text; it leaves 42 in register a.
    const EXAMPLE: &'static str = "\
cpy 41 a
inc a
inc a
dec a
jnz a 2
dec a";
    let example: Vec<Instruction> = vec![
        Instruction::Copy(Value::Constant(41), Value::Register(0)),
        Instruction::Increment(0),
        Instruction::Increment(0),
        Instruction::Decrement(0),
        Instruction::JumpIfNotZero(Value::Register(0), Value::Constant(2)),
        Instruction::Decrement(0),
    ];
    test_parse!(parse, EXAMPLE => example);
    test_part!(pt1, example.clone() => 42);
    Ok(())
}
|
use crate::PixelRenderer;
use std::cell::Cell;
/// Reinterprets the renderer's byte pixel buffer as 256*240 `Cell<u32>`
/// pixels. Lifetimes are elided — the return borrows from `pixel_renderer`
/// exactly as before (clippy::needless_lifetimes).
#[allow(clippy::cast_ptr_alignment)]
pub fn pixels_to_u32(pixel_renderer: &PixelRenderer) -> &[Cell<u32>; 256 * 240] {
    debug_assert_eq!((pixel_renderer.get_pixels().as_ptr() as usize) % 64, 0);
    // SAFETY: the 'get_pixels()' byte slice is always aligned to at least 64
    // bytes, making the cast safe. this is ensured by the vulkan implementation
    // itself ('PixelRenderer' uses vulkan). see table 52, 'required limits' in:
    // https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#memory-device-hostaccess
    unsafe { &*(pixel_renderer.get_pixels().as_ptr() as *const _) }
}
/// Prints the formatted message to stderr and exits with status 1; under
/// `cfg(test)` it panics instead, so tests can observe the failure.
macro_rules! error_exit {
    ($($arg:tt)*) => { {
        if cfg!(test) {
            panic!($($arg)*);
        } else {
            eprintln!($($arg)*);
            std::process::exit(1);
        }
    } }
}
/// `println!` that only emits when the "logging" feature is enabled.
macro_rules! logln {
    ($( $args:expr ),*) => { if cfg!(feature = "logging") { println!( $( $args ),* ); } }
}
/// `print!` that only emits when the "logging" feature is enabled.
macro_rules! log {
    ($( $args:expr ),*) => { if cfg!(feature = "logging") { print!( $( $args ),* ); } }
}
|
pub mod file;
pub mod parse;
use clap::{App, Arg};
use file::*;
/// Parses the CLI (required input file, optional output file), assembles
/// the input and writes the result, defaulting the output path to "a.out".
fn main() -> std::io::Result<()> {
    let matches = App::new("My Super Program")
        .version("1.0")
        .author("Kevin K. <kbknapp@gmail.com>")
        .about("Does awesome things")
        .arg(
            Arg::with_name("input")
                .short("i")
                .long("input")
                .help("输入文件")
                .required(true)
                .takes_value(true),
        )
        .arg(
            Arg::with_name("out")
                .short("o")
                .long("out")
                .help("输出文件")
                .takes_value(true),
        )
        .get_matches();
    // "input" is marked required, so clap guarantees it is present here.
    let input = matches.value_of("input").unwrap();
    let out = matches.value_of("out");
    let data = readfile(input);
    // `.expect` directly on the Result (the old `.ok().expect(...)` is
    // clippy::ok_expect and discarded the parse error detail).
    let (_, parsed_data) = parse_file(&data).expect("parse failed");
    let parsed_data = check_symbol(parsed_data).expect("check symbol failed");
    // Default the output path instead of branching on is_some()/unwrap().
    write_to_file(out.unwrap_or("a.out"), parsed_data)
}
|
use std::collections::VecDeque;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use crossterm::event::{poll, read, Event, KeyEvent};
/// A cloneable, thread-safe FIFO queue of events shared between threads.
#[derive(Clone)]
pub struct KeyEventQueue<T: Send + Copy> {
    inner: Arc<Mutex<VecDeque<T>>>,
}

impl<T: Send + Copy> KeyEventQueue<T> {
    /// Creates an empty queue.
    pub fn new() -> Self {
        Self {
            inner: Arc::new(Mutex::new(VecDeque::new())),
        }
    }

    /// Returns the most recent event and discards all older ones.
    pub fn get_latest_event(&self) -> Option<T> {
        // `expect` replaces the verbose if-let/panic dance for a poisoned lock.
        let mut queue = self.inner.lock().expect("poisoned mutex");
        let latest = queue.pop_back();
        queue.clear();
        latest
    }

    /// Drains and returns all queued events in arrival order.
    /// Always `Some` — the `Option` is kept for caller compatibility.
    pub fn get_all_events(&self) -> Option<Vec<T>> {
        let mut queue = self.inner.lock().expect("poisoned mutex");
        // `drain(..)` already empties the queue; the old trailing
        // `queue.clear()` was dead code.
        Some(queue.drain(..).collect())
    }

    /// Appends an event and returns the new queue length.
    fn add_event(&self, event: T) -> usize {
        let mut queue = self.inner.lock().expect("poisoned mutex");
        queue.push_back(event);
        queue.len()
    }
}
/// Polls terminal input forever, pushing every key event into `event_queue`.
/// Mouse and resize events are discarded. Only returns on a poll/read error.
pub fn send_events(event_queue: &KeyEventQueue<KeyEvent>) -> crossterm::Result<()> {
    loop {
        // Short poll timeout keeps the loop responsive without busy-spinning.
        if poll(Duration::from_millis(3))? {
            match read()? {
                // will not block
                Event::Key(event) => {
                    event_queue.add_event(event);
                }
                Event::Mouse(_event) => {}
                Event::Resize(_width, _height) => {}
            }
        }
    }
}
|
fn main() {
    // Number of series terms; deliberately huge, so this run takes a while.
    let n = 16000000000;
    let pi = 3.1415926535897;
    let zeta_2 = zeta(n,2);
    // Basel problem: zeta(2) converges to pi^2 / 6.
    let pi_squared_over_6 = pi*pi / 6.0;
    println!(" zeta(2) = {}", zeta_2);
    println!("pi^2 / 6.0 = {}", pi_squared_over_6);
    println!(" error = {}", pi_squared_over_6 - zeta_2);
}
/// Approximates the Riemann zeta function zeta(a) with the first `n - 1`
/// terms of the series sum over i of 1 / i^a.
fn zeta(n: i64, a: i64) -> f64 {
    let mut total = 0.0;
    for i in 1..n {
        let reciprocal = 1.0 / i as f64;
        // Repeated multiplication instead of powf, which would be slower.
        let mut term = 1.0;
        for _ in 0..a {
            term *= reciprocal;
        }
        total += term;
    }
    total
}
|
use std::convert::TryFrom;
use fieldmask::{FieldMask, FieldMaskInput, Maskable};
/// Minimal two-field struct used to exercise the `Maskable` derive.
#[derive(Debug, PartialEq, Maskable)]
struct Flat {
    a: u32,
    b: u32,
}
// Masking only "b" copies `b` from struct2 and leaves `a` untouched.
#[test]
fn flat() {
    let mut struct1 = Flat { a: 1, b: 2 };
    let struct2 = Flat { a: 3, b: 4 };
    let expected_struct = Flat { a: 1, b: 4 };
    FieldMask::try_from(FieldMaskInput(vec!["b"].into_iter()))
        .expect("unable to deserialize mask")
        .apply(&mut struct1, struct2);
    assert_eq!(struct1, expected_struct);
}
// An empty mask copies nothing; struct1 is unchanged.
#[test]
fn empty_mask() {
    let mut struct1 = Flat { a: 1, b: 2 };
    let struct2 = Flat { a: 3, b: 4 };
    let expected_struct = Flat { a: 1, b: 2 };
    FieldMask::try_from(FieldMaskInput(vec![].into_iter()))
        .expect("unable to deserialize mask")
        .apply(&mut struct1, struct2);
    assert_eq!(struct1, expected_struct);
}
// "a.b" is rejected: Flat has no nested fields, so parsing must fail
// and report the offending entry.
#[test]
fn nested_mask() {
    assert_eq!(
        FieldMask::<Flat>::try_from(FieldMaskInput(vec!["a.b"].into_iter()))
            .expect_err("should fail to parse fieldmask")
            .entry,
        "a.b",
    );
}
|
use juniper::graphql_object;
use super::Context;
/// GraphQL mutation root; currently exposes only a placeholder field.
#[allow(clippy::module_name_repetitions)]
pub struct RootMutation;

#[graphql_object(context = Context)]
impl RootMutation {
    // The bare `&str` return is legal here because the `graphql_object`
    // macro rewrites the impl and supplies the lifetime.
    fn foo() -> &str {
        "hello"
    }
}
/*
#[graphql_object(context = Context, scalar = S)]
impl<S: ScalarValue + Display> Mutation {
fn createHuman(context: &Context, new_human: NewHuman) -> FieldResult<Human, S> {
let db = context
.pool
.get_connection()
.map_err(|e| e.map_scalar_value())?;
let human: Human = db
.insert_human(&new_human)
.map_err(|e| e.map_scalar_value())?;
Ok(human)
}
}
*/
|
// Copyright 2014-2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test fixture for clippy's `collapsible_if` lint: each if/else pairing
// below exercises a case that should (or should not) be collapsed. The
// comments *inside* branches are significant — their presence makes some
// cases non-collapsible — so no comments are added inside the bodies.
#[rustfmt::skip]
#[warn(clippy::collapsible_if)]
fn main() {
    let x = "hello";
    let y = "world";
    if x == "hello" {
        if y == "world" {
            println!("Hello world!");
        }
    }
    if x == "hello" || x == "world" {
        if y == "world" || y == "hello" {
            println!("Hello world!");
        }
    }
    if x == "hello" && x == "world" {
        if y == "world" || y == "hello" {
            println!("Hello world!");
        }
    }
    if x == "hello" || x == "world" {
        if y == "world" && y == "hello" {
            println!("Hello world!");
        }
    }
    if x == "hello" && x == "world" {
        if y == "world" && y == "hello" {
            println!("Hello world!");
        }
    }
    if 42 == 1337 {
        if 'a' != 'A' {
            println!("world!")
        }
    }
    // Collapse `else { if .. }` to `else if ..`
    if x == "hello" {
        print!("Hello ");
    } else {
        if y == "world" {
            println!("world!")
        }
    }
    if x == "hello" {
        print!("Hello ");
    } else {
        if let Some(42) = Some(42) {
            println!("world!")
        }
    }
    if x == "hello" {
        print!("Hello ");
    } else {
        if y == "world" {
            println!("world")
        }
        else {
            println!("!")
        }
    }
    if x == "hello" {
        print!("Hello ");
    } else {
        if let Some(42) = Some(42) {
            println!("world")
        }
        else {
            println!("!")
        }
    }
    if let Some(42) = Some(42) {
        print!("Hello ");
    } else {
        if let Some(42) = Some(42) {
            println!("world")
        }
        else {
            println!("!")
        }
    }
    if let Some(42) = Some(42) {
        print!("Hello ");
    } else {
        if x == "hello" {
            println!("world")
        }
        else {
            println!("!")
        }
    }
    if let Some(42) = Some(42) {
        print!("Hello ");
    } else {
        if let Some(42) = Some(42) {
            println!("world")
        }
        else {
            println!("!")
        }
    }
    // Works because any if with an else statement cannot be collapsed.
    if x == "hello" {
        if y == "world" {
            println!("Hello world!");
        }
    } else {
        println!("Not Hello world");
    }
    if x == "hello" {
        if y == "world" {
            println!("Hello world!");
        } else {
            println!("Hello something else");
        }
    }
    if x == "hello" {
        print!("Hello ");
        if y == "world" {
            println!("world!")
        }
    }
    if true {
    } else {
        assert!(true); // assert! is just an `if`
    }
    // The following tests check for the fix of https://github.com/rust-lang/rust-clippy/issues/798
    if x == "hello" {// Not collapsible
        if y == "world" {
            println!("Hello world!");
        }
    }
    if x == "hello" { // Not collapsible
        if y == "world" {
            println!("Hello world!");
        }
    }
    if x == "hello" {
        // Not collapsible
        if y == "world" {
            println!("Hello world!");
        }
    }
    if x == "hello" {
        if y == "world" { // Collapsible
            println!("Hello world!");
        }
    }
    if x == "hello" {
        print!("Hello ");
    } else {
        // Not collapsible
        if y == "world" {
            println!("world!")
        }
    }
    if x == "hello" {
        print!("Hello ");
    } else {
        // Not collapsible
        if let Some(42) = Some(42) {
            println!("world!")
        }
    }
    if x == "hello" {
        /* Not collapsible */
        if y == "world" {
            println!("Hello world!");
        }
    }
    if x == "hello" { /* Not collapsible */
        if y == "world" {
            println!("Hello world!");
        }
    }
}
|
use std::{
convert::TryFrom,
ffi::{CStr, CString},
fmt::{self, Debug},
fs::{read_to_string, File},
io::Write,
mem::{zeroed, ManuallyDrop},
num::NonZeroU32,
os::raw::{c_char, c_ulong, c_void},
ptr::slice_from_raw_parts_mut,
slice,
str::FromStr,
};
use nom::{
character::complete::{char, digit0, digit1, one_of, space1},
combinator::{map, map_res, opt, recognize},
sequence::{pair, separated_pair},
IResult,
};
use hltas::{read, types::*};
// Select the pre-generated bindgen bindings that match the current target.
cfg_if::cfg_if! {
    // Assume Linux bindings are good for macOS, and Windows GNU are good for Windows MSVC.
    if #[cfg(all(unix, target_arch = "x86_64"))] {
        #[allow(non_camel_case_types, non_snake_case, dead_code, deref_nullptr)]
        #[path = "x86_64-unknown-linux-gnu.rs"]
        pub mod hltas_cpp;
    } else if #[cfg(all(unix, target_arch = "x86"))] {
        #[allow(non_camel_case_types, non_snake_case, dead_code, deref_nullptr)]
        #[path = "i686-unknown-linux-gnu.rs"]
        pub mod hltas_cpp;
    } else if #[cfg(all(windows, target_arch = "x86_64"))] {
        #[allow(non_camel_case_types, non_snake_case, dead_code, deref_nullptr)]
        #[path = "x86_64-pc-windows-gnu.rs"]
        pub mod hltas_cpp;
    } else if #[cfg(all(windows, target_arch = "x86"))] {
        #[allow(non_camel_case_types, non_snake_case, dead_code, deref_nullptr)]
        #[path = "i686-pc-windows-gnu.rs"]
        pub mod hltas_cpp;
    }
}
#[cfg(not(test))]
use hltas_cpp::{
hltas_input_get_frame, hltas_input_get_property, hltas_input_push_frame,
hltas_input_set_error_message, hltas_input_set_property,
};
// Windows cargo tests don't link without these stubs.
// Each stub matches the signature of the corresponding C++ callback and
// panics if actually invoked — the test suite never crosses the FFI boundary.
#[cfg(test)]
pub extern "C" fn hltas_input_set_property(
    _input: *mut c_void,
    _property: *const c_char,
    _value: *const c_char,
) {
    unreachable!()
}
#[cfg(test)]
pub extern "C" fn hltas_input_get_property(
    _input: *const c_void,
    _property: *const c_char,
) -> *const c_char {
    unreachable!()
}
#[cfg(test)]
pub extern "C" fn hltas_input_push_frame(
    _input: *mut c_void,
    _frame: *const hltas_cpp::hltas_frame,
) {
    unreachable!()
}
#[cfg(test)]
pub extern "C" fn hltas_input_get_frame(
    _input: *const c_void,
    _index: usize,
    _frame: *mut hltas_cpp::hltas_frame,
) -> std::os::raw::c_int {
    unreachable!()
}
#[cfg(test)]
pub extern "C" fn hltas_input_set_error_message(_input: *mut c_void, _message: *const c_char) {
    unreachable!()
}
impl From<Button> for hltas_cpp::Button {
#[inline]
fn from(x: Button) -> Self {
use Button::*;
match x {
Forward => Self::FORWARD,
ForwardLeft => Self::FORWARD_LEFT,
Left => Self::LEFT,
BackLeft => Self::BACK_LEFT,
Back => Self::BACK,
BackRight => Self::BACK_RIGHT,
Right => Self::RIGHT,
ForwardRight => Self::FORWARD_RIGHT,
}
}
}
impl From<hltas_cpp::Button> for Button {
#[inline]
fn from(x: hltas_cpp::Button) -> Self {
use hltas_cpp::Button::*;
match x {
FORWARD => Self::Forward,
FORWARD_LEFT => Self::ForwardLeft,
LEFT => Self::Left,
BACK_LEFT => Self::BackLeft,
BACK => Self::Back,
BACK_RIGHT => Self::BackRight,
RIGHT => Self::Right,
FORWARD_RIGHT => Self::ForwardRight,
}
}
}
impl From<StrafeType> for hltas_cpp::StrafeType {
#[inline]
fn from(x: StrafeType) -> Self {
use StrafeType::*;
match x {
MaxAccel => Self::MAXACCEL,
MaxAngle => Self::MAXANGLE,
MaxDeccel => Self::MAXDECCEL,
ConstSpeed => Self::CONSTSPEED,
ConstYawspeed(_) => Self::CONSTYAWSPEED,
}
}
}
impl From<hltas_cpp::StrafeType> for StrafeType {
#[inline]
fn from(x: hltas_cpp::StrafeType) -> Self {
use hltas_cpp::StrafeType::*;
match x {
MAXACCEL => Self::MaxAccel,
MAXANGLE => Self::MaxAngle,
MAXDECCEL => Self::MaxDeccel,
CONSTSPEED => Self::ConstSpeed,
CONSTYAWSPEED => Self::ConstYawspeed(0.),
}
}
}
impl From<read::Context> for hltas_cpp::ErrorCode {
    // Maps a parse-error context from the Rust reader onto the C++ error code
    // that is reported back through `ErrorDescription`.
    #[inline]
    fn from(x: read::Context) -> Self {
        use hltas_cpp::ErrorCode::*;
        use read::Context::*;
        match x {
            ErrorReadingVersion => FAILVER,
            VersionTooHigh => NOTSUPPORTED,
            BothAutoJumpAndDuckTap => BOTHAJDT,
            NoLeaveGroundAction => NOLGAGSTACTION,
            TimesOnLeaveGroundAction => LGAGSTACTIONTIMES,
            NoSaveName => NOSAVENAME,
            NoSeed => NOSEED,
            NoYaw => NOYAW,
            NoButtons => NOBUTTONS,
            NoLGAGSTMinSpeed => NOLGAGSTMINSPEED,
            NoResetSeed => NORESETSEED,
            ErrorParsingLine => FAILFRAME,
            InvalidStrafingAlgorithm => INVALID_ALGORITHM,
            NoConstraints => MISSING_CONSTRAINTS,
            NoPlusMinusBeforeTolerance => NO_PM_IN_TOLERANCE,
            NoFromToParameters => MISSING_ALGORITHM_FROMTO_PARAMETERS,
            NoTo => NO_TO_IN_FROMTO_ALGORITHM,
            NoYawspeed => NO_YAWSPEED,
            UnsupportedConstantYawspeedDir => UNSUPPORTED_YAWSPEED_DIR,
            NegativeYawspeed => NEGATIVE_YAWSPEED_VALUE,
        }
    }
}
impl From<VectorialStrafingConstraints> for hltas_cpp::AlgorithmParameters {
    // Builds the C++ tagged union: `Type` acts as the discriminant, and
    // exactly one variant of the `Parameters` union is initialized to match it.
    // The converse `From` impl and the `Debug` impl rely on this invariant.
    #[inline]
    fn from(x: VectorialStrafingConstraints) -> Self {
        use hltas_cpp::{
            AlgorithmParameters__bindgen_ty_1, AlgorithmParameters__bindgen_ty_1__bindgen_ty_1,
            AlgorithmParameters__bindgen_ty_1__bindgen_ty_2,
            AlgorithmParameters__bindgen_ty_1__bindgen_ty_3,
            AlgorithmParameters__bindgen_ty_1__bindgen_ty_4,
            AlgorithmParameters__bindgen_ty_1__bindgen_ty_5,
            AlgorithmParameters__bindgen_ty_1__bindgen_ty_6, ConstraintsType,
        };
        use VectorialStrafingConstraints::*;
        match x {
            VelocityYaw { tolerance } => Self {
                Type: ConstraintsType::VELOCITY,
                Parameters: AlgorithmParameters__bindgen_ty_1 {
                    Velocity: AlgorithmParameters__bindgen_ty_1__bindgen_ty_1 {
                        Constraints: tolerance as f64,
                    },
                },
            },
            AvgVelocityYaw { tolerance } => Self {
                Type: ConstraintsType::VELOCITY_AVG,
                Parameters: AlgorithmParameters__bindgen_ty_1 {
                    VelocityAvg: AlgorithmParameters__bindgen_ty_1__bindgen_ty_2 {
                        Constraints: tolerance as f64,
                    },
                },
            },
            VelocityYawLocking { tolerance } => Self {
                Type: ConstraintsType::VELOCITY_LOCK,
                Parameters: AlgorithmParameters__bindgen_ty_1 {
                    VelocityLock: AlgorithmParameters__bindgen_ty_1__bindgen_ty_3 {
                        Constraints: tolerance as f64,
                    },
                },
            },
            Yaw { yaw, tolerance } => Self {
                Type: ConstraintsType::YAW,
                Parameters: AlgorithmParameters__bindgen_ty_1 {
                    Yaw: AlgorithmParameters__bindgen_ty_1__bindgen_ty_4 {
                        Yaw: yaw as f64,
                        Constraints: tolerance as f64,
                    },
                },
            },
            YawRange { from, to } => Self {
                Type: ConstraintsType::YAW_RANGE,
                Parameters: AlgorithmParameters__bindgen_ty_1 {
                    YawRange: AlgorithmParameters__bindgen_ty_1__bindgen_ty_5 {
                        LowestYaw: from as f64,
                        HighestYaw: to as f64,
                    },
                },
            },
            LookAt { entity, x, y, z } => Self {
                Type: ConstraintsType::LOOK_AT,
                Parameters: AlgorithmParameters__bindgen_ty_1 {
                    LookAt: AlgorithmParameters__bindgen_ty_1__bindgen_ty_6 {
                        // `0` encodes "no entity" on the C++ side, so the
                        // `NonZeroU32` round-trips losslessly.
                        Entity: match entity {
                            Some(number) => number.get(),
                            None => 0,
                        },
                        X: x as f64,
                        Y: y as f64,
                        Z: z as f64,
                    },
                },
            },
        }
    }
}
impl From<hltas_cpp::AlgorithmParameters> for VectorialStrafingConstraints {
    // Converse of the impl above: decodes the tagged union back into the
    // Rust enum.
    #[inline]
    fn from(x: hltas_cpp::AlgorithmParameters) -> Self {
        use hltas_cpp::ConstraintsType;
        // SAFETY: `Type` tags which union field was initialized (see the
        // converse `From` impl); only the matching field is read here.
        unsafe {
            match x.Type {
                ConstraintsType::VELOCITY => Self::VelocityYaw {
                    tolerance: x.Parameters.Velocity.Constraints as f32,
                },
                ConstraintsType::VELOCITY_AVG => Self::AvgVelocityYaw {
                    tolerance: x.Parameters.VelocityAvg.Constraints as f32,
                },
                ConstraintsType::VELOCITY_LOCK => Self::VelocityYawLocking {
                    tolerance: x.Parameters.VelocityLock.Constraints as f32,
                },
                ConstraintsType::YAW => Self::Yaw {
                    yaw: x.Parameters.Yaw.Yaw as f32,
                    tolerance: x.Parameters.Yaw.Constraints as f32,
                },
                ConstraintsType::YAW_RANGE => Self::YawRange {
                    from: x.Parameters.YawRange.LowestYaw as f32,
                    to: x.Parameters.YawRange.HighestYaw as f32,
                },
                ConstraintsType::LOOK_AT => Self::LookAt {
                    // `NonZeroU32::new` turns the C++ "no entity" sentinel 0
                    // back into `None`.
                    entity: NonZeroU32::new(x.Parameters.LookAt.Entity),
                    x: x.Parameters.LookAt.X as f32,
                    y: x.Parameters.LookAt.Y as f32,
                    z: x.Parameters.LookAt.Z as f32,
                },
            }
        }
    }
}
impl From<ChangeTarget> for hltas_cpp::ChangeTarget {
#[inline]
fn from(x: ChangeTarget) -> Self {
use ChangeTarget::*;
match x {
Yaw => Self::YAW,
Pitch => Self::PITCH,
VectorialStrafingYaw => Self::TARGET_YAW,
VectorialStrafingYawOffset => Self::TARGET_YAW_OFFSET,
}
}
}
impl From<hltas_cpp::ChangeTarget> for ChangeTarget {
#[inline]
fn from(x: hltas_cpp::ChangeTarget) -> Self {
use hltas_cpp::ChangeTarget::*;
match x {
YAW => Self::Yaw,
PITCH => Self::Pitch,
TARGET_YAW => Self::VectorialStrafingYaw,
TARGET_YAW_OFFSET => Self::VectorialStrafingYawOffset,
}
}
}
impl Default for hltas_cpp::StrafingAlgorithm {
    // Plain yaw strafing is the default algorithm.
    #[inline]
    fn default() -> Self {
        Self::YAW
    }
}
impl Default for hltas_cpp::AlgorithmParameters {
    // Default: a yaw constraint centered at 0 with a tolerance of 180 degrees.
    #[inline]
    fn default() -> Self {
        Self {
            Type: hltas_cpp::ConstraintsType::YAW,
            Parameters: hltas_cpp::AlgorithmParameters__bindgen_ty_1 {
                Yaw: hltas_cpp::AlgorithmParameters__bindgen_ty_1__bindgen_ty_4 {
                    Yaw: 0.,
                    Constraints: 180.,
                },
            },
        }
    }
}
impl Debug for hltas_cpp::AlgorithmParameters {
    // Manual Debug: derive is impossible because only one union field is
    // valid at a time; print the field selected by `Type`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("AlgorithmParameters");
        builder.field("Type", &self.Type);
        use hltas_cpp::ConstraintsType;
        // SAFETY: `Type` tags which union field is initialized; only that
        // field is read here.
        let field: &dyn Debug = unsafe {
            match self.Type {
                ConstraintsType::VELOCITY => &self.Parameters.Velocity,
                ConstraintsType::VELOCITY_AVG => &self.Parameters.VelocityAvg,
                ConstraintsType::VELOCITY_LOCK => &self.Parameters.VelocityLock,
                ConstraintsType::YAW => &self.Parameters.Yaw,
                ConstraintsType::YAW_RANGE => &self.Parameters.YawRange,
                ConstraintsType::LOOK_AT => &self.Parameters.LookAt,
            }
        };
        builder.field("Parameters", field);
        builder.finish()
    }
}
impl Default for hltas_cpp::Button {
    // Forward is the default button for every slot.
    #[inline]
    fn default() -> Self {
        Self::FORWARD
    }
}
// NOTE(review): clippy flags this as derivable; kept manual presumably
// because the type is generated by bindgen in a separate file.
#[allow(clippy::derivable_impls)]
impl Default for hltas_cpp::StrafeButtons {
    #[inline]
    fn default() -> Self {
        Self {
            AirLeft: hltas_cpp::Button::default(),
            AirRight: hltas_cpp::Button::default(),
            GroundLeft: hltas_cpp::Button::default(),
            GroundRight: hltas_cpp::Button::default(),
        }
    }
}
// Copied from hltas::read.
/// Parses a decimal `NonZeroU32`: a nonzero leading digit followed by any
/// number of digits (leading zeros are rejected by `one_of`).
fn non_zero_u32(i: &str) -> IResult<&str, NonZeroU32> {
    map_res(
        recognize(pair(one_of("123456789"), digit0)),
        NonZeroU32::from_str,
    )(i)
}
// Three functions copied from hltas::read::properties.
/// Parses the shared RNG seed (unsigned decimal).
fn shared_seed(i: &str) -> IResult<&str, u32> {
    map_res(digit1, u32::from_str)(i)
}
/// Parses the non-shared RNG seed (optionally negative decimal).
fn non_shared_seed(i: &str) -> IResult<&str, i64> {
    map_res(recognize(pair(opt(char('-')), digit1)), i64::from_str)(i)
}
/// Parses a `"<shared> <non_shared>"` seed pair into `Seeds`.
fn seeds(i: &str) -> IResult<&str, Seeds> {
    map(
        separated_pair(shared_seed, space1, non_shared_seed),
        |(shared, non_shared)| Seeds { shared, non_shared },
    )(i)
}
/// Data which a `hltas_frame` has pointers to.
///
/// Keeping this alive keeps the raw pointers stored in the corresponding
/// `hltas_frame` valid; the fields are never read directly, only owned.
#[derive(Default)]
pub struct AllocatedData {
    // Backing storage for `hltas_frame::Frametime`.
    #[allow(dead_code)]
    frame_time: Option<CString>,
    // Backing storage for `hltas_frame::Commands`.
    #[allow(dead_code)]
    console_command: Option<CString>,
    // Backing storage for `hltas_frame::SaveName`.
    #[allow(dead_code)]
    save_name: Option<CString>,
    // Backing storage for `hltas_frame::TargetYawOverride` or
    // `hltas_frame::RenderYawOverride` (at most one is set per frame).
    #[allow(dead_code)]
    yaws: Option<Box<[f32]>>,
}
/// Converts a non-comment line to a `hltas_frame`.
///
/// `AllocatedData` contains the heap data to which the pointers in `hltas_frame` point.
/// (The doc previously said `AllocatedStrings`; the type is named `AllocatedData`.)
///
/// # Safety
///
/// `AllocatedData` must not be dropped before the last use of `hltas_frame`.
///
/// # Panics
///
/// Panics if `line` is `Line::Comment`.
pub unsafe fn hltas_frame_from_non_comment_line(
    line: &Line,
) -> (hltas_cpp::hltas_frame, ManuallyDrop<AllocatedData>) {
    // Start from an all-zero frame; only the fields relevant to `line` are
    // filled in below (zero is the "absent" state for the C++ struct).
    let mut frame: hltas_cpp::hltas_frame = zeroed();
    let mut allocated = AllocatedData::default();
    match line {
        Line::Comment(_) => panic!("can't convert a comment line"),
        Line::FrameBulk(frame_bulk) => {
            // Auto movement: either an explicit yaw or strafing settings.
            match frame_bulk.auto_actions.movement {
                Some(AutoMovement::SetYaw(yaw)) => {
                    frame.YawPresent = true;
                    frame.Yaw = f64::from(yaw);
                }
                Some(AutoMovement::Strafe(StrafeSettings { type_, dir })) => {
                    frame.Strafe = true;
                    frame.Type = type_.into();
                    // The `ConstYawspeed` payload is not representable in the
                    // C++ enum, so it travels in a separate frame field.
                    if let StrafeType::ConstYawspeed(yawspeed) = type_ {
                        frame.YawPresent = true;
                        frame.Yawspeed = f64::from(yawspeed);
                    }
                    match dir {
                        StrafeDir::Left => {
                            frame.Dir = hltas_cpp::StrafeDir::LEFT;
                        }
                        StrafeDir::Right => {
                            frame.Dir = hltas_cpp::StrafeDir::RIGHT;
                        }
                        StrafeDir::Best => {
                            frame.Dir = hltas_cpp::StrafeDir::BEST;
                        }
                        StrafeDir::Yaw(yaw) => {
                            frame.Dir = hltas_cpp::StrafeDir::YAW;
                            frame.YawPresent = true;
                            frame.Yaw = f64::from(yaw);
                        }
                        StrafeDir::Point { x, y } => {
                            frame.Dir = hltas_cpp::StrafeDir::POINT;
                            frame.YawPresent = true;
                            frame.X = f64::from(x);
                            frame.Y = f64::from(y);
                        }
                        StrafeDir::Line { yaw } => {
                            frame.Dir = hltas_cpp::StrafeDir::LINE;
                            frame.YawPresent = true;
                            frame.Yaw = f64::from(yaw);
                        }
                        StrafeDir::LeftRight(count) => {
                            frame.Dir = hltas_cpp::StrafeDir::LEFT_RIGHT;
                            frame.YawPresent = true;
                            frame.Count = count.get();
                        }
                        StrafeDir::RightLeft(count) => {
                            frame.Dir = hltas_cpp::StrafeDir::RIGHT_LEFT;
                            frame.YawPresent = true;
                            frame.Count = count.get();
                        }
                    }
                }
                None => {}
            }
            if let Some(leave_ground_action) = frame_bulk.auto_actions.leave_ground_action {
                match leave_ground_action.speed {
                    LeaveGroundActionSpeed::Optimal => frame.Lgagst = true,
                    LeaveGroundActionSpeed::OptimalWithFullMaxspeed => {
                        frame.Lgagst = true;
                        frame.LgagstFullMaxspeed = true;
                    }
                    LeaveGroundActionSpeed::Any => {}
                }
                // With LGAGST the repeat count lives in LgagstTimes; otherwise
                // it goes in the per-action Times field below.
                if frame.Lgagst {
                    frame.LgagstTimes = leave_ground_action.times.into();
                }
                match leave_ground_action.type_ {
                    LeaveGroundActionType::Jump => {
                        frame.Autojump = true;
                        if !frame.Lgagst {
                            frame.AutojumpTimes = leave_ground_action.times.into();
                        }
                    }
                    LeaveGroundActionType::DuckTap { zero_ms } => {
                        frame.Ducktap = true;
                        frame.Ducktap0ms = zero_ms;
                        if !frame.Lgagst {
                            frame.DucktapTimes = leave_ground_action.times.into();
                        }
                    }
                }
            }
            if let Some(JumpBug { times }) = frame_bulk.auto_actions.jump_bug {
                frame.Jumpbug = true;
                frame.JumpbugTimes = times.into();
            }
            if let Some(DuckBeforeCollision {
                including_ceilings,
                times,
            }) = frame_bulk.auto_actions.duck_before_collision
            {
                frame.Dbc = true;
                frame.DbcCeilings = including_ceilings;
                frame.DbcTimes = times.into();
            }
            if let Some(DuckBeforeGround { times }) = frame_bulk.auto_actions.duck_before_ground {
                frame.Dbg = true;
                frame.DbgTimes = times.into();
            }
            if let Some(DuckWhenJump { times }) = frame_bulk.auto_actions.duck_when_jump {
                frame.Dwj = true;
                frame.DwjTimes = times.into();
            }
            // Plain key states copy over one-to-one.
            frame.Forward = frame_bulk.movement_keys.forward;
            frame.Left = frame_bulk.movement_keys.left;
            frame.Right = frame_bulk.movement_keys.right;
            frame.Back = frame_bulk.movement_keys.back;
            frame.Up = frame_bulk.movement_keys.up;
            frame.Down = frame_bulk.movement_keys.down;
            frame.Jump = frame_bulk.action_keys.jump;
            frame.Duck = frame_bulk.action_keys.duck;
            frame.Use = frame_bulk.action_keys.use_;
            frame.Attack1 = frame_bulk.action_keys.attack_1;
            frame.Attack2 = frame_bulk.action_keys.attack_2;
            frame.Reload = frame_bulk.action_keys.reload;
            // The CString outlives the frame via `allocated` — see # Safety.
            let frame_time = CString::new(frame_bulk.frame_time.to_string()).unwrap();
            frame.Frametime = frame_time.as_ptr();
            allocated.frame_time = Some(frame_time);
            if let Some(pitch) = frame_bulk.pitch {
                frame.PitchPresent = true;
                frame.Pitch = f64::from(pitch);
            }
            frame.Repeats = frame_bulk.frame_count.get();
            if let Some(console_command) = frame_bulk.console_command.as_ref() {
                let console_command_cstring = CString::new(console_command.to_string()).unwrap();
                frame.Commands = console_command_cstring.as_ptr();
                allocated.console_command = Some(console_command_cstring);
            }
        }
        Line::Save(save_name) => {
            let save_name = CString::new(save_name.to_string()).unwrap();
            frame.SaveName = save_name.as_ptr();
            allocated.save_name = Some(save_name);
        }
        Line::SharedSeed(seed) => {
            frame.SeedPresent = true;
            frame.Seed = *seed;
        }
        Line::Buttons(buttons) => match *buttons {
            Buttons::Reset => frame.BtnState = hltas_cpp::ButtonState::CLEAR,
            Buttons::Set {
                air_left,
                air_right,
                ground_left,
                ground_right,
            } => {
                frame.BtnState = hltas_cpp::ButtonState::SET;
                frame.Buttons.AirLeft = air_left.into();
                frame.Buttons.AirRight = air_right.into();
                frame.Buttons.GroundLeft = ground_left.into();
                frame.Buttons.GroundRight = ground_right.into();
            }
        },
        Line::LGAGSTMinSpeed(lgagst_min_speed) => {
            frame.LgagstMinSpeedPresent = true;
            frame.LgagstMinSpeed = *lgagst_min_speed;
        }
        Line::Reset { non_shared_seed } => {
            frame.ResetFrame = true;
            frame.ResetNonSharedRNGSeed = *non_shared_seed;
        }
        Line::VectorialStrafing(enabled) => {
            frame.StrafingAlgorithmPresent = true;
            frame.Algorithm = if *enabled {
                hltas_cpp::StrafingAlgorithm::VECTORIAL
            } else {
                hltas_cpp::StrafingAlgorithm::YAW
            };
        }
        Line::VectorialStrafingConstraints(constraints) => {
            frame.AlgorithmParametersPresent = true;
            frame.Parameters = (*constraints).into();
        }
        Line::Change(change) => {
            frame.ChangePresent = true;
            frame.Target = change.target.into();
            frame.ChangeFinalValue = change.final_value;
            frame.ChangeOver = change.over;
        }
        Line::TargetYawOverride(yaws) => {
            let yaws: Box<[f32]> = yaws.to_owned().into();
            frame.TargetYawOverride = yaws.as_ptr();
            frame.TargetYawOverrideCount = yaws.len();
            allocated.yaws = Some(yaws);
        }
        Line::RenderYawOverride(yaws) => {
            let yaws: Box<[f32]> = yaws.to_owned().into();
            frame.RenderYawOverride = yaws.as_ptr();
            frame.RenderYawOverrideCount = yaws.len();
            allocated.yaws = Some(yaws);
        }
    }
    // ManuallyDrop: the caller decides when the backing data may be freed.
    (frame, ManuallyDrop::new(allocated))
}
/// Parses `script` and pushes its properties and frames into the C++ `input`.
///
/// On parse failure, forwards the error message and returns the mapped error
/// code and line number.
///
/// # Safety
///
/// `input` must be a valid `HLTAS::Input`.
unsafe fn hltas_rs_from_str(input: *mut c_void, script: &str) -> hltas_cpp::ErrorDescription {
    match HLTAS::from_str(script) {
        Ok(hltas) => {
            // Forward each present property to the C++ side as a key/value
            // pair of null-terminated strings.
            if let Some(demo) = hltas.properties.demo {
                let demo = CString::new(demo).unwrap();
                hltas_input_set_property(
                    input,
                    b"demo\0" as *const u8 as *const c_char,
                    demo.as_ptr(),
                );
            }
            if let Some(save) = hltas.properties.save {
                let save = CString::new(save).unwrap();
                hltas_input_set_property(
                    input,
                    b"save\0" as *const u8 as *const c_char,
                    save.as_ptr(),
                );
            }
            if let Some(frametime_0ms) = hltas.properties.frametime_0ms {
                let frametime_0ms = CString::new(frametime_0ms).unwrap();
                hltas_input_set_property(
                    input,
                    b"frametime0ms\0" as *const u8 as *const c_char,
                    frametime_0ms.as_ptr(),
                );
            }
            if let Some(seeds) = hltas.properties.seeds {
                // Both seeds are packed into one space-separated value.
                let seeds = format!("{} {}", seeds.shared, seeds.non_shared);
                let seeds = CString::new(seeds).unwrap();
                hltas_input_set_property(
                    input,
                    b"seed\0" as *const u8 as *const c_char,
                    seeds.as_ptr(),
                );
            }
            if let Some(hlstrafe_version) = hltas.properties.hlstrafe_version {
                let hlstrafe_version = CString::new(hlstrafe_version.to_string()).unwrap();
                hltas_input_set_property(
                    input,
                    b"hlstrafe_version\0" as *const u8 as *const c_char,
                    hlstrafe_version.as_ptr(),
                );
            }
            if let Some(load_command) = hltas.properties.load_command {
                let load_command = CString::new(load_command).unwrap();
                hltas_input_set_property(
                    input,
                    b"load_command\0" as *const u8 as *const c_char,
                    load_command.as_ptr(),
                );
            }
            // Comment lines are buffered and attached to the next non-comment
            // frame (newline-separated).
            let mut comments = String::new();
            for line in hltas.lines {
                match line {
                    Line::Comment(comment) => {
                        comments.push_str(&comment);
                        comments.push('\n');
                    }
                    line => {
                        let (mut frame, mut allocated) = hltas_frame_from_non_comment_line(&line);
                        let comments_cstring = CString::new(comments).unwrap();
                        frame.Comments = comments_cstring.as_ptr();
                        comments = String::new();
                        hltas_input_push_frame(input, &frame);
                        // NOTE(review): assumes `hltas_input_push_frame`
                        // copies everything it needs out of `frame`; the
                        // backing allocations are freed right after the call.
                        ManuallyDrop::drop(&mut allocated);
                    }
                }
            }
            hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::OK,
                LineNumber: 0,
            }
        }
        Err(error) => {
            // Errors with no specific context map to the generic FAILLINE.
            let code = error
                .context
                .map(hltas_cpp::ErrorCode::from)
                .unwrap_or(hltas_cpp::ErrorCode::FAILLINE);
            let message = format!("{}", error);
            // An interior NUL would make CString::new fail; the message is
            // simply skipped in that case.
            if let Ok(message) = CString::new(message) {
                hltas_input_set_error_message(input, message.as_ptr());
            }
            hltas_cpp::ErrorDescription {
                Code: code,
                LineNumber: error.line() as u32,
            }
        }
    }
}
/// Parses the HLTAS from `script` and writes it into `input`.
///
/// This is meant to be used internally from the C++ HLTAS library.
///
/// # Safety
///
/// `input` must be a valid `HLTAS::Input`, `script` must be a valid null-terminated string.
#[no_mangle]
pub unsafe extern "C" fn hltas_rs_from_string(
    input: *mut c_void,
    script: *const c_char,
) -> hltas_cpp::ErrorDescription {
    // A script that is not valid UTF-8 is reported as a failure to open.
    if let Ok(script) = CStr::from_ptr(script).to_str() {
        hltas_rs_from_str(input, script)
    } else {
        hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILOPEN,
            LineNumber: 0,
        }
    }
}
/// Reads the HLTAS from `filename` and writes it into `input`.
///
/// This is meant to be used internally from the C++ HLTAS library.
///
/// # Safety
///
/// `input` must be a valid `HLTAS::Input`, `filename` must be a valid null-terminated string.
#[no_mangle]
pub unsafe extern "C" fn hltas_rs_read(
    input: *mut c_void,
    filename: *const c_char,
) -> hltas_cpp::ErrorDescription {
    // Decode the filename, then read the file; either failure is FAILOPEN.
    match CStr::from_ptr(filename).to_str() {
        Ok(filename) => match read_to_string(filename) {
            Ok(contents) => hltas_rs_from_str(input, &contents),
            Err(_) => hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILOPEN,
                LineNumber: 0,
            },
        },
        Err(_) => hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILOPEN,
            LineNumber: 0,
        },
    }
}
/// Rebuilds an `HLTAS` from the C++ `input` and serializes it into `writer`.
///
/// Every decode failure (bad UTF-8, unparsable property, zero repeat count)
/// is reported as `FAILWRITE` with line number 0.
///
/// # Safety
///
/// `input` must be a valid `HLTAS::Input`.
unsafe fn hltas_rs_to_writer(
    input: *const c_void,
    writer: impl Write,
) -> hltas_cpp::ErrorDescription {
    // Fetch each known property. A null pointer means the property is absent;
    // a pointer that is not valid UTF-8 (or fails to parse) aborts the write.
    let demo = hltas_input_get_property(input, b"demo\0" as *const u8 as *const c_char);
    let demo = if demo.is_null() {
        None
    } else if let Ok(demo) = CStr::from_ptr(demo).to_str() {
        Some(demo)
    } else {
        return hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        };
    };
    let save = hltas_input_get_property(input, b"save\0" as *const u8 as *const c_char);
    let save = if save.is_null() {
        None
    } else if let Ok(save) = CStr::from_ptr(save).to_str() {
        Some(save)
    } else {
        return hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        };
    };
    let seed = hltas_input_get_property(input, b"seed\0" as *const u8 as *const c_char);
    let seeds = if seed.is_null() {
        None
    } else if let Ok(seed) = CStr::from_ptr(seed).to_str() {
        // The property is a "<shared> <non_shared>" pair; see `seeds()`.
        if let Ok((_, seeds)) = seeds(seed) {
            Some(seeds)
        } else {
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            };
        }
    } else {
        return hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        };
    };
    let frametime_0ms =
        hltas_input_get_property(input, b"frametime0ms\0" as *const u8 as *const c_char);
    let frametime_0ms = if frametime_0ms.is_null() {
        None
    } else if let Ok(frametime_0ms) = CStr::from_ptr(frametime_0ms).to_str() {
        Some(frametime_0ms)
    } else {
        return hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        };
    };
    let hlstrafe_version =
        hltas_input_get_property(input, b"hlstrafe_version\0" as *const u8 as *const c_char);
    let hlstrafe_version = if hlstrafe_version.is_null() {
        None
    } else if let Ok(hlstrafe_version) = CStr::from_ptr(hlstrafe_version).to_str() {
        if let Ok((_, hlstrafe_version)) = non_zero_u32(hlstrafe_version) {
            Some(hlstrafe_version)
        } else {
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            };
        }
    } else {
        return hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        };
    };
    let load_command =
        hltas_input_get_property(input, b"load_command\0" as *const u8 as *const c_char);
    let load_command = if load_command.is_null() {
        None
    } else if let Ok(load_command) = CStr::from_ptr(load_command).to_str() {
        Some(load_command)
    } else {
        return hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        };
    };
    let mut hltas = HLTAS {
        properties: Properties {
            demo: demo.map(ToOwned::to_owned),
            save: save.map(ToOwned::to_owned),
            seeds,
            frametime_0ms: frametime_0ms.map(ToOwned::to_owned),
            hlstrafe_version,
            load_command: load_command.map(ToOwned::to_owned),
        },
        lines: Vec::new(),
    };
    // Pull frames one by one until the C++ side reports the end (non-zero).
    let mut index = 0;
    loop {
        let mut frame = zeroed();
        if hltas_input_get_frame(input, index, &mut frame) != 0 {
            break;
        }
        index += 1;
        // Comments attached to this frame become separate comment lines.
        if !frame.Comments.is_null() {
            let comments = if let Ok(comments) = CStr::from_ptr(frame.Comments).to_str() {
                comments
            } else {
                return hltas_cpp::ErrorDescription {
                    Code: hltas_cpp::ErrorCode::FAILWRITE,
                    LineNumber: 0,
                };
            };
            for line in comments.lines() {
                hltas.lines.push(Line::Comment(line.to_owned()));
            }
        }
        // Non-framebulk line kinds are mutually exclusive per frame; each one
        // emits its line and skips the framebulk decoding below.
        if !frame.SaveName.is_null() {
            let save = if let Ok(save) = CStr::from_ptr(frame.SaveName).to_str() {
                save
            } else {
                return hltas_cpp::ErrorDescription {
                    Code: hltas_cpp::ErrorCode::FAILWRITE,
                    LineNumber: 0,
                };
            };
            hltas.lines.push(Line::Save(save.to_owned()));
            continue;
        }
        if frame.SeedPresent {
            hltas.lines.push(Line::SharedSeed(frame.Seed));
            continue;
        }
        if frame.BtnState != hltas_cpp::ButtonState::NOTHING {
            let line = if frame.BtnState == hltas_cpp::ButtonState::SET {
                Line::Buttons(Buttons::Set {
                    air_left: frame.Buttons.AirLeft.into(),
                    air_right: frame.Buttons.AirRight.into(),
                    ground_left: frame.Buttons.GroundLeft.into(),
                    ground_right: frame.Buttons.GroundRight.into(),
                })
            } else {
                Line::Buttons(Buttons::Reset)
            };
            hltas.lines.push(line);
            continue;
        }
        if frame.LgagstMinSpeedPresent {
            hltas.lines.push(Line::LGAGSTMinSpeed(frame.LgagstMinSpeed));
            continue;
        }
        if frame.ResetFrame {
            hltas.lines.push(Line::Reset {
                non_shared_seed: frame.ResetNonSharedRNGSeed,
            });
            continue;
        }
        if frame.StrafingAlgorithmPresent {
            hltas.lines.push(Line::VectorialStrafing(
                frame.Algorithm == hltas_cpp::StrafingAlgorithm::VECTORIAL,
            ));
            continue;
        }
        if frame.AlgorithmParametersPresent {
            hltas
                .lines
                .push(Line::VectorialStrafingConstraints(frame.Parameters.into()));
            continue;
        }
        if frame.ChangePresent {
            hltas.lines.push(Line::Change(Change {
                target: frame.Target.into(),
                final_value: frame.ChangeFinalValue,
                over: frame.ChangeOver,
            }));
            continue;
        }
        if frame.TargetYawOverrideCount != 0 {
            let yaws = slice::from_raw_parts(frame.TargetYawOverride, frame.TargetYawOverrideCount);
            hltas.lines.push(Line::TargetYawOverride(yaws.to_owned()));
            continue;
        }
        if frame.RenderYawOverrideCount != 0 {
            let yaws = slice::from_raw_parts(frame.RenderYawOverride, frame.RenderYawOverrideCount);
            hltas.lines.push(Line::RenderYawOverride(yaws.to_owned()));
            continue;
        }
        // Otherwise this frame is a framebulk; decode all of its parts.
        let movement = if frame.Strafe {
            use hltas_cpp::StrafeDir::*;
            Some(AutoMovement::Strafe(StrafeSettings {
                type_: match frame.Type {
                    // ConstYawspeed carries its payload in a separate field,
                    // so the plain enum conversion is not enough here.
                    hltas_cpp::StrafeType::CONSTYAWSPEED => {
                        StrafeType::ConstYawspeed(frame.Yawspeed as f32)
                    }
                    _ => frame.Type.into(),
                },
                dir: match frame.Dir {
                    LEFT => StrafeDir::Left,
                    RIGHT => StrafeDir::Right,
                    BEST => StrafeDir::Best,
                    YAW => StrafeDir::Yaw(frame.Yaw as f32),
                    POINT => StrafeDir::Point {
                        x: frame.X as f32,
                        y: frame.Y as f32,
                    },
                    LINE => StrafeDir::Line {
                        yaw: frame.Yaw as f32,
                    },
                    LEFT_RIGHT => {
                        // A zero count is invalid on the Rust side.
                        StrafeDir::LeftRight(if let Some(count) = NonZeroU32::new(frame.Count) {
                            count
                        } else {
                            return hltas_cpp::ErrorDescription {
                                Code: hltas_cpp::ErrorCode::FAILWRITE,
                                LineNumber: 0,
                            };
                        })
                    }
                    RIGHT_LEFT => {
                        StrafeDir::RightLeft(if let Some(count) = NonZeroU32::new(frame.Count) {
                            count
                        } else {
                            return hltas_cpp::ErrorDescription {
                                Code: hltas_cpp::ErrorCode::FAILWRITE,
                                LineNumber: 0,
                            };
                        })
                    }
                },
            }))
        } else if frame.YawPresent {
            Some(AutoMovement::SetYaw(frame.Yaw as f32))
        } else {
            None
        };
        // Mirror of the encoding in `hltas_frame_from_non_comment_line`:
        // with LGAGST the times come from LgagstTimes.
        let leave_ground_action = if frame.Lgagst {
            let speed = if frame.LgagstFullMaxspeed {
                LeaveGroundActionSpeed::OptimalWithFullMaxspeed
            } else {
                LeaveGroundActionSpeed::Optimal
            };
            if frame.Autojump {
                Some(LeaveGroundAction {
                    speed,
                    times: frame.LgagstTimes.into(),
                    type_: LeaveGroundActionType::Jump,
                })
            } else {
                Some(LeaveGroundAction {
                    speed,
                    times: frame.LgagstTimes.into(),
                    type_: LeaveGroundActionType::DuckTap {
                        zero_ms: frame.Ducktap0ms,
                    },
                })
            }
        } else if frame.Autojump {
            Some(LeaveGroundAction {
                speed: LeaveGroundActionSpeed::Any,
                times: frame.AutojumpTimes.into(),
                type_: LeaveGroundActionType::Jump,
            })
        } else if frame.Ducktap {
            Some(LeaveGroundAction {
                speed: LeaveGroundActionSpeed::Any,
                times: frame.DucktapTimes.into(),
                type_: LeaveGroundActionType::DuckTap {
                    zero_ms: frame.Ducktap0ms,
                },
            })
        } else {
            None
        };
        let jump_bug = if frame.Jumpbug {
            Some(JumpBug {
                times: frame.JumpbugTimes.into(),
            })
        } else {
            None
        };
        let duck_before_collision = if frame.Dbc {
            Some(DuckBeforeCollision {
                times: frame.DbcTimes.into(),
                including_ceilings: frame.DbcCeilings,
            })
        } else {
            None
        };
        let duck_before_ground = if frame.Dbg {
            Some(DuckBeforeGround {
                times: frame.DbgTimes.into(),
            })
        } else {
            None
        };
        let duck_when_jump = if frame.Dwj {
            Some(DuckWhenJump {
                times: frame.DwjTimes.into(),
            })
        } else {
            None
        };
        let forward = frame.Forward;
        let left = frame.Left;
        let right = frame.Right;
        let back = frame.Back;
        let up = frame.Up;
        let down = frame.Down;
        let jump = frame.Jump;
        let duck = frame.Duck;
        let use_ = frame.Use;
        let attack_1 = frame.Attack1;
        let attack_2 = frame.Attack2;
        let reload = frame.Reload;
        let frame_time = if let Ok(frametime) = CStr::from_ptr(frame.Frametime).to_str() {
            frametime
        } else {
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            };
        };
        let pitch = if frame.PitchPresent {
            Some(frame.Pitch as f32)
        } else {
            None
        };
        let frame_count = if let Some(frame_count) = NonZeroU32::new(frame.Repeats) {
            frame_count
        } else {
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            };
        };
        let console_command = if frame.Commands.is_null() {
            None
        } else if let Ok(commands) = CStr::from_ptr(frame.Commands).to_str() {
            Some(commands)
        } else {
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            };
        };
        let frame_bulk = FrameBulk {
            auto_actions: AutoActions {
                movement,
                leave_ground_action,
                jump_bug,
                duck_before_collision,
                duck_before_ground,
                duck_when_jump,
            },
            movement_keys: MovementKeys {
                forward,
                left,
                right,
                back,
                up,
                down,
            },
            action_keys: ActionKeys {
                jump,
                duck,
                use_,
                attack_1,
                attack_2,
                reload,
            },
            frame_time: frame_time.to_owned(),
            pitch,
            frame_count,
            console_command: console_command.map(ToOwned::to_owned),
        };
        hltas.lines.push(Line::FrameBulk(frame_bulk));
    }
    if hltas.to_writer(writer).is_err() {
        hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILWRITE,
            LineNumber: 0,
        }
    } else {
        hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::OK,
            LineNumber: 0,
        }
    }
}
/// Writes the HLTAS from `input` to `script`.
///
/// This is meant to be used internally from the C++ HLTAS library.
///
/// # Safety
///
/// `input` must be a valid `HLTAS::Input`, `script` must be a valid pointer to a `size`-long array.
#[no_mangle]
pub unsafe extern "C" fn hltas_rs_to_string(
    input: *const c_void,
    script: *mut c_char,
    size: c_ulong,
) -> hltas_cpp::ErrorDescription {
    // `c_ulong` may be wider than `usize` on some targets; reject sizes that
    // don't fit rather than truncate.
    let size = match usize::try_from(size) {
        Ok(x) => x,
        Err(_) => {
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            }
        }
    };
    let mut buf = &mut *slice_from_raw_parts_mut(script.cast(), size);
    // `impl Write for &mut [u8]` advances the slice past the bytes written,
    // so after a successful write `buf` starts at the first unused byte.
    let rv = hltas_rs_to_writer(input, &mut buf);
    if rv.Code == hltas_cpp::ErrorCode::OK {
        if buf.is_empty() {
            // No space left for the NULL byte.
            return hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILWRITE,
                LineNumber: 0,
            };
        }
        // Write the terminating NULL byte.
        buf[0] = 0;
    }
    rv
}
/// Writes the HLTAS from `input` to `filename`.
///
/// This is meant to be used internally from the C++ HLTAS library.
///
/// # Safety
///
/// `input` must be a valid `HLTAS::Input`, `filename` must be a valid null-terminated string.
#[no_mangle]
pub unsafe extern "C" fn hltas_rs_write(
    input: *const c_void,
    filename: *const c_char,
) -> hltas_cpp::ErrorDescription {
    // Decode the filename, then create the file; either failure is FAILOPEN.
    match CStr::from_ptr(filename).to_str() {
        Ok(filename) => match File::create(filename) {
            Ok(file) => hltas_rs_to_writer(input, file),
            Err(_) => hltas_cpp::ErrorDescription {
                Code: hltas_cpp::ErrorCode::FAILOPEN,
                LineNumber: 0,
            },
        },
        Err(_) => hltas_cpp::ErrorDescription {
            Code: hltas_cpp::ErrorCode::FAILOPEN,
            LineNumber: 0,
        },
    }
}
|
// a complete example of DList
fn main() {
} |
use serenity::{
client::Context,
model::prelude::*,
framework::standard::{
Args, CommandResult, macros::command
}
};
/// Multiplies the two numeric arguments and replies with the product.
#[command]
pub async fn multiply(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    let lhs = args.single::<f64>()?;
    let rhs = args.single::<f64>()?;
    let product = lhs * rhs;
    msg.channel_id.say(&ctx.http, product).await?;
    Ok(())
}
/// Adds the two numeric arguments and replies with the sum.
#[command]
pub async fn add(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {
    let lhs = args.single::<f64>()?;
    let rhs = args.single::<f64>()?;
    let sum = lhs + rhs;
    msg.channel_id.say(&ctx.http, sum).await?;
    Ok(())
}
#[command]
pub async fn compute(ctx: &Context, msg: &Message, args: Args) -> CommandResult {
let all = args.raw().collect::<Vec<&str>>().join(" ");
let terms: Vec<&str> = all.split("+").collect();
let mut sum : f64 = 0.0;
for term in terms {
let factors: Vec<&str> = term.split("*").collect();
let mut product : f64 = 1.0;
for factor in factors {
product *= factor.trim().parse::<f64>()?;
}
sum += product;
}
msg.channel_id.say(&ctx.http, format!("It makes: `{}`", sum)).await?;
Ok(())
} |
use super::ProfileSize;
use crate::{
commands::{check_user_mention, parse_discord, parse_mode_option, DoubleResultCow},
database::UserConfig,
error::Error,
util::{
constants::common_literals::{DISCORD, MODE, NAME},
ApplicationCommandExt, CowUtils, InteractionExt,
},
Args, Context,
};
use twilight_model::{
application::interaction::{application_command::CommandOptionValue, ApplicationCommand},
id::{marker::UserMarker, Id},
};
/// Parsed arguments for the profile command: the author's stored user
/// configuration with any command-line overrides applied on top.
pub(super) struct ProfileArgs {
    pub config: UserConfig,
}
impl ProfileArgs {
    /// Parses prefix-command arguments (at most two, lowercased).
    ///
    /// Each argument is either a `key=value` pair (only `size` is recognized)
    /// or treated as an osu! name / Discord mention. User-facing parse
    /// failures are returned as the inner `Err(content)`.
    pub(super) async fn args(
        ctx: &Context,
        args: &mut Args<'_>,
        author_id: Id<UserMarker>,
    ) -> DoubleResultCow<Self> {
        // Start from the author's stored config and apply overrides on top.
        let mut config = ctx.user_config(author_id).await?;
        for arg in args.take(2).map(CowUtils::cow_to_ascii_lowercase) {
            // `filter(|&i| i > 0)` rejects a leading '=' (empty key).
            if let Some(idx) = arg.find('=').filter(|&i| i > 0) {
                let key = &arg[..idx];
                let value = &arg[idx + 1..];
                match key {
                    "size" => {
                        config.profile_size = match value {
                            "compact" | "small" => Some(ProfileSize::Compact),
                            "medium" => Some(ProfileSize::Medium),
                            "full" | "big" => Some(ProfileSize::Full),
                            _ => {
                                let content = "Failed to parse `size`. Must be either `compact`, `medium`, or `full`.";
                                return Ok(Err(content.into()));
                            }
                        };
                    }
                    _ => {
                        let content = format!(
                            "Unrecognized option `{key}`.\n\
                            Available options are: `size`."
                        );
                        return Ok(Err(content.into()));
                    }
                }
            } else {
                // Not a key=value pair: treat it as a user name/mention.
                match check_user_mention(ctx, arg.as_ref()).await? {
                    Ok(osu) => config.osu = Some(osu),
                    Err(content) => return Ok(Err(content)),
                }
            }
        }
        Ok(Ok(Self { config }))
    }
    /// Parses slash-command options (`mode`, `size`, `name`, `discord`).
    ///
    /// Unknown options or unexpected value types indicate a command-definition
    /// mismatch and yield `Error::InvalidCommandOptions` rather than
    /// user-facing content.
    pub(super) async fn slash(
        ctx: &Context,
        command: &mut ApplicationCommand,
    ) -> DoubleResultCow<Self> {
        let mut config = ctx.user_config(command.user_id()?).await?;
        for option in command.yoink_options() {
            match option.value {
                CommandOptionValue::String(value) => match option.name.as_str() {
                    MODE => config.mode = parse_mode_option(&value),
                    "size" => match value.as_str() {
                        "compact" => config.profile_size = Some(ProfileSize::Compact),
                        "medium" => config.profile_size = Some(ProfileSize::Medium),
                        "full" => config.profile_size = Some(ProfileSize::Full),
                        _ => return Err(Error::InvalidCommandOptions),
                    },
                    NAME => config.osu = Some(value.into()),
                    _ => return Err(Error::InvalidCommandOptions),
                },
                CommandOptionValue::User(value) => match option.name.as_str() {
                    DISCORD => match parse_discord(ctx, value).await? {
                        Ok(osu) => config.osu = Some(osu),
                        Err(content) => return Ok(Err(content)),
                    },
                    _ => return Err(Error::InvalidCommandOptions),
                },
                _ => return Err(Error::InvalidCommandOptions),
            }
        }
        Ok(Ok(Self { config }))
    }
}
|
extern crate oxygengine_core as core;
pub mod ast;
pub mod resource;
pub mod system;
pub mod vm;
#[cfg(test)]
mod tests;
pub mod prelude {
pub use crate::{ast::*, resource::*, system::*, vm::*};
}
use crate::system::{flow_script_system, FlowScriptSystemResources};
use core::{
app::AppBuilder,
ecs::pipeline::{PipelineBuilder, PipelineBuilderError},
id::ID,
};
/// Unique identifier type used by the flow-script module.
pub type Guid = ID<()>;
/// Installs the flow-script system into an application's pipeline.
///
/// Registers `flow_script_system` under the name "flow" with no
/// pipeline dependencies. The unit parameter is a placeholder to match
/// the common bundle-installer signature.
pub fn bundle_installer<PB>(builder: &mut AppBuilder<PB>, _: ()) -> Result<(), PipelineBuilderError>
where
    PB: PipelineBuilder,
{
    builder.install_system::<FlowScriptSystemResources>("flow", flow_script_system, &[])?;
    Ok(())
}
|
extern crate csv;
use std::{
error::Error,
fs::File,
ops::{Index, IndexMut, Range},
path::Path,
};
use rand::{
Rng,
seq::SliceRandom,
};
/// Permute in-place all matrix rows with respect to target vector.
///
/// Uses a Fisher–Yates shuffle so every permutation of the rows is equally
/// likely; each swap is applied to `X`'s rows and to `y` in lockstep.
///
/// Bug fix: the original shuffled an index vector and then swapped its
/// entries pairwise (`chunks(2)`), which applies at most `rows / 2` disjoint
/// transpositions — it neither applies the shuffled permutation nor produces
/// a uniform shuffle.
pub fn shuffle(X: &mut Matrix, y: &mut Vector) {
    let mut rng = rand::thread_rng();
    for i in (1..X.rows).rev() {
        // Two-argument gen_range matches the rand API used by gen_dataset.
        let j = rng.gen_range(0, i + 1);
        y.swap(i, j);
        for k in 0..X.cols {
            X.data.swap(i * X.cols + k, j * X.cols + k);
        }
    }
}
/// Scalar product of two slices.
pub fn dot(lhs: &[f64], rhs: &[f64]) -> f64 {
    let mut acc = 0.0;
    for (l, r) in lhs.iter().zip(rhs.iter()) {
        acc += l * r;
    }
    acc
}
/// L2 norm of a vector.
pub fn norm(x: &[f64]) -> f64 {
    x.iter().fold(0.0, |acc, &v| acc + v * v).sqrt()
}
/// Generate `f`-dependent train matrix and target with random noise.
///
/// Samples `n` points evenly spaced across `range`; the target is the exact
/// `f(x)` while the stored feature is `x` perturbed by `noise * r`, with `r`
/// drawn uniformly from `(-range.1, range.1)`.
/// NOTE(review): `gen_range(-range.1, range.1)` panics when `range.1 <= 0`
/// (empty interval) — confirm callers always pass a positive upper bound.
pub fn gen_dataset<F>(n: usize, range: (f64, f64), noise: f64, f: F)
    -> (Matrix, Vector)
    where F: Fn(f64) -> f64,
{
    let mut X = Matrix::new(n, 1);
    let mut y = vec![0f64; n];
    let mut rng = rand::thread_rng();
    let mut x = range.0;
    // Step between consecutive sample points.
    let delta = (range.1 - range.0) / n as f64;
    for i in 0..n {
        y[i] = f(x);
        // Noise perturbs the feature, not the target.
        X[[i, 0]] = x + noise * rng.gen_range(-range.1, range.1);
        x += delta;
    }
    (X, y)
}
/// Selects the rows of `X` and entries of `y` named by `idx`, in order,
/// returning them as a new (matrix, vector) pair.
pub fn slice(X: &Matrix, y: &Vector, idx: &[usize]) -> (Matrix, Vector) {
    let rows = X.slice(idx);
    let targets: Vector = idx.iter().map(|&i| y[i]).collect();
    (rows, targets)
}
pub trait FromCSV {
    /// Read from `.csv` file.
    /// Trait is used due to type-aliasing limitations with `Vec<f64>`.
    fn read<P: AsRef<Path> + ?Sized>(filename: &P) -> Result<Self, Box<dyn Error>>
        where Self: Sized;
}
/// Target vector; alias kept for readability alongside `Matrix`.
pub type Vector = Vec<f64>;
impl FromCSV for Vector {
    /// Reads the first column of a headerless CSV file into a vector.
    /// NOTE(review): `record.get(0).unwrap()` panics on an empty record —
    /// confirm input files never contain blank rows.
    fn read<P: AsRef<Path> + ?Sized>(filename: &P) -> Result<Self, Box<dyn Error>> {
        let file = File::open(filename)?;
        let mut reader = csv::ReaderBuilder::new()
            .has_headers(false)
            .from_reader(file);
        let mut data = Vec::new();
        for result in reader.records() {
            let record = result?;
            // Only the first field of each row is used.
            data.push(record.get(0).unwrap().parse::<f64>()?);
        }
        Ok(data)
    }
}
/// Dense row-major matrix of `f64` (`data.len() == rows * cols`).
#[derive(Debug, Clone)]
pub struct Matrix {
    rows: usize,
    cols: usize,
    data: Vec<f64>,
}
impl Matrix {
    /// Creates an `n` x `m` matrix filled with zeros.
    pub fn new(n: usize, m: usize) -> Self {
        Matrix {
            rows: n,
            cols: m,
            data: vec![0f64; n * m],
        }
    }
    /// Returns a new matrix consisting of the rows selected by `idx`,
    /// in the given order. Panics if an index is out of bounds.
    ///
    /// Safety fix: the original reserved capacity and called `set_len` on
    /// uninitialized memory (undefined behavior) before filling it; the rows
    /// are now appended directly instead.
    pub fn slice(&self, idx: &[usize]) -> Self {
        let mut data = Vec::with_capacity(idx.len() * self.cols);
        for &i in idx {
            let start = i * self.cols;
            data.extend_from_slice(&self.data[start..start + self.cols]);
        }
        Matrix {
            rows: idx.len(),
            cols: self.cols,
            data,
        }
    }
    /// Number of rows.
    pub fn rows(&self) -> usize { self.rows }
    /// Number of columns.
    pub fn cols(&self) -> usize { self.cols }
    /// Returns the transposed matrix.
    ///
    /// Safety fix: same `set_len`-on-uninitialized issue as `slice`; the
    /// buffer is now zero-initialized before the transposed copy.
    pub fn transpose(&self) -> Self {
        let mut data = vec![0f64; self.rows * self.cols];
        for i in 0..self.cols {
            for j in 0..self.rows {
                // Element (j, i) of self becomes element (i, j) of the result.
                data[i * self.rows + j] = self.data[j * self.cols + i];
            }
        }
        Matrix {
            rows: self.cols,
            cols: self.rows,
            data,
        }
    }
}
impl FromCSV for Matrix {
    /// Reads a headerless CSV file into a row-major matrix.
    /// NOTE(review): `cols` is taken from the width of the *last* record and
    /// all records are assumed equally wide — a ragged file is not rejected
    /// and would silently produce an inconsistent matrix. Confirm inputs.
    fn read<P: AsRef<Path> + ?Sized>(filename: &P) -> Result<Self, Box<dyn Error>> {
        let file = File::open(filename)?;
        let mut reader = csv::ReaderBuilder::new()
            .has_headers(false)
            .from_reader(file);
        let mut rows = 0;
        let mut cols = 0;
        let mut data = Vec::new();
        for result in reader.records() {
            let record = result?;
            rows += 1;
            cols = record.len();
            for element in record.iter() {
                data.push(element.parse::<f64>()?);
            }
        }
        Ok(Matrix {
            rows,
            cols,
            data,
        })
    }
}
/// Return a row of the matrix as an iterable slice.
/// Each row represents one observation, which is convenient for regression.
impl Index<usize> for Matrix {
    type Output = [f64];
    fn index(&self, idx: usize) -> &Self::Output {
        if idx >= self.rows {
            panic!("index out of bounds: the rows num is {} but the index is {}", self.rows, idx);
        }
        let start = idx * self.cols;
        &self.data[start..start + self.cols]
    }
}
/// Scalar access by a `[row, col]` pair.
impl Index<[usize; 2]> for Matrix {
    type Output = f64;
    fn index(&self, idx: [usize; 2]) -> &Self::Output {
        let [i, j] = idx;
        if i >= self.rows || j >= self.cols {
            panic!("index out of bounds: the dim is {},{} but the index is {},{}",
                self.rows, self.cols, i, j);
        }
        &self.data[i * self.cols + j]
    }
}
/// Contiguous block of rows `start..end` (end-exclusive) as a flat slice.
impl Index<Range<usize>> for Matrix {
    type Output = [f64];
    fn index(&self, idx: Range<usize>) -> &Self::Output {
        let Range { start: i, end: j } = idx;
        // Bug fix: `end == rows` is a valid exclusive bound; the original
        // used `j >= self.rows`, panicking on full-range slices like 0..rows.
        if j > self.rows {
            panic!("index out of bounds: the rows num is {} but the index is {}", self.rows, j);
        }
        &self.data[i * self.cols..j * self.cols]
    }
}
/// Mutable access to a row as a slice.
impl IndexMut<usize> for Matrix {
    fn index_mut(&mut self, idx: usize) -> &mut Self::Output {
        if idx >= self.rows {
            panic!("index out of bounds: the rows num is {} but the index is {}", self.rows, idx);
        }
        let start = idx * self.cols;
        &mut self.data[start..start + self.cols]
    }
}
/// Mutable scalar access by a `[row, col]` pair.
impl IndexMut<[usize; 2]> for Matrix {
    fn index_mut(&mut self, idx: [usize; 2]) -> &mut Self::Output {
        let [i, j] = idx;
        if i >= self.rows || j >= self.cols {
            panic!("index out of bounds: the dim is {},{} but the index is {},{}",
                self.rows, self.cols, i, j);
        }
        &mut self.data[i * self.cols + j]
    }
}
impl IndexMut<Range<usize>> for Matrix {
fn index_mut(&mut self, idx: Range<usize>) -> &mut Self::Output {
let Range { start: i, end: j } = idx;
if j >= self.rows {
panic!("index out of bounds: the rows num is {} but the index is {}", self.rows, j);
}
&mut self.data[i * self.cols..j * self.cols]
}
} |
//! # rustable
//! rustable is yet another library for interfacing Bluez over DBus.
//! Its objective is to be a comprehensive tool for creating Bluetooth Low Energy
//! enabled applications in Rust on Linux. It supports interacting with remote
//! devices as a GATT client, and creating local services as a GATT Server.
//! It currently allows the creation of advertisements/broadcasts as a Bluetooth peripheral.
//!
//! ## Supported Features
//! ### GAP Peripheral
//! - Advertisements
//! - Broadcasts
//! ### GATT Server
//! - Creating local services
//! - Reading local characteristics from remote devices.
//! - Writing to local characteristics from remote devices.
//! - Write-without-response via sockets from remote devices (AcquireWrite).
//! - Notifying/Indicating local characteristics with sockets (AcquireNotify).
//! - Reading local descriptors from remote devices.
//!
//! **To Do:**
//! - Writable descriptors.
//! ### GATT Client
//! - Retrieving attribute metadata (Flags, UUIDs...).
//! - Reading from remote characteristics.
//! - Writing to remote characteristics.
//! - Write-without-response via sockets to remote devices (AcquireWrite).
//! - Receiving remote notification/indications with sockets.
//!
//! **To Do:**
//! - Descriptors as a client.
//! ## Development status
//! This library is unstable in *alpha*. There are planned functions
//! in the API that have yet to be implemented. Unimplemented functions are noted.
//! The API is subject to breaking changes.
//!
use gatt::*;
use nix::unistd::close;
use rustbus::client_conn;
use rustbus::client_conn::{Conn, RpcConn, Timeout};
use rustbus::message_builder::{DynamicHeader, MarshalledMessage, MessageBuilder, MessageType};
use rustbus::params;
use rustbus::params::message::Message;
use rustbus::params::{Base, Container, Param};
use rustbus::signature;
use rustbus::standard_messages;
use rustbus::wire::marshal::traits::{Marshal, Signature};
use rustbus::wire::unmarshal;
use rustbus::wire::unmarshal::traits::Unmarshal;
use rustbus::wire::unmarshal::Error as UnmarshalError;
use rustbus::{get_system_bus_path, ByteOrder};
use std::cell::{Cell, RefCell};
use std::collections::{HashMap, VecDeque};
use std::convert::{TryFrom, TryInto};
use std::ffi::OsString;
use std::fmt::Write;
use std::fmt::{Debug, Display, Formatter};
use std::num::ParseIntError;
use std::os::unix::prelude::{AsRawFd, RawFd};
use std::path::{Component, Path, PathBuf};
use std::rc::{Rc, Weak};
mod bluetooth_cb;
pub mod path;
/// How a `Pending` value resolves: either by running a callback on the
/// DBus reply, or with a value that was already known when it was created.
enum PendingType<T: 'static, U: 'static> {
    // Callback invoked with the reply message and the stored user data.
    MessageCb(&'static dyn Fn(MarshalledMessage, U) -> T),
    // Result known up front; no reply processing needed.
    PreResolved(T),
}
/// A struct representing pending Dbus Method-calls.
///
/// Many methods in this library return `Pending` (usually wrapped in `Result`).
/// These methods are performed by issuing a DBus Method-call to the Bluez daemon.
/// This struct represents a pending response, that can be resolved using [`Bluetooth::resolve()`]/[`try_resolve()`].
/// This allows multiple DBus requests to be issued at once, allowing for more concurrent processing,
/// such as reading multiple characteristics at once.
/// ## Notes
/// - If using multiple [`Bluetooth`] instances in one application, the `Pending` must be resolved with the `Bluetooth` instance
/// that created it.
/// - `Pending` implements [`Drop`], that will cause it to be resolved automatically by its parent `Bluetooth`.
///
///
/// [`Bluetooth`]: ./struct.Bluetooth.html
/// [`Bluetooth::resolve()`]: ./struct.Bluetooth.html#method.resolve
/// [`try_resolve()`]: ./struct.Bluetooth.html#method.try_resolve
/// [`Drop`]: ./struct.Pending.html#impl-Drop
pub struct Pending<T: 'static, U: 'static> {
    // Serial of the outstanding DBus call this is waiting on.
    dbus_res: u32,
    typ: Option<PendingType<T, U>>,
    data: Option<U>, // this Option allows the data to be taken by value when resolving
    // Weak handle to the parent `Bluetooth`'s queue of orphaned replies;
    // used by `Drop` so an unresolved reply can still be consumed later.
    leaking: Weak<RefCell<VecDeque<(u32, Box<dyn FnOnce(MarshalledMessage)>)>>>,
}
impl<T: 'static, U: 'static> Drop for Pending<T, U> {
    /// If the `Pending` is dropped before being resolved, its DBus reply is
    /// still in flight. The callback and its data are handed to the parent
    /// `Bluetooth`'s "leaking" queue so the reply can be consumed when it
    /// arrives instead of being lost.
    fn drop(&mut self) {
        if let Some(PendingType::MessageCb(cb)) = self.typ.take() {
            // If the parent is already gone (Weak upgrade fails) there is
            // nothing left to notify.
            if let Some(leaking) = self.leaking.upgrade() {
                let data = self.data.take().unwrap();
                let fo_cb = move |call: MarshalledMessage| {
                    (cb)(call, data);
                };
                leaking
                    .borrow_mut()
                    .push_back((self.dbus_res, Box::new(fo_cb)));
            }
        }
    }
}
/// Returned by [`Bluetooth::try_resolve()`] to distinguish between
/// errors and results that haven't finished yet.
pub enum ResolveError<T: 'static, U: 'static> {
    /// The reply has not arrived yet; the `Pending` is handed back for retry.
    StillPending(Pending<T, U>),
    /// Resolution failed; the `Pending` is returned along with the error.
    Error(Pending<T, U>, Error),
}
pub mod interfaces;
mod advertisement;
pub use advertisement::*;
mod device;
pub use device::*;
use interfaces::*;
pub mod gatt;
mod introspect;
use introspect::Introspectable;
#[cfg(test)]
mod tests;
/// 128-bit UUID in canonical string form (planned to become its own struct).
pub type UUID = Rc<str>;
/// MAC address string, e.g. "AA:BB:CC:DD:EE:FF" (planned to become its own struct).
pub type MAC = Rc<str>;
/// Maximum payload size this application advertises per attribute operation.
pub const MAX_APP_MTU: usize = 244;
/// Maximum length accepted for a characteristic value.
pub const MAX_CHAR_LEN: usize = 512;
/// This trait creates a UUID from the implementing Type.
/// This trait can panic if the given type doesn't represent a valid uuid.
/// Only 128-bit uuids are supported at the moment.
/// ## Note
/// This trait exists because UUID and MAC will eventually be converted into
/// their own structs rather than being aliases for `Rc<str>`
pub trait ToUUID {
    /// Converts `self` into a validated `UUID` string.
    fn to_uuid(self) -> UUID;
}
impl ToUUID for &str {
    /// Panics (via `assert!`) if `self` is not a valid UUID string.
    fn to_uuid(self) -> UUID {
        assert!(validate_uuid(self));
        self.into()
    }
}
impl ToUUID for String {
    /// Panics (via `assert!`) if `self` is not a valid UUID string.
    fn to_uuid(self) -> UUID {
        assert!(validate_uuid(&self));
        self.into()
    }
}
impl ToUUID for UUID {
    // Already a UUID; assumed valid, no re-validation.
    fn to_uuid(self) -> UUID {
        self
    }
}
impl ToUUID for &UUID {
    // Cheap `Rc` clone of an already-valid UUID.
    fn to_uuid(self) -> UUID {
        self.clone()
    }
}
impl ToUUID for u128 {
    /// Formats the integer as a canonical 8-4-4-4-12 hex UUID string,
    /// most-significant bits first.
    ///
    /// Bug fix: the original shifted by hex-digit counts (24, 20, 16, 12)
    /// instead of bit counts and used wrong masks, so every group after the
    /// first was extracted from the wrong bits of the value. The groups are
    /// 32, 16, 16, 16 and 48 bits wide, i.e. shifts of 96, 80, 64 and 48.
    fn to_uuid(self) -> UUID {
        format!(
            "{:08x}-{:04x}-{:04x}-{:04x}-{:012x}",
            (self >> 96) & 0xFFFF_FFFF,
            (self >> 80) & 0xFFFF,
            (self >> 64) & 0xFFFF,
            (self >> 48) & 0xFFFF,
            self & 0xFFFF_FFFF_FFFF
        )
        .into()
    }
}
/// Checks if a string is a valid MAC address.
/// Currently a MAC address must be uppercase and use ':' as the separator.
/// The former requirement will be removed in the future, and '_' will also be accepted as a separator.
fn validate_mac(mac: &str) -> bool {
    // 6 two-digit octets + 5 separators = 17 bytes. Rejecting non-ASCII up
    // front also keeps the fixed-offset slicing below from panicking on
    // multi-byte character boundaries (the original could panic there).
    if mac.len() != 17 || !mac.is_ascii() {
        return false;
    }
    let bytes = mac.as_bytes();
    for i in 0..6 {
        let tar = i * 3;
        let octet = &mac[tar..tar + 2];
        // Must be uppercase hex. Bug fix: the original skipped the lowercase
        // check for the final octet, letting e.g. "AA:BB:CC:DD:EE:ff" pass
        // despite the documented uppercase requirement.
        if octet.bytes().any(|b| b.is_ascii_lowercase()) {
            return false;
        }
        if u8::from_str_radix(octet, 16).is_err() {
            return false;
        }
        if i < 5 && bytes[tar + 2] != b':' {
            return false;
        }
    }
    true
}
/// Conversion into a validated `MAC` string (counterpart of `ToUUID`).
pub trait ToMAC {
    fn to_mac(self) -> MAC;
}
impl ToMAC for &str {
    /// Panics (via `assert!`) if `self` is not a valid MAC string.
    fn to_mac(self) -> MAC {
        assert!(validate_mac(self));
        self.into()
    }
}
impl ToMAC for String {
    /// Panics (via `assert!`) if `self` is not a valid MAC string.
    fn to_mac(self) -> MAC {
        assert!(validate_mac(&self));
        self.into()
    }
}
/// Identifies which local DBus object an incoming request targets.
enum DbusObject {
    // GATT object: service UUID, optionally characteristic and descriptor UUIDs.
    Gatt(UUID, Option<(UUID, Option<UUID>)>),
    // Advertisement, by index — presumably into `ads`; confirm at use site.
    Ad(usize),
    // The application object itself.
    Appl,
    // No matching object.
    None,
}
/// Errors produced by this library.
#[derive(Debug)]
pub enum Error {
    /// Low-level DBus connection failure.
    DbusClient(client_conn::Error),
    /// A DBus request completed with an error reply or malformed data.
    DbusReqErr(String),
    /// Bluez rejected an operation (e.g. advertisement registration).
    Bluez(String),
    /// Caller-supplied arguments were invalid.
    BadInput(String),
    /// An expected file descriptor was missing — presumably from socket
    /// acquisition; confirm at use sites.
    NoFd(String),
    /// Unix syscall error (from `nix`).
    Unix(nix::Error),
    /// A non-blocking operation would have blocked (mapped from EAGAIN).
    Timeout,
}
impl From<nix::Error> for Error {
    /// Maps EAGAIN to `Error::Timeout` (a non-blocking op would block);
    /// every other errno becomes `Error::Unix`.
    fn from(err: nix::Error) -> Self {
        match err {
            nix::Error::Sys(nix::errno::Errno::EAGAIN) => Error::Timeout,
            err => Error::Unix(err),
        }
    }
}
impl Display for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
Debug::fmt(&self, f)
}
}
impl std::error::Error for Error {}
impl From<client_conn::Error> for Error {
    /// Wraps a DBus client-connection failure.
    fn from(err: client_conn::Error) -> Self {
        Error::DbusClient(err)
    }
}
impl From<rustbus::wire::unmarshal::Error> for Error {
    /// Unmarshalling failures are reported as request errors with context.
    fn from(err: rustbus::wire::unmarshal::Error) -> Self {
        Error::DbusReqErr(format!("Parameter failed to unmarshal: {:?}", err))
    }
}
impl TryFrom<&'_ Message<'_, '_>> for Error {
    type Error = &'static str;
    /// Converts a DBus error message into an `Error::DbusReqErr`.
    /// Fails with a static description if `msg` is not an error message
    /// or is missing its error name.
    fn try_from(msg: &Message) -> Result<Self, Self::Error> {
        match msg.typ {
            MessageType::Error => (),
            _ => return Err("Message was not an error"),
        }
        let err_name = match &msg.dynheader.error_name {
            Some(name) => name,
            None => return Err("Message was missing error name"),
        };
        // The first body parameter, when it is a string, carries the
        // human-readable error text; otherwise it is omitted.
        let err_text = if let Some(Param::Base(Base::String(err_text))) = msg.params.get(0) {
            Some(err_text)
        } else {
            None
        };
        Ok(Error::DbusReqErr(format!(
            "Dbus request error: {}, text: {:?}",
            err_name, err_text
        )))
    }
}
/// `Bluetooth` is created to interact with Bluez over DBus and file descriptors.
pub struct Bluetooth {
    rpc_con: RpcConn,
    // DBus object path of the Bluez adapter this instance talks to.
    blue_path: Rc<Path>,
    // Well-known DBus name owned by this client.
    name: String,
    // Object path this application's objects are registered under.
    path: PathBuf,
    /// Allows for verbose messages to be printed to stderr. Useful for debugging.
    // (Doc comment moved: it previously sat below `verbose`, attaching to `services`.)
    pub verbose: u8,
    // Local GATT services keyed by UUID.
    services: HashMap<UUID, LocalServiceBase>,
    // Whether the application has been registered with Bluez.
    registered: bool,
    // (well-known name, optional resolved unique owner) messages are accepted from.
    filter_dest: Option<(String, Option<String>)>,
    ads: VecDeque<Advertisement>,
    // Next index handed out to an added service / advertisement.
    service_index: u8,
    ad_index: u16,
    devices: HashMap<MAC, RemoteDeviceBase>,
    // Presumably maps a DBus path component to a device MAC — confirm at use sites.
    comp_map: HashMap<OsString, MAC>,
    // Cached adapter state, shared with pending-callback closures.
    powered: Rc<Cell<bool>>,
    discoverable: Rc<Cell<bool>>,
    // Replies owed to `Pending` values that were dropped unresolved.
    leaking: Rc<RefCell<VecDeque<(u32, Box<dyn FnOnce(MarshalledMessage)>)>>>,
    // Cached adapter MAC address.
    addr: MAC,
}
impl Bluetooth {
/// Creates a new `Bluetooth` and setup a DBus client to interact with Bluez.
pub fn new(dbus_name: String, blue_path: String) -> Result<Self, Error> {
    // Connect to the system bus (where the Bluez daemon lives) and say hello.
    let session_path = get_system_bus_path()?;
    let conn = Conn::connect_to_bus(session_path, true)?;
    let mut rpc_con = RpcConn::new(conn);
    rpc_con.send_message(&mut standard_messages::hello(), Timeout::Infinite)?;
    // Claim the requested well-known name; fail if the bus refuses.
    let namereq = rpc_con.send_message(
        &mut standard_messages::request_name(dbus_name.clone(), 0),
        Timeout::Infinite,
    )?;
    let res = rpc_con.wait_response(namereq, Timeout::Infinite)?;
    if let Some(_) = &res.dynheader.error_name {
        return Err(Error::DbusReqErr(format!(
            "Error Dbus client name {:?}",
            res
        )));
    }
    let services = HashMap::new();
    // Derive our object path from the DBus name: "com.example.app" -> "/com/example/app".
    let mut path = String::new();
    path.push('/');
    path.push_str(&dbus_name.replace(".", "/"));
    let path = PathBuf::from(path);
    let blue_path: &Path = blue_path.as_ref();
    let mut ret = Bluetooth {
        rpc_con,
        name: dbus_name,
        verbose: 0,
        services,
        registered: false,
        blue_path: blue_path.into(),
        path,
        filter_dest: None,
        ads: VecDeque::new(),
        service_index: 0,
        ad_index: 0,
        devices: HashMap::new(),
        comp_map: HashMap::new(),
        leaking: Rc::new(RefCell::new(VecDeque::new())),
        powered: Rc::new(Cell::new(false)),
        discoverable: Rc::new(Cell::new(false)),
        addr: "00:00:00:00:00:00".into(),
    };
    // Accept every message type except invalid ones.
    ret.rpc_con.set_filter(Box::new(move |msg| match msg.typ {
        MessageType::Call => true,
        MessageType::Error => true,
        MessageType::Reply => true,
        MessageType::Invalid => false,
        MessageType::Signal => true,
    }));
    // Restrict message sources to Bluez, subscribe to its signals, and
    // seed the cached adapter state (powered/discoverable/address).
    ret.set_filter(Some(BLUEZ_DEST.to_string()))?;
    ret.setup_match()?;
    ret.update_adapter_props()?;
    Ok(ret)
}
/// Subscribes to all DBus signals emitted by Bluez under the adapter's
/// path namespace, so property changes reach `update_from_changed`.
fn setup_match(&mut self) -> Result<(), Error> {
    let match_str = format!(
        "sender='{}',path_namespace='{}',type='signal',",
        BLUEZ_DEST,
        self.blue_path.to_str().unwrap()
    );
    eprintln!("{}", match_str); // TODO remove (leftover debug print)
    let mut msg = standard_messages::add_match(match_str);
    let res_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    let res = self.rpc_con.wait_response(res_idx, Timeout::Infinite)?;
    match res.typ {
        MessageType::Reply => Ok(()),
        MessageType::Error => Err(Error::DbusReqErr(format!(
            "Error adding match: {}",
            res.dynheader.error_name.unwrap()
        ))),
        _ => unreachable!(),
    }
}
/// Forces an update of the local cached adapter state by reading the
/// properties using a Dbus message.
///
/// Generally the locally cached state of the adapter is kept up to date,
/// by reading DBus signals coming from the Bluez daemon, so this method
/// doesn't typically need to be called.
pub fn update_adapter_props(&mut self) -> Result<(), Error> {
    // TODO: should this return a Pending
    // get properties of local adapter
    let mut msg = MessageBuilder::new()
        .call("GetAll".to_string())
        .with_interface(PROP_IF_STR.to_string())
        .on(self.blue_path.to_str().unwrap().to_string())
        .at(BLUEZ_DEST.to_string())
        .build();
    msg.body.push_param(ADAPTER_IF_STR.to_string()).unwrap();
    let res_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    let res = self.rpc_con.wait_response(res_idx, Timeout::Infinite)?;
    match res.typ {
        MessageType::Reply => {
            // Parse the a{sv} property map and commit it to the cache.
            let blue_props: HashMap<String, Variant> = res.body.parser().get()?;
            self.update_from_props(blue_props)
        }
        MessageType::Error => Err(Error::DbusReqErr(format!(
            "Error getting dbus adapter props: {:?}",
            res
        ))),
        _ => unreachable!(),
    }
}
/// Replaces the cached adapter state from a full `GetAll` property map.
/// Errors if any required property is missing or malformed; nothing is
/// committed until all three properties have parsed successfully.
fn update_from_props(&mut self, mut blue_props: HashMap<String, Variant>) -> Result<(), Error> {
    let powered = match blue_props.remove("Powered") {
        Some(var) => var.get()?,
        None => {
            return Err(Error::DbusReqErr(
                "No 'Powered' property was present on adapter!".to_string(),
            ))
        }
    };
    let addr = match blue_props.remove("Address") {
        Some(var) => {
            let addr_str: String = var.get()?;
            // Validate before converting so `to_mac()`'s assert cannot panic.
            if validate_mac(&addr_str) {
                addr_str.to_mac()
            } else {
                return Err(Error::DbusReqErr(
                    "'Address' property was in invalid format!".to_string(),
                ));
            }
        }
        None => {
            return Err(Error::DbusReqErr(
                "No 'Address' property was present on adapter!".to_string(),
            ))
        }
    };
    let discoverable = match blue_props.remove("Discoverable") {
        Some(var) => var.get()?,
        None => {
            return Err(Error::DbusReqErr(
                "No 'Discoverable' property was present on adapter!".to_string(),
            ))
        }
    };
    // All properties parsed; commit to the shared cells and local field.
    self.powered.replace(powered);
    self.discoverable.replace(discoverable);
    self.addr = addr;
    Ok(())
}
/// Applies a `PropertiesChanged`-style map of adapter properties to the
/// cached state. Unknown properties are ignored.
fn update_from_changed(&mut self, blue_props: HashMap<String, Variant>) -> Result<(), Error> {
    for (prop, var) in blue_props {
        match prop.as_str() {
            "Powered" => self.powered.set(var.get()?),
            // Bug fix: the original called `.to_uuid()` here, whose
            // `validate_uuid` assertion panics on a MAC-formatted string;
            // the adapter address must go through `.to_mac()`.
            "Address" => self.addr = var.get::<String>()?.to_mac(),
            "Discoverable" => self.discoverable.set(var.get()?),
            _ => (),
        }
    }
    Ok(())
}
/// Get the `MAC` of the local adapter.
/// (Cached; kept current via adapter property updates.)
pub fn addr(&self) -> &MAC {
    &self.addr
}
/// Allows for changing which messages sources are allowed or rejected.
///
/// By default, `Bluetooth`'s filter is set to org.bluez, only allowing
/// it to receive messages from the bluetooth daemon.
/// This method allows this filter to be eliminated (`None`) or set to something else.
/// This can be useful for debugging.
pub fn set_filter(&mut self, filter: Option<String>) -> Result<(), Error> {
    match filter {
        Some(name) => {
            // Resolve the well-known name to its unique owner so messages
            // sent from the owner's unique name also pass the filter.
            let mut nameowner = MessageBuilder::new()
                .call("GetNameOwner".to_string())
                .with_interface("org.freedesktop.DBus".to_string())
                .at("org.freedesktop.DBus".to_string())
                .on("/org/freedesktop/DBus".to_string())
                .build();
            nameowner.body.push_param(name.clone()).unwrap();
            let res_idx = self
                .rpc_con
                .send_message(&mut nameowner, Timeout::Infinite)?;
            let res = self.rpc_con.wait_response(res_idx, Timeout::Infinite)?;
            match res.typ {
                MessageType::Reply => {
                    let owner = res.body.parser().get()?;
                    if owner == name {
                        // The name is its own owner (a unique name was given).
                        self.filter_dest = Some((name, None));
                    } else {
                        self.filter_dest = Some((name, Some(owner)));
                    }
                }
                // Owner lookup failed; fall back to filtering on the name alone.
                MessageType::Error => self.filter_dest = Some((name, None)),
                _ => unreachable!(),
            }
        }
        None => self.filter_dest = None,
    }
    Ok(())
}
/// Gets the path of the DBus client
pub fn get_path(&self) -> &Path {
    &self.path
}
/// Adds a service to the `Bluetooth` instance. Once registered with [`register_application()`],
/// this service will be a local service that can be interacted with by remote devices.
///
/// If `register_application()` has already called, the service will not be visible to
/// Bluez (or other devices) until the application in reregistered.
///
/// # Panics
/// Panics if 255 services have already been added.
///
/// [`register_application()`]: ./struct.Bluetooth.html#method.register_application
pub fn add_service(&mut self, mut service: LocalServiceBase) -> Result<(), Error> {
    if self.services.len() >= 255 {
        panic!("Cannot add more than 255 services");
    }
    // Assign the next index and anchor the service's object path under ours.
    service.index = self.service_index;
    self.service_index += 1;
    let path = self.path.to_owned();
    service.update_path(path);
    self.services.insert(service.uuid.clone(), service);
    Ok(())
}
/// Access a service that has been added to the `Bluetooth` instance.
/// Returns `None` if no service with the given UUID has been added.
pub fn get_service<T: ToUUID>(&mut self, uuid: T) -> Option<LocalService<'_>> {
    let uuid = uuid.to_uuid();
    if self.services.contains_key(&uuid) {
        // `uuid` can be moved into the handle directly; the `.clone()`
        // in the original was redundant.
        Some(LocalService {
            uuid,
            bt: self,
        })
    } else {
        None
    }
}
/// Gets a remote device by `MAC`. The device must be discovered using `discover_devices()` from bluez,
/// before it can be gotten with this method.
pub fn get_device<'c>(&'c mut self, mac: &MAC) -> Option<RemoteDevice<'c>> {
    // Look up first so `None` is returned for unknown devices.
    let _base = self.devices.get_mut(mac)?;
    Some(RemoteDevice {
        blue: self,
        mac: mac.clone(),
        // With the "unsafe-opt" feature the handle also caches a raw
        // pointer to the device base — presumably to skip repeated map
        // lookups; confirm at the use sites.
        #[cfg(feature = "unsafe-opt")]
        ptr: _base,
    })
}
/// Gets a `Vec` of known devices' `MAC` addresses.
/// (Doc fix: the original comment said `HashSet`, but a `Vec` is returned.)
pub fn devices(&self) -> Vec<MAC> {
    // `.cloned()` replaces the manual `map(|x| x.clone())`.
    self.devices.keys().cloned().collect()
}
/// Registers the advertisement at queue position `adv_loc` with Bluez
/// ("RegisterAdvertisement" on org.bluez.LEAdvertisingManager1) and blocks,
/// serving incoming requests, until Bluez replies.
fn register_adv(&mut self, adv_loc: usize) -> Result<(), Error> {
    let mut msg = MessageBuilder::new()
        .call("RegisterAdvertisement".to_string())
        .with_interface("org.bluez.LEAdvertisingManager1".to_string())
        .on(self.blue_path.to_str().unwrap().to_string())
        .at(BLUEZ_DEST.to_string())
        .build();
    // Empty options dictionary (a{sv}) required by the call signature.
    let dict = params::Dict {
        key_sig: signature::Base::String,
        value_sig: signature::Type::Container(signature::Container::Variant),
        map: HashMap::new(),
    };
    let adv_path_str = self.ads[adv_loc].path.to_str().unwrap().to_string();
    msg.body
        .push_old_params(&[
            Param::Base(Base::ObjectPath(adv_path_str)),
            Param::Container(Container::Dict(dict)),
        ])
        .unwrap();
    let res_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    // Keep serving incoming requests while waiting for the reply.
    loop {
        self.process_requests()?;
        if let Some(res) = self.rpc_con.try_get_response(res_idx) {
            return match res.typ {
                MessageType::Error => {
                    // Extract the optional human-readable error string.
                    let mut err = None;
                    let res = res.unmarshall_all().unwrap();
                    if let Some(err_str) = res.params.get(0) {
                        if let Param::Base(Base::String(err_str)) = err_str {
                            err = Some(err_str);
                        }
                    }
                    // Bug fix: these messages said "application" (copy-pasted
                    // from register_application); this registers an advertisement.
                    let err_str = if let Some(err) = err {
                        format!(
                            "Failed to register advertisement with bluez: {}: {:?}",
                            res.dynheader.error_name.unwrap(),
                            err
                        )
                    } else {
                        format!(
                            "Failed to register advertisement with bluez: {}",
                            res.dynheader.error_name.unwrap()
                        )
                    };
                    Err(Error::Bluez(err_str))
                }
                MessageType::Reply => {
                    if self.verbose >= 1 {
                        eprintln!("Registered advertisement with bluez.");
                    };
                    // Bug fix: mark the advertisement that was actually
                    // registered. The original used `back_mut()`, which flags
                    // the newest advertisement even when `restart_adv()`
                    // re-registers an older one.
                    self.ads[adv_loc].active = true;
                    Ok(())
                }
                _ => unreachable!(),
            };
        }
    }
}
/// Registers an advertisement with Bluez.
/// After a successful call, it will persist until the `remove_advertise()`/`remove_advertise_no_dbus()`
/// is called or Bluez releases the advertisement (this is typically done on device connect).
///
/// Returns the advertisement's index; on failure the index is returned
/// alongside the error (the advertisement stays queued but inactive).
///
/// **Calls process_requests()**
pub fn start_adv(&mut self, mut adv: Advertisement) -> Result<u16, (u16, Error)> {
    // Queue the advertisement, assign the next index, and give it an
    // object path of the form "<app-path>/advXXXX".
    let ad_loc = self.ads.len();
    adv.index = self.ad_index;
    let ret_idx = self.ad_index;
    self.ad_index += 1;
    let adv_path = self.path.join(format!("adv{:04x}", adv.index));
    adv.path = adv_path;
    self.ads.push_back(adv);
    self.register_adv(ad_loc)
        .map(|_| ret_idx as u16)
        .map_err(|err| (ret_idx as u16, err))
}
/// Checks if an advertisement is still active, or if Bluez has signaled it has ended.
/// Returns `None` if no advertisement with `index` exists.
pub fn is_adv_active(&self, index: u16) -> Option<bool> {
    let adv = self.ads.iter().find(|ad| ad.index == index)?;
    Some(adv.active)
}
/// Restart an inactive advertisement.
///
/// If the advertisement is already active, this method does nothing and
/// returns `false`. If it is not active, it tries to reregister the
/// advertisement and returns `true` on success; otherwise it returns an `Err`.
pub fn restart_adv(&mut self, index: u16) -> Result<bool, Error> {
    // Translate the public advertisement index into a queue position.
    let idx = match self.ads.iter().position(|ad| ad.index == index) {
        Some(idx) => idx,
        None => {
            return Err(Error::BadInput(format!(
                "Advertisement index {} not found.",
                index
            )))
        }
    };
    if self.ads[idx].active {
        Ok(false)
    } else {
        self.register_adv(idx).map(|_| true)
    }
}
/// Unregisters an advertisement with Bluez. Returns the `Advertisement` if successful.
///
/// **Calls process_requests()**
pub fn remove_adv(&mut self, index: u16) -> Result<Advertisement, Error> {
    let idx = match self.ads.iter().position(|ad| ad.index == index) {
        Some(idx) => idx,
        None => {
            return Err(Error::BadInput(format!(
                "Advertisement index {} not found.",
                index
            )))
        }
    };
    // An inactive advertisement is not registered with Bluez, so it can
    // be removed locally without a DBus round trip.
    if !self.ads[idx].active {
        return Ok(self.ads.remove(idx).unwrap());
    }
    let mut msg = MessageBuilder::new()
        .call("UnregisterAdvertisement".to_string())
        .with_interface("org.bluez.LEAdvertisingManager1".to_string())
        .on(self.blue_path.to_str().unwrap().to_string())
        .at(BLUEZ_DEST.to_string())
        .build();
    let path = self.ads[idx].path.to_str().unwrap().to_string();
    msg.body
        .push_old_param(&Param::Base(Base::ObjectPath(path)))
        .unwrap();
    let res_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    // Keep serving incoming requests while waiting for Bluez's reply.
    loop {
        self.process_requests()?;
        if let Some(res) = self.rpc_con.try_get_response(res_idx) {
            match res.typ {
                MessageType::Reply => {
                    let mut adv = self.ads.remove(idx).unwrap();
                    adv.active = false;
                    return Ok(adv);
                }
                MessageType::Error => {
                    return Err(Error::DbusReqErr(format!(
                        "UnregisterAdvertisement call failed: {:?}",
                        res
                    )))
                }
                _ => unreachable!(),
            }
        }
    }
}
/// Unregisters every queued advertisement, stopping at the first failure.
///
/// **Calls process_requests()** (via `remove_adv`)
pub fn remove_all_adv(&mut self) -> Result<(), Error> {
    // Idiom fix: `!is_empty()` over `len() > 0`.
    while !self.ads.is_empty() {
        self.remove_adv(self.ads[0].index)?;
    }
    Ok(())
}
/// Removes the advertisement from the `Bluetooth` instance but does not unregister the
/// advertisement with Bluez. It is recommended that this is not used.
/// Returns `None` if no advertisement with `index` exists.
pub fn remove_adv_no_dbus(&mut self, index: u16) -> Option<Advertisement> {
    let idx = self.ads.iter().position(|ad| ad.index == index)?;
    self.ads.remove(idx)
}
/// Set the Bluez controller power on (`true`) or off.
/// Issues a DBus `Set` on the adapter's "Powered" property and returns a
/// `Pending` that resolves once Bluez replies.
pub fn set_power(
    &mut self,
    on: bool,
) -> Result<Pending<Result<(), Error>, (Rc<Cell<bool>>, bool, &'static str)>, Error> {
    let mut msg = MessageBuilder::new()
        .call("Set".to_string())
        .on(self.blue_path.to_str().unwrap().to_string())
        .with_interface(PROP_IF_STR.to_string())
        .at(BLUEZ_DEST.to_string())
        .build();
    msg.body.push_param2(ADAPTER_IF_STR, "Powered").unwrap();
    msg.body.push_variant(on).unwrap();
    let dbus_res = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    // The callback updates the shared `powered` cell when the reply arrives.
    Ok(Pending {
        typ: Some(PendingType::MessageCb(&bluetooth_cb::set_power_cb)),
        dbus_res,
        data: Some((self.powered.clone(), on, bluetooth_cb::POWER)),
        leaking: Rc::downgrade(&self.leaking),
    })
}
/// Set the Bluez controller power on (`true`) or off and waits for the response.
///
/// **Calls process_requests()**
pub fn set_power_wait(&mut self, on: bool) -> Result<(), Error> {
    // Bug fix: the original called `set_discoverable(on)` here, toggling
    // the wrong adapter property instead of "Powered".
    let pend = self.set_power(on)?;
    self.wait_result_variant(pend)
}
/// Set whether the Bluez controller should be discoverable (`true`) or not.
/// Issues a DBus `Set` on the adapter's "Discoverable" property and returns
/// a `Pending` that resolves once Bluez replies.
pub fn set_discoverable(
    &mut self,
    on: bool,
) -> Result<Pending<Result<(), Error>, (Rc<Cell<bool>>, bool, &'static str)>, Error> {
    let mut msg = MessageBuilder::new()
        .call("Set".to_string())
        .on(self.blue_path.to_str().unwrap().to_string())
        .with_interface(PROP_IF_STR.to_string())
        .at(BLUEZ_DEST.to_string())
        .build();
    msg.body
        .push_param2(ADAPTER_IF_STR, "Discoverable")
        .unwrap();
    msg.body.push_variant(on).unwrap();
    let dbus_res = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    // The callback updates the shared `discoverable` cell on reply.
    Ok(Pending {
        typ: Some(PendingType::MessageCb(&bluetooth_cb::set_power_cb)),
        dbus_res,
        data: Some((self.discoverable.clone(), on, bluetooth_cb::DISCOVERABLE)),
        leaking: Rc::downgrade(&self.leaking),
    })
}
/// Get whether the Bluez controller is currently discoverable.
pub fn discoverable(&self) -> bool {
self.discoverable.get()
}
/// Set whether the Bluez controller should be discoverable (`true`) or not and waits for the response.
///
/// **Calls process_requests()**
pub fn set_discoverable_wait(&mut self, on: bool) -> Result<(), Error> {
    self.set_discoverable(on)
        .and_then(|pending| self.wait_result_variant(pending))
}
/// Resolve a `Pending` whose payload is itself a `Result`, flattening the
/// transport error and the payload error into a single `Result`.
fn wait_result_variant<O, U>(
    &mut self,
    pend: Pending<Result<O, Error>, U>,
) -> Result<O, Error> {
    // On resolution failure the unresolved Pending is dropped and only the
    // error is propagated to the caller.
    self.resolve(pend).unwrap_or_else(|(_pend, err)| Err(err))
}
/// Get whether the Bluez controller is currently powered.
pub fn power(&mut self) -> bool {
self.powered.get()
}
/// Registers the local application's GATT services/characteristics/descriptors
/// with the Bluez controller.
///
/// Blocks until Bluez acknowledges (or rejects) the registration.
///
/// **Calls process_requests()**
pub fn register_application(&mut self) -> Result<(), Error> {
    let path = self.get_path();
    // RegisterApplication takes the application's object path plus an
    // (empty) options dict of DBus type a{sv}.
    let empty_dict = HashMap::new();
    let dict = params::Dict {
        key_sig: signature::Base::String,
        value_sig: signature::Type::Container(signature::Container::Variant),
        map: empty_dict,
    };
    let call_builder = MessageBuilder::new().call(REGISTER_CALL.to_string());
    let mut msg = call_builder
        .with_interface(MANAGER_IF_STR.to_string())
        .on(self.blue_path.to_str().unwrap().to_string())
        .at(BLUEZ_DEST.to_string())
        .build();
    msg.body
        .push_old_params(&[
            Param::Base(Base::ObjectPath(
                path.as_os_str().to_str().unwrap().to_string(),
            )),
            Param::Container(Container::Dict(dict)),
        ])
        .unwrap();
    let msg_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    // Keep servicing DBus traffic until Bluez replies to the registration call.
    loop {
        self.process_requests()?;
        if let Some(res) = self.rpc_con.try_get_response(msg_idx) {
            return if let MessageType::Error = res.typ {
                // Pull the optional human-readable error string out of the
                // reply body for a better diagnostic.
                let mut err = None;
                let res = res.unmarshall_all().unwrap();
                if let Some(err_str) = res.params.get(0) {
                    if let Param::Base(Base::String(err_str)) = err_str {
                        err = Some(err_str);
                    }
                }
                let err_str = if let Some(err) = err {
                    format!(
                        "Failed to register application with bluez: {}: {:?}",
                        res.dynheader.error_name.unwrap(),
                        err
                    )
                } else {
                    format!(
                        "Failed to register application with bluez: {}",
                        res.dynheader.error_name.unwrap()
                    )
                };
                Err(Error::Bluez(err_str))
            } else {
                if self.verbose >= 1 {
                    eprintln!("Registered application with bluez.");
                };
                Ok(())
            };
        }
    }
}
/// **Unimplemented**
///
/// Unregisters the local GATT services from the Bluez controller.
///
/// **Calls process_requests()**
pub fn unregister_application(&mut self) -> Result<(), Error> {
unimplemented!();
self.registered = false;
Ok(())
}
/// Decide whether a message from `sender` is allowed through the destination
/// filter: with no filter configured, everything passes; otherwise the sender
/// must match the filtered well-known name or its cached unique name.
fn check_incoming(&self, sender: &str) -> bool {
    match &self.filter_dest {
        Some((primary, secondary)) => {
            primary == sender || secondary.as_deref() == Some(sender)
        }
        None => true,
    }
}
/// Process incoming DBus requests for the local application.
///
/// When using `Bluetooth` this function should be called on a regular basis.
/// Bluez uses DBus to handle read/write requests to characteristics and descriptors, as wells
/// advertisement. Failure to call this method frequently enough could result in requests from
/// GATT clients to timeout. Some other functions are guaranteed to call this function at least
/// once while waiting for a responses from the Bluez controller. This property is noted in these
/// functions' descriptions.
///
pub fn process_requests(&mut self) -> Result<(), Error> {
    // Flush any queued outgoing replies first.
    let responses = self.rpc_con.refill_all()?;
    for mut response in responses {
        self.rpc_con
            .send_message(&mut response, Timeout::Infinite)?;
    }
    // Drain responses for "leaked" Pendings (dropped before being resolved);
    // their stored callbacks are invoked in FIFO order.
    let mut leaking_bm = self.leaking.borrow_mut();
    while leaking_bm.len() > 0 {
        match self.rpc_con.try_get_response(leaking_bm[0].0) {
            Some(call) => {
                let (_, cb) = leaking_bm.pop_front().unwrap();
                (cb)(call);
            }
            None => break,
        }
    }
    drop(leaking_bm);
    // Dispatch incoming method calls to the matching local object:
    // application root, GATT service/characteristic/descriptor, or advertisement.
    while let Some(call) = self.rpc_con.try_get_call() {
        let interface = (&call.dynheader.interface).as_ref().unwrap();
        let sender = call.dynheader.sender.as_ref().unwrap();
        // Reject calls from senders outside the configured destination filter.
        if !self.check_incoming(sender) {
            let mut msg = call.dynheader.make_error_response(
                BLUEZ_NOT_PERM.to_string(),
                Some("Sender is not allowed to perform this action.".to_string()),
            );
            self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
            continue;
        }
        let mut reply = match self.match_root(&call.dynheader) {
            DbusObject::Appl => match interface.as_ref() {
                PROP_IF_STR => self.properties_call(call),
                OBJ_MANAGER_IF_STR => self.objectmanager_call(call),
                INTRO_IF_STR => self.introspectable(call),
                _ => standard_messages::unknown_method(&call.dynheader),
            },
            DbusObject::Gatt(serv_uuid, child_uuid) => {
                let serv_base = self.services.get_mut(&serv_uuid).unwrap();
                match child_uuid {
                    Some((char_uuid, child_uuid)) => {
                        let char_base = serv_base.chars.get_mut(&char_uuid).unwrap();
                        match child_uuid {
                            Some(desc_uuid) => {
                                // Descriptor
                                let desc_base = char_base.descs.get_mut(&desc_uuid).unwrap();
                                match interface.as_ref() {
                                    PROP_IF_STR => desc_base.properties_call(call),
                                    DESC_IF_STR => {
                                        let mut serv = LocalService::new(self, serv_uuid);
                                        let mut character =
                                            LocalChar::new(&mut serv, char_uuid);
                                        let mut desc =
                                            LocalDesc::new(&mut character, desc_uuid);
                                        desc.desc_call(call)
                                    }
                                    INTRO_IF_STR => desc_base.introspectable(call),
                                    _ => standard_messages::unknown_method(&call.dynheader),
                                }
                            }
                            None => {
                                // Characteristic
                                match interface.as_ref() {
                                    PROP_IF_STR => char_base.properties_call(call),
                                    CHAR_IF_STR => {
                                        let mut serv = LocalService::new(self, serv_uuid);
                                        let mut character =
                                            LocalChar::new(&mut serv, char_uuid);
                                        character.char_call(call)
                                    }
                                    INTRO_IF_STR => char_base.introspectable(call),
                                    _ => standard_messages::unknown_method(&call.dynheader),
                                }
                            }
                        }
                    }
                    None => {
                        // Service
                        match interface.as_ref() {
                            PROP_IF_STR => serv_base.properties_call(call),
                            SERV_IF_STR => serv_base.service_call(call),
                            INTRO_IF_STR => serv_base.introspectable(call),
                            _ => standard_messages::unknown_method(&call.dynheader),
                        }
                    }
                }
            }
            DbusObject::Ad(ad_idx) => {
                let adv = &mut self.ads[ad_idx];
                match interface.as_ref() {
                    PROP_IF_STR => adv.properties_call(call),
                    LEAD_IF_STR => match call.dynheader.member.as_ref().unwrap().as_str() {
                        // Bluez released the advertisement; mark it inactive.
                        "Release" => {
                            adv.active = false;
                            call.dynheader.make_response()
                        }
                        _ => standard_messages::unknown_method(&call.dynheader),
                    },
                    INTRO_IF_STR => adv.introspectable(call),
                    _ => standard_messages::unknown_method(&call.dynheader),
                }
            }
            DbusObject::None => standard_messages::unknown_method(&call.dynheader),
        };
        self.rpc_con.send_message(&mut reply, Timeout::Infinite)?;
        // Close any file descriptors attached to the reply now that it is sent.
        for fd in reply.raw_fds {
            close(fd).ok();
        }
    }
    // Handle signals: object-manager additions/removals, DBus name
    // bookkeeping, and remote property-change notifications.
    while let Some(sig) = self.rpc_con.try_get_signal() {
        match sig.dynheader.interface.as_ref().unwrap().as_str() {
            OBJ_MANAGER_IF_STR => {
                if !self.check_incoming(sig.dynheader.sender.as_ref().unwrap()) {
                    continue;
                }
                match sig.dynheader.member.as_ref().unwrap().as_str() {
                    IF_ADDED_SIG => self.interface_added(sig)?,
                    IF_REMOVED_SIG => self.interface_removed(sig)?,
                    _ => (),
                }
            }
            DBUS_IF_STR => {
                if sig.dynheader.sender.unwrap() != "org.freedesktop.Dbus" {
                    continue;
                }
                match sig.dynheader.member.as_ref().unwrap().as_str() {
                    NAME_LOST_SIG => {
                        // The filtered peer lost its bus name: drop its cached
                        // unique name and forget all cached remote devices.
                        if let Some(filter) = &mut self.filter_dest {
                            let lost_name: &str = sig.body.parser().get()?;
                            if filter.0 == lost_name {
                                filter.1 = None;
                                if self.verbose > 0 {
                                    eprintln!(
                                        "{} has disconnected for DBus?",
                                        self.filter_dest.as_ref().unwrap().0
                                    );
                                }
                                self.clear_devices();
                            }
                        }
                    }
                    NAME_OWNER_CHANGED => {}
                    _ => (),
                }
            }
            PROP_IF_STR => {
                if !self.check_incoming(sig.dynheader.sender.as_ref().unwrap()) {
                    continue;
                }
                match sig.dynheader.member.as_ref().unwrap().as_str() {
                    PROP_CHANGED_SIG => self.properties_changed(sig)?,
                    _ => (),
                }
            }
            _ => (),
        }
    }
    Ok(())
}
/// Dispatch a `PropertiesChanged` signal from Bluez to the matching cached
/// remote object (adapter, device, service, or characteristic).
///
/// The signal body starts with `(interface: &str, changed: dict, ...)`; only
/// the first two parameters are read here. Descriptor updates are not yet
/// handled and will panic.
fn properties_changed(&mut self, sig: MarshalledMessage) -> Result<(), Error> {
    if let Some(child) = self.match_remote(&sig.dynheader) {
        let mut parser = sig.body.parser();
        let interface: &str = parser.get()?;
        let changed = parser.get()?;
        match child {
            Some((dev_mac, child)) => {
                let dev = self.devices.get_mut(&dev_mac).unwrap();
                match child {
                    Some((serv_uuid, child)) => {
                        let serv = dev.get_child(&serv_uuid).unwrap();
                        match child {
                            Some((char_uuid, child)) => {
                                let mut character = serv.get_child(&char_uuid).unwrap();
                                match child {
                                    // TODO: descriptor property updates.
                                    Some(desc_uuid) => unimplemented!(),
                                    None => {
                                        if interface == CHAR_IF_STR {
                                            character.update_from_changed(changed)?;
                                        }
                                    }
                                }
                            }
                            None => {
                                if interface == SERV_IF_STR {
                                    serv.update_from_changed(changed)?;
                                }
                            }
                        }
                    }
                    None => {
                        if interface == DEV_IF_STR {
                            dev.update_from_changed(changed)?;
                        }
                    }
                }
            }
            None => {
                // The signal came from the adapter object itself.
                if interface == ADAPTER_IF_STR {
                    self.update_from_changed(changed)?;
                }
            }
        }
    }
    Ok(())
}
/// Handle an ObjectManager `InterfacesAdded` signal from Bluez.
///
/// Currently only additions to known remote GATT services are applied to the
/// cache; characteristic/descriptor additions are unimplemented and panic.
fn interface_added(&mut self, sig: MarshalledMessage) -> Result<(), Error> {
    match self.match_remote(&sig.dynheader) {
        Some(Some((mac, Some((serv_uuid, child_uuid))))) => {
            let dev = self.devices.get_mut(&mac).unwrap();
            let serv = dev.services.get_mut(&serv_uuid).unwrap();
            match child_uuid {
                // TODO: handle added characteristics/descriptors.
                Some((char_uuid, child_uuid)) => unimplemented!(),
                None => {
                    // Signal body is a{sa{sv}}: interface name -> property map.
                    let mut i_and_p: HashMap<String, HashMap<String, Variant>> =
                        sig.body.parser().get()?;
                    match i_and_p.remove("org.bluez.GattService1") {
                        Some(props) => serv.update_all(props),
                        None => Ok(()),
                    }
                }
            }
        }
        _ => Ok(()),
    }
}
/// Handle an ObjectManager `InterfacesRemoved` signal.
///
/// NOTE(review): removal of a cached device or one of its children is not yet
/// implemented and panics; only signals that do not match a cached device are
/// tolerated (and ignored).
fn interface_removed(&mut self, sig: MarshalledMessage) -> Result<(), Error> {
    match self.match_remote(&sig.dynheader) {
        Some(Some((mac, child_uuid))) => match child_uuid {
            Some((serv_uuid, child_uuid)) => unimplemented!(),
            None => unimplemented!(),
        },
        Some(None) | None => Ok(()),
    }
}
/// Resolve the target object of an incoming method call to one of the local
/// DBus objects: the application root, a GATT object, or an advertisement.
///
/// Returns `DbusObject::None` for headers missing an interface/member or for
/// unknown paths.
fn match_root(&mut self, dynheader: &DynamicHeader) -> DbusObject {
    let path = self.get_path();
    // Idiom fix: `is_none()` replaces `if let None = ...`
    // (clippy::redundant_pattern_matching); behavior is unchanged.
    if dynheader.interface.is_none() || dynheader.member.is_none() {
        return DbusObject::None;
    }
    // NOTE(review): the object path is assumed present on method calls; a
    // missing path panics here, exactly as in the original.
    let object = dynheader.object.as_ref().unwrap();
    let obj_path: &Path = object.as_ref();
    if path.starts_with(obj_path) {
        DbusObject::Appl
    } else {
        // The call targets something below the application root.
        let serv_path = match obj_path.strip_prefix(path) {
            Ok(path) => path,
            Err(_) => return DbusObject::None,
        };
        if let Some(matc) = self.match_services(serv_path) {
            return DbusObject::Gatt(matc.0, matc.1);
        }
        match self.match_advertisement(serv_path) {
            Some(idx) => DbusObject::Ad(idx),
            None => DbusObject::None,
        }
    }
}
/// Match a signal/call path under the adapter against the cached remote tree.
///
/// Return values:
/// - `None`: the path is not under the adapter, is malformed, or names an
///   unknown device.
/// - `Some(None)`: the adapter object itself.
/// - `Some(Some((mac, uuids)))`: a known device, with optional nested
///   service / characteristic / descriptor UUIDs.
fn match_remote(
    &mut self,
    header: &DynamicHeader,
) -> Option<Option<(MAC, Option<(UUID, Option<(UUID, Option<UUID>)>)>)>> {
    let path: &Path = header.object.as_ref().unwrap().as_ref();
    let path = match path.strip_prefix(self.blue_path.as_ref()) {
        Ok(p) => p,
        Err(_) => return None,
    };
    // The first component below the adapter is a device directory ("dev_XX_...").
    let first_comp = match path.components().next() {
        Some(Component::Normal(p)) => p.to_str().unwrap(),
        None => return Some(None),
        _ => return None,
    };
    let mac = devmac_to_mac(first_comp)?;
    let dev = self.devices.get_mut(&mac)?;
    // Delegate the remainder of the path to the device's own matcher.
    let uuids = dev.match_dev(path.strip_prefix(&first_comp).unwrap())?;
    Some(Some((mac, uuids)))
}
/// Match a path fragment (relative to the application root) against the local
/// GATT hierarchy, returning the service UUID plus any nested
/// characteristic/descriptor UUIDs.
///
/// Valid fragments are `servXX` (9 bytes), `servXX/charYY` (18 bytes), or
/// `servXX/charYY/descZZ` (27 bytes).
fn match_services(&mut self, path: &Path) -> Option<(UUID, Option<(UUID, Option<UUID>)>)> {
    let r_str = path.to_str().unwrap();
    // Robustness fix: `starts_with` replaces `&r_str[..4]`, which could panic
    // if byte 4 of a non-ASCII path were not a char boundary.
    if (r_str.len() != 9 && r_str.len() != 18 && r_str.len() != 27)
        || !r_str.starts_with("serv")
    {
        return None;
    }
    for uuid in self.get_children() {
        if let Some(matc) = match_serv(&mut self.get_child(&uuid).unwrap(), path) {
            return Some((uuid, matc));
        }
    }
    None
}
/// Match a path fragment against the registered advertisements, returning the
/// index of the matching advertisement in `self.ads`.
fn match_advertisement(&self, path: &Path) -> Option<usize> {
    let r_str = path.to_str().unwrap();
    // BUG FIX: the original compared the 4-byte slice `&r_str[..4]` with the
    // 3-byte literal "adv"; that comparison can never be equal, so this
    // function unconditionally returned `None` and advertisement objects were
    // unreachable over DBus. `starts_with("adv")` is the intended check and
    // also avoids a potential char-boundary panic on non-ASCII paths.
    if r_str.len() != 7 || !r_str.starts_with("adv") {
        return None;
    }
    for (i, adv) in self.ads.iter().enumerate() {
        if adv.path.file_name().unwrap() == path {
            return Some(i);
        }
    }
    None
}
/// Clear the known devices from the local application.
/// This function does *not* remove the devices from the controller.
/// It merely causes the local application to forget them.
pub fn clear_devices(&mut self) {
    self.devices.clear();
}
/// Used to get devices known to Bluez. This function does *not* trigger scan/discovery
/// on the Bluez controller. Use `set_scan()` to initiate actual device discovery.
///
/// **Calls process_requests()**
pub fn discover_devices(&mut self) -> Result<Vec<MAC>, Error> {
    // Filter on the adapter path itself, i.e. keep every managed object.
    let adapter_path = self.blue_path.clone();
    self.discover_devices_filter(adapter_path)
}
/*
/// **Unimplemented**
pub fn set_scan(&mut self, on: bool) -> Result<(), Error> {
let mut msg = MessageBuilder::new()
.call("Set".to_string())
.on(self.blue_path.to_str().unwrap().to_string())
.with_interface(PROP_IF_STR.to_string())
.at(BLUEZ_DEST.to_string())
.build();
msg.body.push_param2(ADAPTER_IF_STR, "Scan").unwrap();
let variant = Param::Container(Container::Variant(Box::new(params::Variant {
sig: rustbus::signature::Type::Base(rustbus::signature::Base::Boolean),
value: Param::Base(Base::Boolean(on)),
})));
msg.body.push_old_param(&variant).unwrap();
let res_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
loop {
self.process_requests()?;
if let Some(res) = self.rpc_con.try_get_response(res_idx) {
match res.typ {
MessageType::Reply => return Ok(()),
MessageType::Error => {
return Err(Error::DbusReqErr(format!(
"Set power call failed: {:?}",
res
)))
}
_ => unreachable!(),
}
}
}
}
*/
/// Tries to resolve a `Pending`. If the `Pending` is not ready, or an
/// error occurs it will return an `Err(ResolveError)`.
///
/// **Sometimes Calls process_requests()**. If the `Pending` is fetching remote information
/// then this will call [`process_requests()`], but some `Pending` are already resolved when created,
/// such as reading a local characteristic. (These "PreResolved" `Pending`s exist to satisfy trait definitions.)
///
/// NOTE(review): the type parameter `V` is never used in the signature or
/// body; removing it would break callers that spell out the turbofish, so it
/// is left in place — confirm before cleaning up.
pub fn try_resolve<T, V, U>(
    &mut self,
    mut pend: Pending<T, U>,
) -> Result<T, ResolveError<T, U>> {
    // A Pending must be resolved by the Bluetooth instance that created it.
    debug_assert_eq!(Rc::as_ptr(&self.leaking), pend.leaking.as_ptr());
    match pend.typ.as_ref().unwrap() {
        // Resolved at creation time: just move the stored value out.
        PendingType::PreResolved(_) => match pend.typ.take().unwrap() {
            PendingType::PreResolved(t) => Ok(t),
            _ => unreachable!(),
        },
        PendingType::MessageCb(cb) => {
            if let Err(e) = self.process_requests() {
                return Err(ResolveError::Error(pend, e));
            }
            match self.rpc_con.try_get_response(pend.dbus_res) {
                Some(res) => {
                    // Feed the reply plus the stashed context into the callback.
                    let data = pend.data.take().unwrap();
                    let ret = (cb)(res, data);
                    pend.typ.take();
                    Ok(ret)
                }
                None => Err(ResolveError::StillPending(pend)),
            }
        }
    }
}
/// Resolve a `Pending` by waiting for its response.
///
/// **Sometimes Calls process_requests()**. If the `Pending` is fetching remote information
/// then this will call [`process_requests()`], but some `Pending` are already resolved when created,
/// such as reading a local characteristic. (These "PreResolved" `Pending`s exist to satisfy trait definitions.)
///
/// On failure the unresolved `Pending` is handed back alongside the error.
pub fn resolve<T, U>(&mut self, mut pend: Pending<T, U>) -> Result<T, (Pending<T, U>, Error)> {
    // A Pending must be resolved by the Bluetooth instance that created it.
    debug_assert_eq!(Rc::as_ptr(&self.leaking), pend.leaking.as_ptr());
    match pend.typ.take().unwrap() {
        PendingType::PreResolved(t) => Ok(t),
        // Pump the connection until the response for this request shows up.
        PendingType::MessageCb(cb) => loop {
            if let Err(e) = self.process_requests() {
                break Err((pend, e));
            }
            if let Some(res) = self.rpc_con.try_get_response(pend.dbus_res) {
                let data = pend.data.take().unwrap();
                let ret = (cb)(res, data);
                break Ok(ret);
            }
        },
    }
}
/// Fetch Bluez's managed-object tree and rebuild the local device cache from
/// every object whose path starts with `filter_path`.
///
/// Clears `self.devices`, then walks the sorted object paths accumulating
/// device -> service -> characteristic -> descriptor bases; each completed
/// child is folded into its parent when the next sibling/parent begins.
/// Returns the MACs of the discovered devices.
///
/// NOTE(review): the lifetime parameter `'a` is unused — confirm before removing.
fn discover_devices_filter<'a, T: AsRef<Path>>(
    &mut self,
    filter_path: T,
) -> Result<Vec<MAC>, Error> {
    self.devices.clear();
    // Ask the bus root for GetManagedObjects.
    let mut msg = MessageBuilder::new()
        .call(MANGAGED_OBJ_CALL.to_string())
        .at(BLUEZ_DEST.to_string())
        .on("/".to_string())
        .with_interface(OBJ_MANAGER_IF_STR.to_string())
        .build();
    let res_idx = self.rpc_con.send_message(&mut msg, Timeout::Infinite)?;
    loop {
        self.process_requests()?;
        if let Some(res) = self.rpc_con.try_get_response(res_idx) {
            if let MessageType::Error = res.typ {
                return Err(Error::DbusReqErr(format!(
                    "Failed to discover device: {:?}",
                    res
                )));
            }
            let filter_path = filter_path.as_ref();
            // Reply body is a{oa{sa{sv}}}: object path -> interface -> properties.
            let path_map: HashMap<
                path::ObjectPathBuf,
                HashMap<String, HashMap<String, Variant>>,
            > = res.body.parser().get()?;
            let mut pairs: Vec<(
                path::ObjectPathBuf,
                HashMap<String, HashMap<String, Variant>>,
            )> = path_map
                .into_iter()
                .filter(|pair| pair.0.starts_with(filter_path))
                .collect();
            // Sort by path so parents always precede their children.
            pairs.sort_by(|a, b| a.0.cmp(&b.0));
            let mut ret = Vec::new();
            let mut device_base: Option<RemoteDeviceBase> = None;
            let mut service_base: Option<RemoteServiceBase> = None;
            let mut character_base: Option<RemoteCharBase> = None;
            let mut descriptor_base: Option<RemoteDescBase> = None;
            for (path, mut if_map) in pairs {
                if let Some(dev_base_props) = if_map.remove(DEV_IF_STR) {
                    // A new device starts: finish and store the previous one,
                    // folding any in-flight service/char/desc into it first.
                    if let Some(mut dev) = device_base {
                        if let Some(mut serv_base) = service_base.take() {
                            if let Some(mut char_base) = character_base.take() {
                                if let Some(desc_base) = descriptor_base.take() {
                                    char_base.descs.insert(desc_base.uuid().clone(), desc_base);
                                }
                                serv_base.chars.insert(char_base.uuid().clone(), char_base);
                            }
                            dev.services.insert(serv_base.uuid().clone(), serv_base);
                        }
                        ret.push(dev.uuid().clone());
                        self.insert_device(dev);
                    }
                    device_base =
                        Some(RemoteDeviceBase::from_props(dev_base_props, path.into())?);
                } else if let Some(dev_base) = &mut device_base {
                    if let Some(serv_base_props) = if_map.remove(SERV_IF_STR) {
                        // A new service under the current device.
                        if !path.starts_with(&dev_base.path) {
                            continue;
                        }
                        if let Some(mut serv_base) = service_base {
                            if let Some(mut char_base) = character_base.take() {
                                if let Some(desc_base) = descriptor_base.take() {
                                    char_base.descs.insert(desc_base.uuid().clone(), desc_base);
                                }
                                serv_base.chars.insert(char_base.uuid().clone(), char_base);
                            }
                            dev_base
                                .services
                                .insert(serv_base.uuid().clone(), serv_base);
                        }
                        service_base =
                            Some(RemoteServiceBase::from_props(serv_base_props, path.into())?);
                    } else if let Some(serv_base) = &mut service_base {
                        if let Some(char_base_props) = if_map.remove(CHAR_IF_STR) {
                            // A new characteristic under the current service.
                            if !path.starts_with(serv_base.path()) {
                                continue;
                            }
                            if let Some(mut char_base) = character_base {
                                if let Some(desc_base) = descriptor_base.take() {
                                    char_base.descs.insert(desc_base.uuid().clone(), desc_base);
                                }
                                serv_base.chars.insert(char_base.uuid().clone(), char_base);
                            }
                            character_base =
                                Some(RemoteCharBase::from_props(char_base_props, path.into())?);
                        } else if let Some(char_base) = &mut character_base {
                            if let Some(desc_base_props) = if_map.remove(DESC_IF_STR) {
                                // A new descriptor under the current characteristic.
                                if !path.starts_with(char_base.path()) {
                                    continue;
                                }
                                if let Some(desc_base) = descriptor_base {
                                    char_base.descs.insert(desc_base.uuid().clone(), desc_base);
                                }
                                descriptor_base = Some(RemoteDescBase::from_props(
                                    desc_base_props,
                                    path.into(),
                                )?);
                            }
                        }
                    }
                }
            }
            // Handle stragglers: fold the last in-flight bases into the final
            // device and store it.
            if let Some(mut dev_base) = device_base {
                if let Some(mut serv_base) = service_base {
                    if let Some(mut char_base) = character_base {
                        if let Some(desc_base) = descriptor_base {
                            char_base.descs.insert(desc_base.uuid().clone(), desc_base);
                        }
                        serv_base.chars.insert(char_base.uuid().clone(), char_base);
                    }
                    dev_base
                        .services
                        .insert(serv_base.uuid().clone(), serv_base);
                }
                self.devices.insert(dev_base.uuid().clone(), dev_base);
            }
            return Ok(ret);
        }
    }
}
/// Store a discovered device in the cache and index its DBus path component
/// (e.g. `dev_XX_...`) back to its MAC for path-based lookups.
fn insert_device(&mut self, device: RemoteDeviceBase) {
    let mac = device.mac.clone();
    let path_comp = device.path.file_name().unwrap().to_os_string();
    self.comp_map.insert(path_comp, mac.clone());
    self.devices.insert(mac, device);
}
/// Get a device from the Bluez controller.
///
/// **Calls process_requests()**
pub fn discover_device(&mut self, mac: &MAC) -> Result<(), Error> {
    // Translate the MAC into its DBus path component, rejecting bad input.
    let devmac = match mac_to_devmac(mac) {
        Some(d) => d,
        None => return Err(Error::BadInput("Invalid mac was given".to_string())),
    };
    let target: PathBuf = devmac.into();
    // Restrict discovery to just this device's subtree; discard the MAC list.
    self.discover_devices_filter(&self.blue_path.join(target))
        .map(|_| ())
}
}
/// Gets the underlying `RawFd` used to talk to DBus.
/// This can be useful for using `select`/`poll` to check
/// if [`process_requests`] needs to be called.
impl AsRawFd for Bluetooth {
    fn as_raw_fd(&self) -> RawFd {
        unsafe {
            // SAFETY(review): this casts `&self` to `&mut` because `conn_mut()`
            // requires a mutable reference. Creating a `&mut` from a shared
            // reference is undefined behavior under Rust's aliasing rules unless
            // the data sits behind an `UnsafeCell`; the immutable receiver alone
            // does NOT make this sound. Confirm `RpcConn` uses interior
            // mutability, or expose the fd via an `&self` accessor upstream.
            let rpc_con = &self.rpc_con as *const RpcConn as *mut RpcConn;
            let rpc_con = &mut *rpc_con;
            rpc_con.conn_mut().as_raw_fd()
        }
    }
}
/// Convert a colon-separated MAC (`XX:XX:XX:XX:XX:XX`) into the Bluez device
/// path component form `dev_XX_XX_XX_XX_XX_XX`.
/// Returns `None` if `mac` fails `validate_mac`.
pub fn mac_to_devmac(mac: &MAC) -> Option<String> {
    if !validate_mac(mac) {
        return None;
    }
    // "dev" + six "_XY" groups == 21 bytes.
    let mut devmac = String::with_capacity(21);
    devmac.push_str("dev");
    for octet in 0..6 {
        let start = octet * 3;
        devmac.push('_');
        devmac.push_str(&mac[start..start + 2]);
    }
    Some(devmac)
}
/// Check whether `devmac` is a Bluez device path component of the form
/// `dev_XX_XX_XX_XX_XX_XX` (21 ASCII bytes; octet characters must not be
/// lowercase).
///
/// BUG FIX: the original iterated `chars()` with `unwrap()` after checking
/// only the *byte* length, so a 21-byte string containing multi-byte UTF-8
/// (hence fewer than 21 chars) would panic. Non-ASCII input now returns
/// `false` instead.
pub fn validate_devmac(devmac: &str) -> bool {
    if devmac.len() != 21 || !devmac.is_ascii() || !devmac.starts_with("dev") {
        return false;
    }
    // The 18 bytes after "dev" form six "_XY" groups; like the original, only
    // lowercase characters are rejected in the octet positions.
    devmac.as_bytes()[3..].chunks_exact(3).all(|group| {
        group[0] == b'_'
            && !group[1].is_ascii_lowercase()
            && !group[2].is_ascii_lowercase()
    })
}
/// Convert a Bluez device path component (`dev_XX_XX_XX_XX_XX_XX`) into a
/// colon-separated MAC (`XX:XX:XX:XX:XX:XX`).
/// Returns `None` if `devmac` fails `validate_devmac`.
pub fn devmac_to_mac(devmac: &str) -> Option<MAC> {
    if !validate_devmac(devmac) {
        return None;
    }
    // Skip the "dev" prefix; the remainder is six "_XY" groups.
    let octets = &devmac[3..];
    let mut mac = String::with_capacity(17);
    for i in 0..6 {
        if i != 0 {
            mac.push(':');
        }
        let start = i * 3 + 1;
        mac.push_str(&octets[start..start + 2]);
    }
    Some(mac.into())
}
/*
pub fn unknown_method<'a, 'b>(call: &Message<'_,'_>) -> Message<'a,'b> {
let text = format!(
"No calls to {}.{} are accepted for object {}",
call.interface.clone().unwrap_or_else(|| "".to_owned()),
call.member.clone().unwrap_or_else(|| "".to_owned()),
call.object.clone().unwrap_or_else(|| "".to_owned()),
);
let err_name = "org.freedesktop.DBus.Error.UnknownMethod".to_owned();
let mut err_resp = Message {
typ: MessageType::Reply,
interface: None,
member: None,
params: Vec::new(),
object: None,
destination: call.sender.clone(),
serial: None,
raw_fds: Vec::new(),
num_fds: None,
sender: None,
response_serial: call.serial,
dynheader.error_name: Some(err_name),
flags: 0,
};
err_resp.push_param(text);
err_resp
}
*/
/// DBus `org.freedesktop.DBus.ObjectManager` support.
trait ObjectManager {
    /// Route an incoming ObjectManager method call; only `GetManagedObjects`
    /// is supported.
    fn objectmanager_call(&mut self, msg: MarshalledMessage) -> MarshalledMessage {
        match msg.dynheader.member.as_ref().unwrap().as_ref() {
            MANGAGED_OBJ_CALL => self.get_managed_object(msg),
            _ => standard_messages::unknown_method(&msg.dynheader),
        }
    }
    /// Signature of each per-object value in the GetManagedObjects reply:
    /// a dict of interface name -> property map (a{sa{sv}}).
    fn object_manager_type() -> signature::Type {
        signature::Type::Container(signature::Container::Dict(
            signature::Base::String,
            Box::new(LocalServiceBase::get_all_type()),
        ))
    }
    /// Build the reply listing every object this instance exports.
    fn get_managed_object(&mut self, msg: MarshalledMessage) -> MarshalledMessage;
}
impl ObjectManager for Bluetooth {
    /// Answer `GetManagedObjects` with an a{oa{sa{sv}}} dict covering every
    /// exported service, characteristic, and descriptor.
    fn get_managed_object(&mut self, msg: MarshalledMessage) -> MarshalledMessage {
        let mut reply = msg.dynheader.make_response();
        // object path -> (interface -> properties)
        let mut outer_dict: HashMap<Base, Param> = HashMap::new();
        for service in self.services.values_mut() {
            for characteristic in service.chars.values_mut() {
                for desc in characteristic.descs.values_mut() {
                    // Descriptor entry.
                    let mut middle_map = HashMap::new();
                    for interface in LocalDescBase::INTERFACES {
                        let props = desc.get_all_inner(interface.0).unwrap();
                        middle_map.insert(interface.0.to_string().into(), props);
                    }
                    let middle_cont: Container = (
                        signature::Base::String,
                        LocalDescBase::get_all_type(),
                        middle_map,
                    )
                        .try_into()
                        .unwrap();
                    outer_dict.insert(
                        Base::ObjectPath(desc.path.to_str().unwrap().to_string()),
                        middle_cont.into(),
                    );
                }
                // Characteristic entry.
                let mut middle_map = HashMap::new();
                for interface in LocalCharBase::INTERFACES {
                    let props = characteristic.get_all_inner(interface.0).unwrap();
                    middle_map.insert(interface.0.to_string().into(), props);
                }
                let middle_cont: Container = (
                    signature::Base::String,
                    LocalCharBase::get_all_type(),
                    middle_map,
                )
                    .try_into()
                    .unwrap();
                outer_dict.insert(
                    Base::ObjectPath(characteristic.path.to_str().unwrap().to_string()),
                    middle_cont.into(),
                );
            }
            // Service entry.
            let mut middle_map = HashMap::new();
            for interface in LocalServiceBase::INTERFACES {
                let props = service.get_all_inner(interface.0).unwrap();
                middle_map.insert(interface.0.to_string().into(), props);
            }
            let middle_cont: Container = (
                signature::Base::String,
                LocalServiceBase::get_all_type(),
                middle_map,
            )
                .try_into()
                .unwrap();
            outer_dict.insert(
                Base::ObjectPath(service.path.to_str().unwrap().to_string()),
                middle_cont.into(),
            );
        }
        let outer_cont: Container = (
            signature::Base::ObjectPath,
            Self::object_manager_type(),
            outer_dict,
        )
            .try_into()
            .unwrap();
        reply.body.push_old_param(&outer_cont.into()).unwrap();
        reply
    }
}
/// DBus `org.freedesktop.DBus.Properties` support.
trait Properties {
    /// Value type in GetAll replies: a variant.
    const GET_ALL_ITEM: signature::Type = signature::Type::Container(signature::Container::Variant);
    /// Signature of a GetAll reply body: a{sv}.
    fn get_all_type() -> signature::Type {
        signature::Type::Container(signature::Container::Dict(
            signature::Base::String,
            Box::new(Self::GET_ALL_ITEM),
        ))
    }
    /// (interface name, property names) pairs this object implements.
    const INTERFACES: &'static [(&'static str, &'static [&'static str])];
    /// Route an incoming Properties method call to Get/Set/GetAll.
    fn properties_call(&mut self, msg: MarshalledMessage) -> MarshalledMessage {
        match msg.dynheader.member.as_ref().unwrap().as_ref() {
            "Get" => self.get(msg),
            "Set" => self.set(msg),
            GET_ALL_CALL => self.get_all(msg),
            _ => standard_messages::unknown_method(&msg.dynheader),
        }
    }
    /// Collect all properties of `interface` into an a{sv} dict parameter;
    /// `None` if the interface is not implemented by this object.
    fn get_all_inner<'a, 'b>(&mut self, interface: &str) -> Option<Param<'a, 'b>> {
        let props = Self::INTERFACES
            .iter()
            .find(|i| interface == i.0)
            .map(|i| i.1)?;
        let mut prop_map = HashMap::new();
        for prop in props {
            // Every declared property must resolve; a `None` here is a bug.
            let val = self.get_inner(interface, prop).unwrap();
            prop_map.insert(prop.to_string().into(), val);
        }
        let prop_cont = Container::Dict(params::Dict {
            key_sig: signature::Base::String,
            value_sig: Self::GET_ALL_ITEM,
            map: prop_map,
        });
        Some(Param::Container(prop_cont))
    }
    /// Handle a `GetAll` call: expects a single interface-name argument.
    fn get_all(&mut self, msg: MarshalledMessage) -> MarshalledMessage {
        let msg = msg.unmarshall_all().unwrap();
        let interface = if let Some(Param::Base(Base::String(interface))) = msg.params.get(0) {
            interface
        } else {
            return msg
                .dynheader
                .make_error_response("Missing interface".to_string(), None);
        };
        if let Some(param) = self.get_all_inner(&interface) {
            let mut res = msg.make_response();
            res.body.push_old_param(&param).unwrap();
            res
        } else {
            let err_msg = format!(
                "Interface {} is not known on {}",
                interface,
                msg.dynheader.object.as_ref().unwrap()
            );
            msg.dynheader
                .make_error_response("InterfaceNotFound".to_string(), Some(err_msg))
        }
    }
    /// Handle a `Get` call: expects (interface, property) string arguments.
    fn get(&mut self, msg: MarshalledMessage) -> MarshalledMessage {
        let msg = msg.unmarshall_all().unwrap();
        if msg.params.len() < 2 {
            let err_str = "Expected two string arguments".to_string();
            return msg
                .dynheader
                .make_error_response("Invalid arguments".to_string(), Some(err_str));
        }
        let interface = if let Param::Base(Base::String(interface)) = &msg.params[0] {
            interface
        } else {
            let err_str = "Expected string interface as first argument!".to_string();
            return msg
                .dynheader
                .make_error_response("Invalid arguments".to_string(), Some(err_str));
        };
        let prop = if let Param::Base(Base::String(prop)) = &msg.params[1] {
            prop
        } else {
            let err_str = "Expected string property as second argument!".to_string();
            return msg
                .dynheader
                .make_error_response("Invalid arguments".to_string(), Some(err_str));
        };
        if let Some(param) = self.get_inner(interface, prop) {
            let mut reply = msg.make_response();
            reply.body.push_old_param(&param).unwrap();
            reply
        } else {
            let s = format!("Property {} on interface {} not found.", prop, interface);
            msg.dynheader
                .make_error_response("PropertyNotFound".to_string(), Some(s))
        }
    }
    /// Should return a variant containing the property if it is found. If it is not found
    /// then it returns `None`.
    fn get_inner<'a, 'b>(&mut self, interface: &str, prop: &str) -> Option<Param<'a, 'b>>;
    /// Apply a property write; `Some(error_name)` on failure, `None` on success.
    fn set_inner(&mut self, interface: &str, prop: &str, val: Variant) -> Option<String>;
    /// Handle a `Set` call: expects (interface, property, variant) arguments.
    fn set(&mut self, msg: MarshalledMessage) -> MarshalledMessage {
        let (interface, prop, var): (&str, &str, Variant) = match msg.body.parser().get3() {
            Ok(vals) => vals,
            Err(err) => {
                return msg.dynheader.make_error_response(
                    "InvalidParameters".to_string(),
                    Some(format!("{:?}", err)),
                )
            }
        };
        if let Some(err_str) = self.set_inner(interface, prop, var) {
            msg.dynheader.make_error_response(err_str, None)
        } else {
            msg.dynheader.make_response()
        }
    }
}
/// The root `Bluetooth` object exposes no properties of its own; everything
/// interesting lives on the child GATT/advertisement objects.
impl Properties for Bluetooth {
    const INTERFACES: &'static [(&'static str, &'static [&'static str])] = &[];
    fn get_inner<'a, 'b>(&mut self, _interface: &str, _prop: &str) -> Option<Param<'a, 'b>> {
        // No interfaces are declared, so no property can ever be found.
        None
    }
    fn set_inner(&mut self, _interface: &str, _prop: &str, _val: Variant) -> Option<String> {
        // Property writes on the root object are not supported.
        unimplemented!()
    }
}
fn base_param_to_variant(b: Base) -> Param {
let var = match b {
Base::String(s) => params::Variant {
sig: signature::Type::Base(signature::Base::String),
value: Param::Base(s.into()),
},
Base::Boolean(b) => params::Variant {
sig: signature::Type::Base(signature::Base::Boolean),
value: Param::Base(b.into()),
},
Base::Uint16(u) => params::Variant {
sig: signature::Type::Base(signature::Base::Uint16),
value: Param::Base(u.into()),
},
Base::ObjectPath(p) => params::Variant {
sig: signature::Type::Base(signature::Base::ObjectPath),
value: Param::Base(Base::ObjectPath(p)),
},
Base::Byte(b) => params::Variant {
sig: signature::Type::Base(signature::Base::Byte),
value: Param::Base(b.into()),
},
Base::Uint64(b) => params::Variant {
sig: rustbus::signature::Type::Base(signature::Base::Uint64),
value: Param::Base(b.into()),
},
_ => unimplemented!(),
};
Param::Container(Container::Variant(Box::new(var)))
}
/// Wrap a container DBus value (dict or array) in a variant parameter,
/// deriving the variant's signature from the container's element signatures.
/// Other container kinds panic.
fn container_param_to_variant<'a, 'b>(c: Container<'a, 'b>) -> Param<'a, 'b> {
    let var = match c {
        Container::Dict(dict) => {
            let sig = signature::Type::Container(signature::Container::Dict(
                dict.key_sig.clone(),
                Box::new(dict.value_sig.clone()),
            ));
            params::Variant {
                sig,
                value: Param::Container(Container::Dict(dict)),
            }
        }
        Container::Array(array) => {
            let sig = signature::Type::Container(signature::Container::Array(Box::new(
                array.element_sig.clone(),
            )));
            params::Variant {
                sig,
                value: Param::Container(Container::Array(array)),
            }
        }
        _ => unimplemented!(),
    };
    Param::Container(Container::Variant(Box::new(var)))
}
/// Validate that `uuid` is a well-formed 36-character UUID of the form
/// `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` (hex digits, dashes at byte
/// positions 8, 13, 18, and 23).
///
/// BUG FIX: the original sliced `&uuid[..8]` etc. after checking only the
/// *byte* length, which panics when a 36-byte input contains multi-byte UTF-8
/// that straddles a slice boundary; it also accepted a stray leading `+` in
/// each group (a `from_str_radix` quirk). Both are rejected cleanly now.
pub fn validate_uuid(uuid: &str) -> bool {
    if uuid.len() != 36 {
        return false;
    }
    uuid.bytes().enumerate().all(|(i, b)| match i {
        // Dash separators at the standard 8-4-4-4-12 group boundaries.
        8 | 13 | 18 | 23 => b == b'-',
        _ => b.is_ascii_hexdigit(),
    })
}
/// A lazily-unmarshalled DBus variant: stores the signature of the contained
/// value plus the raw wire bytes, deferring decoding until `get` is called.
struct Variant<'buf> {
    // Signature of the contained value.
    sig: signature::Type,
    // Byte order the message was marshalled with.
    byteorder: ByteOrder,
    // Offset of the value within `buf`.
    offset: usize,
    // Wire bytes, up to and including the value.
    buf: &'buf [u8],
}
impl<'r, 'buf: 'r> Variant<'buf> {
    /// Signature of the value stored in this variant.
    pub fn get_value_sig(&self) -> &signature::Type {
        &self.sig
    }
    /// Decode the contained value as `T`.
    ///
    /// Fails with `WrongSignature` if `T`'s signature does not match the
    /// variant's stored signature.
    pub fn get<T: Unmarshal<'r, 'buf>>(&self) -> Result<T, UnmarshalError> {
        if self.sig != T::signature() {
            return Err(UnmarshalError::WrongSignature);
        }
        // Discard the bytes-consumed count; only the decoded value is needed.
        T::unmarshal(self.byteorder, self.buf, self.offset).map(|r| r.1)
    }
}
/// `Variant` marshals/unmarshals as a DBus variant (`v`).
impl Signature for Variant<'_> {
    fn signature() -> signature::Type {
        signature::Type::Container(signature::Container::Variant)
    }
    fn alignment() -> usize {
        // Alignment follows directly from the variant signature.
        Self::signature().get_alignment()
    }
}
impl<'r, 'buf: 'r> Unmarshal<'r, 'buf> for Variant<'buf> {
    /// Parse a variant header (its signature) and validate — but do not
    /// decode — the value bytes; decoding happens on demand in `Variant::get`.
    fn unmarshal(
        byteorder: ByteOrder,
        buf: &'buf [u8],
        offset: usize,
    ) -> unmarshal::UnmarshalResult<Self> {
        // A variant begins with a length-prefixed signature string.
        let (mut used, desc) = rustbus::wire::util::unmarshal_signature(&buf[offset..])?;
        let mut sigs = match signature::Type::parse_description(desc) {
            Ok(sigs) => sigs,
            Err(_) => return Err(UnmarshalError::WrongSignature),
        };
        // A variant must contain exactly one complete type.
        if sigs.len() != 1 {
            return Err(UnmarshalError::WrongSignature);
        }
        let sig = sigs.remove(0);
        // The value is aligned to its own type's alignment requirement.
        used += rustbus::wire::util::align_offset(sig.get_alignment(), buf, offset + used)?;
        let start_loc = offset + used;
        // Validate the value bytes now so `get` can decode without re-checking.
        used += rustbus::wire::validate_raw::validate_marshalled(byteorder, start_loc, buf, &sig)
            .map_err(|e| e.1)?;
        Ok((
            used,
            Variant {
                sig,
                buf: &buf[..offset + used],
                offset: start_loc,
                byteorder,
            },
        ))
    }
}
impl Marshal for Variant<'_> {
    /// Re-emits the variant: signature length byte, signature string,
    /// alignment padding, then the raw value bytes captured at
    /// unmarshal time.
    fn marshal(&self, byteorder: ByteOrder, buf: &mut Vec<u8>) -> Result<(), rustbus::Error> {
        // The stored value bytes are only valid in the byte order they
        // were unmarshalled with, so a mismatch cannot be honored.
        if let ByteOrder::LittleEndian = byteorder {
            if let ByteOrder::BigEndian = self.byteorder {
                panic!("Byte order mismatch");
            }
        } else {
            if let ByteOrder::LittleEndian = self.byteorder {
                panic!("Byte order mismatch");
            }
        }
        let mut sig_str = String::new();
        self.sig.to_str(&mut sig_str);
        // Signature length must fit one byte; the `as u8` cast below
        // silently truncates longer signatures in release builds.
        // NOTE(review): consider a hard assert — confirm intent.
        debug_assert!(sig_str.len() <= 255);
        buf.push(sig_str.len() as u8);
        buf.extend_from_slice(sig_str.as_bytes());
        rustbus::wire::util::pad_to_align(self.sig.get_alignment(), buf);
        // Append the raw, pre-validated value bytes.
        buf.extend_from_slice(&self.buf[self.offset..]);
        Ok(())
    }
}
|
#![feature(box_syntax, box_patterns)]
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
/// Opens `filename` and returns a buffered iterator over its lines.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|file| io::BufReader::new(file).lines())
}
fn main() {
    let filename = "/home/remy/AOC/2020/15/input";
    // memory[n] = 1-based turn on which `n` was last spoken;
    // u32::MAX marks "never spoken". Boxed so the 30M-entry array lives
    // on the heap (requires the nightly `box_syntax` feature).
    let mut memory = box [u32::MAX; 30000000];
    if let Ok(lines) = read_lines(filename) {
        let mut latest_spoken: u32;
        let mut i: u32 = 0;
        for line in lines {
            if let Ok(line) = line {
                // Starting numbers are comma-separated on one line.
                let numbers: Vec<&str> = line.split(",").collect();
                for (idx, number) in numbers.iter().enumerate() {
                    i += 1;
                    memory[number.parse::<u32>().unwrap() as usize] = (1+idx) as u32;
                }
            }
        }
        // NOTE(review): starting with `latest_spoken = 0` assumes the
        // last starting number had not appeared earlier in the input
        // (so the following turn speaks 0) — confirm for general inputs.
        latest_spoken = 0;
        i += 1;
        for i in i..30000000 {
            unsafe {
                // SAFETY: latest_spoken is either 0 or `i - entry` with
                // entry < i < 30000000, so it always indexes within the
                // 30000000-element array.
                let tmp = memory.get_unchecked_mut(latest_spoken as usize);
                match tmp {
                    // Never spoken before: record this turn, speak 0 next.
                    &mut u32::MAX => {
                        *tmp = i;
                        latest_spoken = 0;
                    },
                    // Spoken before: speak its age (turns since last seen).
                    &mut entry => {
                        memory[latest_spoken as usize] = i;
                        latest_spoken = i-entry;
                    }
                }
            }
        }
        println!("{}", latest_spoken);
    } else {
        println!("Error");
        return;
    }
} |
use super::*;
use nom::{
multi::{fold_many0, many0},
sequence::pair,
};
// See `https://doc.rust-lang.org/reference/expressions.html#expression-precedence`
// for expression precedence
/// Parses any expression. Struct/enum literal forms are tried first
/// because they share a leading `var` with plain variable expressions.
pub fn expr(input: Input) -> IResult<Expr> {
    struct_expr.or(enum_expr).or(expr_no_struct).parse(input)
}
/// Parses an expression excluding struct/enum literals; used where a
/// following `{` must start a block (`if`/`match` heads).
pub fn expr_no_struct(input: Input) -> IResult<Expr> {
    lambda_expr
        .or(return_expr)
        .or(break_expr)
        .or(continue_expr)
        .or(assign_expr)
        .or(or_expr)
        .parse(input)
}
/// Parses a lambda: `(params) => expr`.
fn lambda_expr(input: Input) -> IResult<Expr> {
    let (input, parsed_params) = param_list.parse(input)?;
    let (input, arrow_tok) = fat_arrow.parse(input)?;
    let (input, body) = expr.parse(input)?;
    let lambda = LambdaExpr {
        params: parsed_params,
        fat_arrow: arrow_tok,
        expr: Box::new(body),
    };
    Ok((input, Expr::Lambda(lambda)))
}
/// Struct literal: `Name { field: expr, ... }`.
fn struct_expr(input: Input) -> IResult<Expr> {
    let (input, name) = var.parse(input)?;
    let (input, fields) = curly(punctuated0(field_init, comma)).parse(input)?;
    Ok((input, Expr::Struct(StructExpr { name, fields })))
}
/// A field initializer; `expr` is optional to allow shorthand `name`.
fn field_init(input: Input) -> IResult<FieldInit> {
    let (input, name) = var.parse(input)?;
    let (input, expr) = (pair(colon, expr)).opt().parse(input)?;
    Ok((input, FieldInit { name, expr }))
}
/// Enum literal: `Name::Variant { field: expr, ... }`.
fn enum_expr(input: Input) -> IResult<Expr> {
    let (input, name) = var.parse(input)?;
    let (input, colon_colon) = colon_colon.parse(input)?;
    let (input, variant) = var.parse(input)?;
    let (input, fields) = curly(punctuated0(field_init, comma)).parse(input)?;
    Ok((
        input,
        Expr::Enum(EnumExpr {
            name,
            colon_colon,
            variant,
            fields,
        }),
    ))
}
/// `return expr?` — the value is optional.
fn return_expr(input: Input) -> IResult<Expr> {
    let (input, kw_return) = kw_return.parse(input)?;
    let (input, expr) = expr.opt().parse(input)?;
    Ok((
        input,
        Expr::Return(ReturnExpr {
            kw_return,
            expr: expr.map(Box::new),
        }),
    ))
}
/// `break expr?` — the value is optional.
fn break_expr(input: Input) -> IResult<Expr> {
    let (input, kw_break) = kw_break.parse(input)?;
    let (input, expr) = expr.opt().parse(input)?;
    Ok((
        input,
        Expr::Break(BreakExpr {
            kw_break,
            expr: expr.map(Box::new),
        }),
    ))
}
/// `continue` carries no value.
fn continue_expr(input: Input) -> IResult<Expr> {
    let (input, kw_continue) = kw_continue.parse(input)?;
    Ok((input, Expr::Continue(ContinueExpr { kw_continue })))
}
/// `=` as a binary operator; assignment recurses into `expr` on the
/// right, making it right-associative (`1=2=3` parses as `1=(2=3)`).
fn assign_op(input: Input) -> IResult<Binop> { eq.map(Binop::Assign).parse(input) }
/// Parses `target = value`, where `value` may itself be an assignment.
fn assign_expr(input: Input) -> IResult<Expr> {
    let (input, target) = or_expr.parse(input)?;
    let (input, assign) = assign_op.parse(input)?;
    let (input, value) = expr.parse(input)?;
    let node = BinaryExpr {
        lhs: Box::new(target),
        op: assign,
        rhs: Box::new(value),
    };
    Ok((input, Expr::Binary(node)))
}
// Binary operator levels, lowest to highest precedence:
// `||` < `&&` < comparisons < `+ -` < `* / %`. Each level parses one
// operand of the next-higher level and then left-folds `(op, operand)`
// pairs, so all of these operators associate to the left.
fn or_op(input: Input) -> IResult<Binop> { or_or.map(Binop::Or).parse(input) }
fn or_expr(input: Input) -> IResult<Expr> {
    let (input, init) = and_expr.parse(input)?;
    fold_many0(pair(or_op, and_expr), init, |lhs, (op, rhs)| {
        Expr::Binary(BinaryExpr {
            lhs: box lhs,
            op,
            rhs: box rhs,
        })
    })
    .parse(input)
}
fn and_op(input: Input) -> IResult<Binop> { and_and.map(Binop::And).parse(input) }
fn and_expr(input: Input) -> IResult<Expr> {
    let (input, init) = cmp_expr.parse(input)?;
    fold_many0(pair(and_op, cmp_expr), init, |lhs, (op, rhs)| {
        Expr::Binary(BinaryExpr {
            lhs: box lhs,
            op,
            rhs: box rhs,
        })
    })
    .parse(input)
}
// NOTE(review): `less` is tried before `less_eq` (and `greater` before
// `greater_eq`); this is only correct if the token layer never lexes
// `<=` as `<` followed by `=` — confirm against the tokenizer.
fn cmp_op(input: Input) -> IResult<Binop> {
    (eq_eq.map(Binop::Eq))
        .or(bang_eq.map(Binop::NotEq))
        .or(less.map(Binop::Less))
        .or(less_eq.map(Binop::LessEq))
        .or(greater.map(Binop::Greater))
        .or(greater_eq.map(Binop::GreaterEq))
        .parse(input)
}
fn cmp_expr(input: Input) -> IResult<Expr> {
    let (input, init) = add_expr.parse(input)?;
    fold_many0(pair(cmp_op, add_expr), init, |lhs, (op, rhs)| {
        Expr::Binary(BinaryExpr {
            lhs: box lhs,
            op,
            rhs: box rhs,
        })
    })
    .parse(input)
}
fn add_op(input: Input) -> IResult<Binop> {
    (plus.map(Binop::Add))
        .or(minus.map(Binop::Sub))
        .parse(input)
}
fn add_expr(input: Input) -> IResult<Expr> {
    let (input, init) = mul_expr.parse(input)?;
    fold_many0(pair(add_op, mul_expr), init, |lhs, (op, rhs)| {
        Expr::Binary(BinaryExpr {
            lhs: box lhs,
            op,
            rhs: box rhs,
        })
    })
    .parse(input)
}
fn mul_op(input: Input) -> IResult<Binop> {
    (star.map(Binop::Mul))
        .or(slash.map(Binop::Div))
        .or(percent.map(Binop::Rem))
        .parse(input)
}
fn mul_expr(input: Input) -> IResult<Expr> {
    let (input, init) = unary_expr.parse(input)?;
    fold_many0(pair(mul_op, unary_expr), init, |lhs, (op, rhs)| {
        Expr::Binary(BinaryExpr {
            lhs: box lhs,
            op,
            rhs: box rhs,
        })
    })
    .parse(input)
}
/// Prefix operators: `!`, unary `+`, unary `-`.
fn unary_op(input: Input) -> IResult<Unop> {
    bang.map(Unop::Not)
        .or(plus.map(Unop::Add))
        .or(minus.map(Unop::Sub))
        .parse(input)
}
/// Prefix expressions nest (`--2`), bottoming out at a suffix expr.
fn unary_expr(input: Input) -> IResult<Expr> {
    pair(unary_op, unary_expr)
        .map(|(op, expr)| Expr::Unary(UnaryExpr { op, expr: box expr }))
        .or(suffix_expr)
        .parse(input)
}
/// A postfix element: a call argument list or a field access.
enum Suffix {
    Call(ArgList),
    Field(Dot, Field),
}
/// `( expr, expr, ... )` — call arguments.
fn arg_list(input: Input) -> IResult<ArgList> {
    paren(punctuated0(expr, comma)).map(ArgList).parse(input)
}
/// A field name: a decimal index (tuple field) or an identifier.
fn field(input: Input) -> IResult<Field> {
    (dec_int.map(Field::Tuple))
        .or(var.map(Field::Named))
        .parse(input)
}
fn suffix(input: Input) -> IResult<Suffix> {
    (arg_list.map(Suffix::Call))
        .or(pair(dot, field).map(|(dot, field)| Suffix::Field(dot, field)))
        .parse(input)
}
/// An atom followed by any number of call/field suffixes, folded
/// left-to-right so `f()()` and `x.a.b` nest correctly.
fn suffix_expr(input: Input) -> IResult<Expr> {
    let (input, init) = atom_expr.parse(input)?;
    fold_many0(suffix, init, |expr, suffix| match suffix {
        Suffix::Call(args) => Expr::Call(CallExpr {
            func: box expr,
            args,
        }),
        Suffix::Field(dot, field) => Expr::Field(FieldExpr {
            base: box expr,
            dot,
            field,
        }),
    })
    .parse(input)
}
/// The highest-precedence expression forms.
fn atom_expr(input: Input) -> IResult<Expr> {
    lit_expr
        .or(var_expr)
        .or(paren_expr)
        .or(tuple_expr)
        .or(loop_expr)
        .or(if_expr)
        .or(match_expr)
        .or(block_expr)
        .parse(input)
}
fn lit_expr(input: Input) -> IResult<Expr> { lit.map(Expr::Lit).parse(input) }
fn var_expr(input: Input) -> IResult<Expr> { var.map(Expr::Var).parse(input) }
fn paren_expr(input: Input) -> IResult<Expr> { paren(expr).map(Expr::Paren).parse(input) }
fn tuple_expr(input: Input) -> IResult<Expr> { tuple(expr).map(Expr::Tuple).parse(input) }
/// Parses `if cond { ... } (else ...)?`; the condition uses
/// `expr_no_struct` so the `{` of the then-branch is unambiguous.
fn if_expr(input: Input) -> IResult<Expr> {
    let (input, if_kw) = kw_if.parse(input)?;
    let (input, cond) = expr_no_struct.parse(input)?;
    let (input, then_block) = block_expr.parse(input)?;
    let (input, else_part) = else_expr.opt().parse(input)?;
    let node = IfExpr {
        kw_if: if_kw,
        test_expr: Box::new(cond),
        then_branch: Box::new(then_block),
        else_branch: else_part,
    };
    Ok((input, Expr::If(node)))
}
/// The tail of an `if`: either `else if …` (recursing into `if_expr`)
/// or a final `else { … }` block.
fn else_expr(input: Input) -> IResult<ElseExpr> {
    let (input, kw_else) = kw_else.parse(input)?;
    (if_expr.map(|if_expr| ElseExpr::ElseIf {
        kw_else,
        if_expr: box if_expr,
    }))
    .or(block_expr.map(|block| ElseExpr::ElseBlock {
        kw_else,
        block: box block,
    }))
    .parse(input)
}
/// `match scrutinee { pat => expr, ... }`; the scrutinee uses
/// `expr_no_struct` so `{` unambiguously starts the case list.
fn match_expr(input: Input) -> IResult<Expr> {
    let (input, kw_match) = kw_match.parse(input)?;
    let (input, test_expr) = expr_no_struct.parse(input)?;
    let (input, cases) = curly(punctuated0(match_case, comma)).parse(input)?;
    Ok((
        input,
        Expr::Match(MatchExpr {
            kw_match,
            test_expr: box test_expr,
            cases,
        }),
    ))
}
/// A single `pat => expr` arm.
fn match_case(input: Input) -> IResult<MatchCase> {
    let (input, pat) = pat.parse(input)?;
    let (input, fat_arrow) = fat_arrow.parse(input)?;
    let (input, expr) = expr.parse(input)?;
    Ok((
        input,
        MatchCase {
            pat,
            fat_arrow,
            expr,
        },
    ))
}
/// `loop { ... }`.
fn loop_expr(input: Input) -> IResult<Expr> {
    let (input, kw_loop) = kw_loop.parse(input)?;
    let (input, expr) = block_expr.parse(input)?;
    Ok((
        input,
        Expr::Loop(LoopExpr {
            kw_loop,
            expr: box expr,
        }),
    ))
}
/// Parses a braced block: `{ stmt* expr? }`.
///
/// A trailing statement-expression without a semicolon (a final
/// `if`/`loop`/`{}`) is promoted to the block's trailing expression —
/// but only when no explicit trailing expression was parsed. The
/// previous version promoted unconditionally, silently discarding an
/// already-parsed trailing expression in input like `{if true {} x}`
/// (the `x` was dropped from the AST).
pub fn block(input: Input) -> IResult<Block> {
    let (input, lcurly) = lcurly.parse(input)?;
    let (input, stmts) = many0(stmt).parse(input)?;
    let (input, expr) = expr.opt().parse(input)?;
    let (input, rcurly) = rcurly.parse(input)?;
    let (stmts, expr) = if expr.is_none() {
        match stmts.split_last() {
            Some((
                Stmt::Expr {
                    expr,
                    semicolon: None,
                },
                stmts,
            )) => (stmts.to_vec(), Some(expr.clone())),
            _ => (stmts, expr),
        }
    } else {
        (stmts, expr)
    };
    Ok((
        input,
        Block {
            lcurly,
            stmts,
            expr: Box::new(expr),
            rcurly,
        },
    ))
}
pub fn block_expr(input: Input) -> IResult<Expr> { block.map(Expr::Block).parse(input) }
/// A statement: block-like exprs may omit the semicolon; other exprs
/// require one; plus `let` bindings and stray semicolons.
fn stmt(input: Input) -> IResult<Stmt> {
    blocklike_expr_stmt
        .or(expr_stmt)
        .or(let_stmt)
        .or(semicolon_stmt)
        .parse(input)
}
/// `if`/`loop`/`{}` used in statement position without a semicolon.
fn blocklike_expr_stmt(input: Input) -> IResult<Stmt> {
    let (input, expr) = if_expr.or(loop_expr).or(block_expr).parse(input)?;
    Ok((
        input,
        Stmt::Expr {
            expr,
            semicolon: None,
        },
    ))
}
/// `expr ;`
fn expr_stmt(input: Input) -> IResult<Stmt> {
    let (input, expr) = expr.parse(input)?;
    let (input, semicolon) = semicolon.parse(input)?;
    Ok((
        input,
        Stmt::Expr {
            expr,
            semicolon: Some(semicolon),
        },
    ))
}
/// `let pat (: ty)? = expr ;`
fn let_stmt(input: Input) -> IResult<Stmt> {
    let (input, kw_let) = kw_let.parse(input)?;
    let (input, pat) = pat.parse(input)?;
    let (input, ascription) = ascription.opt().parse(input)?;
    let (input, eq) = eq.parse(input)?;
    let (input, expr) = expr.parse(input)?;
    let (input, semicolon) = semicolon.parse(input)?;
    Ok((
        input,
        (Stmt::Let {
            kw_let,
            pat,
            ascription,
            eq,
            expr,
            semicolon,
        }),
    ))
}
/// A lone `;` is a valid (empty) statement.
fn semicolon_stmt(input: Input) -> IResult<Stmt> { semicolon.map(Stmt::Semicolon).parse(input) }
#[cfg(test)]
mod tests {
    use super::*;
    // Smoke tests: each line asserts that the given source string
    // parses successfully with the named parser (`expr`).
    test_parse!(true_expr, expr, r#"true"#);
    test_parse!(false_expr, expr, r#"false"#);
    test_parse!(int_expr, expr, r#"123"#);
    test_parse!(assign_expr, expr, r#"1=2"#);
    test_parse!(nested_assign_expr, expr, r#"1=2=3"#);
    test_parse!(add_expr, expr, r#"1+2"#);
    test_parse!(nested_add_expr, expr, r#"1+2+3"#);
    test_parse!(mul_expr, expr, r#"1*2"#);
    test_parse!(nested_mul_expr, expr, r#"1*2*3"#);
    test_parse!(prefix_expr, expr, r#"-2"#);
    test_parse!(nested_prefix_expr, expr, r#"--2"#);
    test_parse!(call_expr, expr, r#"f()"#);
    test_parse!(nested_call_expr, expr, r#"f()()"#);
    test_parse!(tuple_field_expr, expr, r#"x.0"#);
    test_parse!(named_field_expr, expr, r#"x.y"#);
    test_parse!(nested_field_expr, expr, r#"x._0._0"#);
    test_parse!(mixed_expr, expr, r#"-1+2"#);
    test_parse!(lambda_expr, expr, r#"() => 1"#);
    test_parse!(nested_lambda_expr, expr, r#"(x) => (_) => x"#);
    test_parse!(if_expr, expr, r#"if true {}"#);
    test_parse!(if_else_expr, expr, r#"if true {} else {}"#);
    test_parse!(if_else_if_expr, expr, r#"if true {} else if false {}"#);
    test_parse!(loop_expr, expr, r#"loop {}"#);
    test_parse!(return_expr, expr, r#"return 5"#);
    test_parse!(break_expr, expr, r#"break 5"#);
    test_parse!(continue_expr, expr, r#"continue"#);
    test_parse!(block_expr, expr, r#"{x; y; z}"#);
    test_parse!(block_expr2, expr, r#"{if true {} loop {} {} x}"#);
    test_parse!(block_expr3, expr, r#"{let x = 5; loop {}}"#);
    test_parse!(struct_expr, expr, r#"Foo {x: 1, y: 2}"#);
    test_parse!(enum_expr, expr, r#"Foo::Bar {x: 1, y: 2}"#);
    test_parse!(match_expr, expr, r#"match 5 {x => x}"#);
}
|
use nu_protocol::{
ast::Call,
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, SyntaxShape,
};
#[derive(Clone)]
/// The `exec` command: replaces the current nu process with an
/// external command (see the platform-specific `exec` fn below).
pub struct Exec;
impl Command for Exec {
    fn name(&self) -> &str {
        "exec"
    }
    fn signature(&self) -> Signature {
        // One required command name plus any number of string arguments.
        Signature::build("exec")
            .required("command", SyntaxShape::String, "the command to execute")
            .rest(
                "rest",
                SyntaxShape::String,
                "any additional arguments for the command",
            )
            .category(Category::System)
    }
    fn usage(&self) -> &str {
        "Execute a command, replacing the current process."
    }
    fn extra_usage(&self) -> &str {
        "Currently supported only on Unix-based systems."
    }
    // Delegates to the platform-specific free function `exec`.
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
        exec(engine_state, stack, call)
    }
    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Execute external 'ps aux' tool",
                example: "exec ps aux",
                result: None,
            },
            Example {
                description: "Execute 'nautilus'",
                example: "exec nautilus",
                result: None,
            },
        ]
    }
}
/// Replaces the current nu process with the requested external command
/// (Unix only). On success `exec(2)` never returns; reaching the final
/// `Err` means the underlying syscall failed.
#[cfg(unix)]
fn exec(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
    use std::os::unix::process::CommandExt;
    use super::run_external::ExternalCommand;
    use nu_engine::{current_dir, env_to_strings, CallExt};
    use nu_protocol::ast::Expr;
    use nu_protocol::Spanned;
    let name: Spanned<String> = call.req(engine_state, stack, 0)?;
    let name_span = name.span;
    let args: Vec<Spanned<String>> = call.rest(engine_state, stack, 1)?;
    // refer to `parse_dollar_expr` function:
    // the expression type of $variable_name, $"($variable_name)"
    // will be Expr::StringInterpolation, Expr::FullCellPath.
    // Inspect the expressions by reference instead of cloning every
    // positional Expression just to read its discriminant.
    let arg_keep_raw: Vec<bool> = call
        .positional_iter()
        .skip(1)
        .map(|one_arg_expr| {
            matches!(
                &one_arg_expr.expr,
                Expr::StringInterpolation(_) | Expr::FullCellPath(_)
            )
        })
        .collect();
    // The working directory is needed twice (spawn path and child cwd);
    // compute it once instead of calling `current_dir` twice.
    let cwd = current_dir(engine_state, stack)?;
    let env_vars = env_to_strings(engine_state, stack)?;
    let external_command = ExternalCommand {
        name,
        args,
        arg_keep_raw,
        env_vars,
        redirect_stdout: true,
        redirect_stderr: false,
    };
    let mut command = external_command.spawn_simple_command(&cwd.to_string_lossy())?;
    command.current_dir(cwd);
    let err = command.exec(); // this replaces our process, should not return
    Err(ShellError::GenericError(
        "Error on exec".to_string(),
        err.to_string(),
        Some(name_span),
        None,
        Vec::new(),
    ))
}
#[cfg(not(unix))]
/// Fallback for non-Unix targets, where `exec(2)` is unavailable:
/// always returns an error pointing at the command's span.
fn exec(
    _engine_state: &EngineState,
    _stack: &mut Stack,
    call: &Call,
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
    Err(ShellError::GenericError(
        "Error on exec".to_string(),
        "exec is not supported on your platform".to_string(),
        Some(call.head),
        None,
        Vec::new(),
    ))
}
|
use crate::settings::Settings;
use crate::{
interaction::{
move_mode::MoveInteractionMode, navmesh::EditNavmeshMode,
rotate_mode::RotateInteractionMode, scale_mode::ScaleInteractionMode,
select_mode::SelectInteractionMode, terrain::TerrainInteractionMode,
},
scene::EditorScene,
GameEngine,
};
use rg3d::{
core::{
algebra::{Vector2, Vector3},
pool::Handle,
scope_profile,
},
gui::message::KeyCode,
scene::{graph::Graph, node::Node},
};
pub mod gizmo;
pub mod move_mode;
pub mod navmesh;
pub mod plane;
pub mod rotate_mode;
pub mod scale_mode;
pub mod select_mode;
pub mod terrain;
/// Common interface implemented by every editor interaction mode
/// (select, move, rotate, scale, navmesh, terrain).
pub trait InteractionModeTrait {
    /// Called when the left mouse button is pressed over the scene view.
    fn on_left_mouse_button_down(
        &mut self,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        mouse_pos: Vector2<f32>,
        frame_size: Vector2<f32>,
    );
    /// Called when the left mouse button is released.
    fn on_left_mouse_button_up(
        &mut self,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        mouse_pos: Vector2<f32>,
        frame_size: Vector2<f32>,
    );
    /// Called on mouse movement; receives both the delta and position.
    fn on_mouse_move(
        &mut self,
        mouse_offset: Vector2<f32>,
        mouse_position: Vector2<f32>,
        camera: Handle<Node>,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        frame_size: Vector2<f32>,
        settings: &Settings,
    );
    /// Per-frame update while the mode is active.
    fn update(
        &mut self,
        editor_scene: &mut EditorScene,
        camera: Handle<Node>,
        engine: &mut GameEngine,
    );
    /// Called when the mode is switched away from.
    fn deactivate(&mut self, editor_scene: &EditorScene, engine: &mut GameEngine);
    // Key handlers are optional; the default implementations ignore input.
    fn on_key_down(
        &mut self,
        _key: KeyCode,
        _editor_scene: &mut EditorScene,
        _engine: &mut GameEngine,
    ) {
    }
    fn on_key_up(
        &mut self,
        _key: KeyCode,
        _editor_scene: &mut EditorScene,
        _engine: &mut GameEngine,
    ) {
    }
}
/// Computes a uniform scale for the gizmo, proportional to its distance
/// from the camera and the camera's FOV, so the gizmo keeps a roughly
/// constant on-screen size.
pub fn calculate_gizmo_distance_scaling(
    graph: &Graph,
    camera: Handle<Node>,
    gizmo_origin: Handle<Node>,
) -> Vector3<f32> {
    let camera_node = &graph[camera];
    let gizmo_to_camera = graph[gizmo_origin]
        .global_position()
        .metric_distance(&camera_node.global_position());
    let scale = distance_scale_factor(camera_node.as_camera().fov()) * gizmo_to_camera;
    Vector3::new(scale, scale, scale)
}
/// Scale factor derived from the camera field of view: a wider FOV
/// yields a larger factor.
fn distance_scale_factor(fov: f32) -> f32 {
    0.1 * fov.tan()
}
/// Helper enum to be able to access interaction modes in array directly.
#[derive(Copy, Clone, PartialOrd, PartialEq, Hash, Debug)]
#[repr(usize)]
pub enum InteractionModeKind {
    Select = 0,
    Move = 1,
    Scale = 2,
    Rotate = 3,
    Navmesh = 4,
    Terrain = 5,
}
/// The concrete interaction mode in use; method calls are forwarded via
/// the `static_dispatch!` macro below instead of a `dyn` trait object.
pub enum InteractionMode {
    Select(SelectInteractionMode),
    Move(MoveInteractionMode),
    Scale(ScaleInteractionMode),
    Rotate(RotateInteractionMode),
    Navmesh(EditNavmeshMode),
    Terrain(TerrainInteractionMode),
}
/// Forwards a method call to whichever `InteractionMode` variant is
/// active, keeping each call statically dispatched.
macro_rules! static_dispatch {
    ($self:ident, $func:ident, $($args:expr),*) => {
        match $self {
            InteractionMode::Select(v) => v.$func($($args),*),
            InteractionMode::Move(v) => v.$func($($args),*),
            InteractionMode::Scale(v) => v.$func($($args),*),
            InteractionMode::Rotate(v) => v.$func($($args),*),
            InteractionMode::Navmesh(v) => v.$func($($args),*),
            InteractionMode::Terrain(v) => v.$func($($args),*),
        }
    }
}
// Boilerplate forwarding: every trait method records a profiling scope
// and then dispatches to the active mode via `static_dispatch!`.
impl InteractionModeTrait for InteractionMode {
    fn on_left_mouse_button_down(
        &mut self,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        mouse_pos: Vector2<f32>,
        frame_size: Vector2<f32>,
    ) {
        scope_profile!();
        static_dispatch!(
            self,
            on_left_mouse_button_down,
            editor_scene,
            engine,
            mouse_pos,
            frame_size
        )
    }
    fn on_left_mouse_button_up(
        &mut self,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        mouse_pos: Vector2<f32>,
        frame_size: Vector2<f32>,
    ) {
        scope_profile!();
        static_dispatch!(
            self,
            on_left_mouse_button_up,
            editor_scene,
            engine,
            mouse_pos,
            frame_size
        )
    }
    fn on_mouse_move(
        &mut self,
        mouse_offset: Vector2<f32>,
        mouse_position: Vector2<f32>,
        camera: Handle<Node>,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
        frame_size: Vector2<f32>,
        settings: &Settings,
    ) {
        scope_profile!();
        static_dispatch!(
            self,
            on_mouse_move,
            mouse_offset,
            mouse_position,
            camera,
            editor_scene,
            engine,
            frame_size,
            settings
        )
    }
    fn update(
        &mut self,
        editor_scene: &mut EditorScene,
        camera: Handle<Node>,
        engine: &mut GameEngine,
    ) {
        scope_profile!();
        static_dispatch!(self, update, editor_scene, camera, engine)
    }
    fn deactivate(&mut self, editor_scene: &EditorScene, engine: &mut GameEngine) {
        scope_profile!();
        static_dispatch!(self, deactivate, editor_scene, engine)
    }
    fn on_key_down(
        &mut self,
        key: KeyCode,
        editor_scene: &mut EditorScene,
        engine: &mut GameEngine,
    ) {
        scope_profile!();
        static_dispatch!(self, on_key_down, key, editor_scene, engine)
    }
    fn on_key_up(&mut self, key: KeyCode, editor_scene: &mut EditorScene, engine: &mut GameEngine) {
        scope_profile!();
        static_dispatch!(self, on_key_up, key, editor_scene, engine)
    }
}
|
pub mod optics;
pub mod traits;
pub use traits::{
Lens, LensMut, LensRef, Prism, PrismMut, PrismRef, Review, Traversal, TraversalMut,
TraversalRef,
};
pub use optics::{
_both, _mapped, _arc, _box, Err, _mut, _rc, _ref, _0, _1, _2, _3, _4, _5, _6,
__,
};
pub use lens_rs_derive::{Lens, Prism, Review};
/// Builds an optics *value* from a dot-separated path: `optics!(a.b)`
/// expands to `a(b(__))`, with `__` terminating the chain.
#[macro_export]
macro_rules! optics {
    () => { lens_rs::optics::__ };
    ($optic:ident) => { lens_rs::optics::$optic(lens_rs::optics::__) };
    ($optic:ident . $($optics:tt)*) => {
        lens_rs::optics::$optic(optics!($($optics)*))
    }
}
/// Builds the optics *type* for the same dot-separated path accepted by
/// `optics!`: `field![a.b]` expands to `a<b<__>>`.
#[macro_export]
macro_rules! field {
    [] => { lens_rs::optics::__ };
    [$optic:ident] => { lens_rs::optics::$optic<lens_rs::optics::__> };
    [$optic:ident . $($optics:tt)*] => {
        lens_rs::optics::$optic<field![$($optics)*]>
    }
}
|
//=============================================================================
// debug.rs
// Methods and macros for handling debug messages from the engine and application
//
// Created by Victor on 2019/10/24
//=============================================================================
use log::{debug, error, info, trace, warn};
/// Logs `s` at `info` level through the `log` facade.
pub fn log(s: &str) {
    //println!("{}", s); // TODO: We should log to a file, etc
    info!("{}", s);
}
/// Severity of an engine/application debug message.
/// NOTE(review): not consumed by `log` above — confirm intended use.
pub enum LogType {
    Error,
    Warning,
    Info
}
|
use std::fs::File;
use std::io::{self, Lines, BufReader, BufRead};
use std::path::Path;
/// Returns `true` when `numbers[curr]` equals the sum of two
/// distinct-valued entries among the `pre` numbers preceding it.
fn is_valid(numbers: &Vec<i64>, pre: usize, curr: usize) -> bool {
    let window = &numbers[curr - pre..curr];
    let target = numbers[curr];
    window
        .iter()
        .any(|x| window.iter().any(|y| x != y && x + y == target))
}
/// Scans forward from index `curr` and returns the first number that is
/// not a valid sum of two of its `pre` predecessors.
fn find_invalid(numbers: &Vec<i64>, pre: usize, curr: usize) -> i64 {
    match (curr..numbers.len()).find(|&i| !is_valid(numbers, pre, i)) {
        Some(i) => numbers[i],
        None => panic!("No invalid numbers found"),
    }
}
/// Extends a contiguous sum rightwards from `first`; returns the
/// inclusive end index once the running sum hits `target`, or `None`
/// when the sum overshoots or the slice is exhausted.
fn find_range_last(numbers: &Vec<i64>, first: usize, target: i64) -> Option<usize> {
    let mut running = numbers[first];
    for (last, &value) in numbers.iter().enumerate().skip(first + 1) {
        running += value;
        if running > target {
            return None;
        }
        if running == target {
            return Some(last);
        }
    }
    None
}
/// Finds the first contiguous range (as an inclusive index pair) whose
/// elements sum to `target`.
fn find_range(numbers: &Vec<i64>, target: i64) -> (usize, usize) {
    (0..numbers.len())
        .find_map(|start| find_range_last(numbers, start, target).map(|end| (start, end)))
        .unwrap_or_else(|| panic!("No range found"))
}
/// Returns the smallest and largest values in `numbers[first..=last]`.
///
/// Bug fix: the previous version iterated `first+1..last`, which
/// skipped the element at `last` even though `find_range` returns
/// `last` as an *inclusive* end index.
fn find_minmax(numbers: &Vec<i64>, first: usize, last: usize) -> (i64, i64) {
    let mut min = numbers[first];
    let mut max = numbers[first];
    for &n in &numbers[first + 1..=last] {
        if n > max {
            max = n;
        }
        if n < min {
            min = n;
        }
    }
    (min, max)
}
/// Solves both puzzle parts: part 1 finds the first number that is not
/// a sum of two of its `pre` predecessors; part 2 finds a contiguous
/// range summing to it and prints the range's min + max.
fn solve_tasks(numbers:Vec<i64>, pre: usize) {
    let invalid = find_invalid(&numbers, pre,pre);
    println!("Answer 1: {}", invalid);
    let (first,last) = find_range(&numbers, invalid);
    let (min,max) = find_minmax(&numbers, first,last);
    println!("Answer 2: [{},{}] => [{}+{}] = {}",first,last,min,max,min+max);
}
fn main() {
    // Parse one i64 per line of the puzzle input, then solve with a
    // 25-number preamble.
    if let Ok(lines) = read_lines("input.txt") {
        let input : Vec<i64> = lines.into_iter().map(|s| s.unwrap())
            .map(|s|s.parse().unwrap()).collect();
        solve_tasks(input,25);
    } else {
        println!("Error reading file");
    }
}
/// Opens `filename` and returns a buffered iterator over its lines.
fn read_lines<P>(filename: P) -> io::Result<Lines<BufReader<File>>>
where P: AsRef<Path>, {
    let file = File::open(filename)?;
    Ok(BufReader::new(file).lines())
} |
//! Responsavel pelo parsing e de gerar a AST do programa BIRL
/// Representa as keywords da linguagem
/// The language's keywords. (Comments translated to English; the
/// keyword strings themselves are part of the language and unchanged.)
pub mod kw {
    // Definitions
    /// Used to declare constant globals.
    pub const KW_GLOBAL: &'static str = "SAI DE CASA";
    /// Used to declare variable globals.
    pub const KW_VAR_GLOBAL: &'static str = "IBIRAPUERA";
    /// Used to define sections.
    pub const KW_SECTION: &'static str = "JAULA";
    /// Used to end a section definition.
    pub const KW_SECTEND: &'static str = "SAINDO DA JAULA";
    // Commands
    /// Copies the value of one variable to another.
    pub const KW_MOVE: &'static str = "BORA";
    /// Clears a variable's value.
    pub const KW_CLEAR: &'static str = "NUM VAI DA NAO";
    /// Declares a variable.
    pub const KW_DECL: &'static str = "VEM";
    /// Declares a variable with an initial value.
    pub const KW_DECLWV: &'static str = "VEM, CUMPADE";
    /// Performs a "jump" from one section to another.
    pub const KW_JUMP: &'static str = "E HORA DO";
    /// Comparison.
    pub const KW_CMP: &'static str = "E ELE QUE A GENTE QUER";
    /// Comparison resulted in "equal".
    pub const KW_CMP_EQ: &'static str = "E ELE MEMO";
    /// Comparison resulted in "not equal".
    pub const KW_CMP_NEQ: &'static str = "NUM E ELE";
    /// Comparison resulted in "less than".
    pub const KW_CMP_LESS: &'static str = "MENOR, CUMPADE";
    /// Comparison resulted in "less than or equal".
    pub const KW_CMP_LESSEQ: &'static str = "MENOR OU E MEMO";
    /// Comparison resulted in "greater than".
    pub const KW_CMP_MORE: &'static str = "MAIOR, CUMPADE";
    /// Comparison resulted in "greater than or equal".
    pub const KW_CMP_MOREEQ: &'static str = "MAIOR OU E MEMO";
    /// Prints with a trailing newline.
    pub const KW_PRINTLN: &'static str = "CE QUER VER ISSO";
    /// Prints.
    pub const KW_PRINT: &'static str = "CE QUER VER";
    /// Exits the program.
    pub const KW_QUIT: &'static str = "BIRL";
    /// Reads a string from standard input.
    pub const KW_INPUT: &'static str = "BORA, CUMPADE";
    /// Reads a string from standard input, uppercased.
    pub const KW_INPUT_UP: &'static str = "BORA, CUMPADE!!!";
}
#[derive(Clone)]
/// Represents a command, executed within the current context.
/// The placeholder names a/b below stand for the command's arguments.
pub enum Command {
    /// Moves (copies) the contents of the variable at address a to b
    Move(String, String),
    /// Clears the value of the variable at address a
    Clear(String),
    /// Declares a variable named a
    Decl(String),
    /// Declares a variable named a with value b
    DeclWV(String, String),
    /// Transfers execution to the section named a, returning one instruction ahead
    Jump(String),
    /// Compares the values of a and b; used by the conditionals below
    Cmp(String, String),
    /// Executes section a if the last comparison was "equal"
    CmpEq(String),
    /// Executes the section if the last comparison was "not equal"
    CmpNEq(String),
    /// Executes the section if the last comparison was "less than"
    CmpLess(String),
    /// Executes the section if the last comparison was "less than or equal"
    CmpLessEq(String),
    /// Executes the section if the last comparison was "greater than"
    CmpMore(String),
    /// Executes the section if the last comparison was "greater than or equal"
    CmpMoreEq(String),
    /// Prints a series of values followed by a newline
    Println(Vec<String>),
    /// Prints a series of values
    Print(Vec<String>),
    /// Exits the program
    Quit,
    /// Reads standard input into a variable
    Input(String),
    /// Reads standard input and stores it uppercased
    InputUpper(String),
}
/// Convenient representation of the commands without their arguments.
pub enum CommandType {
    Move,
    Clear,
    Decl,
    DeclWV,
    Jump,
    Cmp,
    CmpEq,
    CmpNEq,
    CmpLess,
    CmpLessEq,
    CmpMore,
    CmpMoreEq,
    Println,
    Print,
    Quit,
    Input,
    InputUpper,
}
/// Procura pelo caractere c em src e retorna quantas vezes ele foi encontrado
fn n_of_char(c: char, src: &str) -> i32 {
if src.len() <= 0 {
0
} else {
let mut num = 0i32;
let mut last_char = ' ';
let mut is_string = false;
for cur in src.chars() {
if cur == '\"' || cur == '\'' {
// String ou caractere, verifica o ultimo caractere
if last_char == '\\' {
// caractere de escape, ignora
} else {
// inicia ou finaliza string ou char
is_string = !is_string;
}
}
if !is_string {
if cur == c {
num += 1;
}
}
last_char = cur;
}
num
}
}
/// Troca caracteres acentuados para suas versões sem acento
fn change_accents(src: &str) -> String {
let mut nstr = String::new();
for c in src.chars() {
nstr.push(match c {
'Á' | 'Ã' | 'À' => 'A',
'É' => 'E',
'Õ' | 'Ô' => 'O',
'Í' => 'I',
_ => c,
});
}
nstr
}
/// Checks that a command received the number of arguments it expects,
/// aborting with a message naming the keyword otherwise.
fn check_n_params(command: CommandType, num_params: usize) {
    // For each command, produce the expected parameter count and the
    // keyword to report in the error message.
    let (expected, id) = match command {
        CommandType::Cmp => (2, kw::KW_CMP),
        CommandType::CmpEq => (1, kw::KW_CMP_EQ),
        CommandType::CmpNEq => (1, kw::KW_CMP_NEQ),
        CommandType::CmpLess => (1, kw::KW_CMP_LESS),
        CommandType::CmpLessEq => (1, kw::KW_CMP_LESSEQ),
        CommandType::CmpMore => (1, kw::KW_CMP_MORE),
        CommandType::CmpMoreEq => (1, kw::KW_CMP_MOREEQ),
        CommandType::Jump => (1, kw::KW_JUMP),
        CommandType::DeclWV => (2, kw::KW_DECLWV),
        CommandType::Decl => (1, kw::KW_DECL),
        CommandType::Clear => (1, kw::KW_CLEAR),
        CommandType::Move => (2, kw::KW_MOVE),
        // print and println accept more than one argument, so they get
        // an additional check
        CommandType::Println => {
            // println may be used without arguments, printing just a
            // newline — any count is accepted
            (num_params, kw::KW_PRINTLN)
        }
        CommandType::Print => {
            // print may not be empty: force a mismatch when 0 args
            if num_params < 1 {
                (1, kw::KW_PRINT)
            } else {
                (num_params, kw::KW_PRINT)
            }
        }
        CommandType::Quit => (0, kw::KW_QUIT),
        CommandType::Input => (1, kw::KW_INPUT),
        CommandType::InputUpper => (1, kw::KW_INPUT_UP),
    };
    if expected != num_params {
        abort!("\"{}\" espera {} parametros, porém {} foram passados.",
               id,
               expected,
               num_params)
    }
}
/// Divide os argumentos de um comando
/// Splits a command's argument string on commas, respecting string and
/// char literals, escape sequences, and parenthesized symbol calls.
/// Each returned argument is trimmed of surrounding whitespace.
fn split_arguments(args: String) -> Vec<String> {
    if args == "" {
        vec![]
    } else {
        let mut result: Vec<String> = vec![];
        // State flags: inside string, inside char literal, last char was
        // an escape, inside a symbol name, inside parentheses.
        let (mut in_str, mut in_char, mut last_escape, mut in_sym, mut in_par) =
            (false, false, false, false, false);
        let mut num_op_par = 0; // Number of currently open parentheses
        let mut last_arg = String::new();
        for c in args.chars() {
            match c {
                // Closing quote, unless escaped.
                '\"' if in_str => {
                    if last_escape {
                        last_escape = false;
                        last_arg.push_str("\\\"");
                    } else {
                        in_str = false;
                        last_arg.push('\"');
                    }
                }
                // Opening quote.
                '\"' => {
                    in_str = true;
                    last_arg.push('\"');
                }
                // Char literals toggle only outside strings.
                '\'' => {
                    last_arg.push(c);
                    if !in_str {
                        in_char = !in_char;
                    }
                }
                // Track escape state; `\\` collapses to one backslash.
                '\\' => {
                    if last_escape {
                        last_arg.push('\\');
                        last_escape = false;
                    } else {
                        last_escape = true;
                    }
                }
                // Letters/underscore outside literals start/extend a symbol.
                'a'...'z' | 'A'...'Z' | '_' if !in_str && !in_char => {
                    in_sym = true;
                    last_arg.push(c);
                }
                '(' if in_sym => {
                    // Parentheses
                    num_op_par += 1; // Opened a parenthesis
                    in_par = true;
                    last_arg.push(c);
                }
                ')' if in_par => {
                    if num_op_par <= 0 {
                        abort!("Parentese de fechamento sem nenhum abrindo!")
                    }
                    num_op_par -= 1;
                    if num_op_par <= 0 {
                        in_par = false;
                    }
                    last_arg.push(c);
                }
                // A comma ends the current argument unless nested in a
                // literal or parentheses.
                ',' if in_sym && !in_par => {
                    in_sym = false;
                    result.push(last_arg.clone());
                    last_arg.clear();
                }
                ',' if !in_str && !in_char && !in_sym && !in_par => {
                    result.push(last_arg.clone());
                    last_arg.clear();
                }
                // Whitespace outside literals/symbols is skipped.
                ' ' if !in_str && !in_char && !in_sym => {}
                _ => last_arg.push(c),
            }
        }
        if last_arg != "" {
            result.push(last_arg.clone());
        }
        result.iter().map(|arg| arg.trim().to_string()).collect::<Vec<String>>()
    }
}
/// Divide um comando em nome e argumentos
/// Splits a raw command line into `[name, argument-string]` at the
/// first `:`; commands without one yield an empty argument string.
fn split_command(cmd: String) -> Vec<String> {
    match cmd.find(':') {
        Some(sep) => vec![cmd[..sep].to_string(), cmd[sep + 1..].to_string()],
        None => vec![cmd, String::new()],
    }
}
/// Faz parsing de um comando
/// Parses a single command line (`NAME: arg1, arg2, ...`) into a
/// `Command`, aborting with a descriptive message on arity errors or
/// unknown command names.
fn parse_cmd(cmd: &str) -> Command {
    // Command structure:
    // COMMAND: var1, var2, ...
    let cmd_parts = split_command(cmd.to_string());
    let cmd_parts = cmd_parts.iter().map(|part| part.trim()).collect::<Vec<&str>>();
    // Parsed arguments
    let mut arguments: Vec<String> = Vec::new();
    // Command type/name
    let cmd_type = if cmd_parts.len() > 1 {
        if n_of_char(',', cmd_parts[1]) == 0 {
            if cmd_parts[1].trim() != "" {
                // Single argument
                arguments.push(cmd_parts[1].trim().to_string());
            }
        } else {
            arguments = split_arguments(cmd_parts[1].trim().to_string());
        }
        cmd_parts[0]
    } else {
        cmd.trim()
    };
    match cmd_type {
        kw::KW_MOVE => {
            check_n_params(CommandType::Move, arguments.len());
            let (addr1, addr2) = (arguments[0].clone(), arguments[1].clone());
            Command::Move(addr1, addr2)
        }
        kw::KW_CLEAR => {
            check_n_params(CommandType::Clear, arguments.len());
            Command::Clear(arguments[0].clone())
        }
        kw::KW_DECL => {
            check_n_params(CommandType::Decl, arguments.len());
            Command::Decl(arguments[0].clone())
        }
        kw::KW_DECLWV => {
            check_n_params(CommandType::DeclWV, arguments.len());
            let (name, val) = (arguments[0].clone(), arguments[1].clone());
            Command::DeclWV(name, val)
        }
        kw::KW_JUMP => {
            // Note from original: sections may carry extra (',')
            // arguments that would need to be joined into one.
            check_n_params(CommandType::Jump, arguments.len());
            Command::Jump(arguments[0].clone())
        }
        kw::KW_CMP => {
            check_n_params(CommandType::Cmp, arguments.len());
            let (addr1, addr2) = (arguments[0].clone(), arguments[1].clone());
            Command::Cmp(addr1, addr2)
        }
        kw::KW_CMP_EQ => {
            check_n_params(CommandType::CmpEq, arguments.len());
            Command::CmpEq(arguments[0].clone())
        }
        kw::KW_CMP_NEQ => {
            check_n_params(CommandType::CmpNEq, arguments.len());
            Command::CmpNEq(arguments[0].clone())
        }
        kw::KW_CMP_LESS => {
            check_n_params(CommandType::CmpLess, arguments.len());
            Command::CmpLess(arguments[0].clone())
        }
        kw::KW_CMP_LESSEQ => {
            check_n_params(CommandType::CmpLessEq, arguments.len());
            Command::CmpLessEq(arguments[0].clone())
        }
        kw::KW_CMP_MORE => {
            check_n_params(CommandType::CmpMore, arguments.len());
            Command::CmpMore(arguments[0].clone())
        }
        kw::KW_CMP_MOREEQ => {
            check_n_params(CommandType::CmpMoreEq, arguments.len());
            Command::CmpMoreEq(arguments[0].clone())
        }
        kw::KW_PRINTLN => {
            check_n_params(CommandType::Println, arguments.len());
            // Move the vector directly instead of cloning every element
            // (previously `.iter().map(|arg| arg.clone()).collect()`).
            Command::Println(arguments)
        }
        kw::KW_PRINT => {
            check_n_params(CommandType::Print, arguments.len());
            Command::Print(arguments)
        }
        kw::KW_QUIT => {
            check_n_params(CommandType::Quit, arguments.len());
            Command::Quit
        }
        kw::KW_INPUT => {
            check_n_params(CommandType::Input, arguments.len());
            Command::Input(arguments[0].clone())
        }
        kw::KW_INPUT_UP => {
            check_n_params(CommandType::InputUpper, arguments.len());
            Command::InputUpper(arguments[0].clone())
        }
        _ => abort!("Comando \"{}\" não existe.", cmd_type),
    }
}
/// Represents a unit (a compiled file) holding the content to be executed.
pub struct Unit {
    /// The set of sections available for execution
    pub sects: Vec<Section>,
    /// The set of globals: constants or variables
    pub globals: Vec<Global>,
    /// Commands declared outside any function, run at program start
    pub glb_cmds: Vec<Command>,
}
/// The kinds of information a source line can carry.
enum LineType {
    /// The line is a command
    Command,
    /// The line declares a section
    SectStart,
    /// The line ends a section
    SectEnd,
    /// The line declares a global
    GlobalDecl,
}
/// Classifies an (already trimmed) line by looking at its first word.
///
/// Priority when several keywords could match: section end, then global
/// declaration, then section start, then plain command (the default).
fn parse_line_type(line: &str) -> LineType {
    // Tests whether the line closes a section (exact keyword match).
    if line == kw::KW_SECTEND {
        LineType::SectEnd
    } else {
        let mut ret = LineType::Command;
        // Section declaration: the first space-separated word is the keyword.
        // `split` always yields at least one item, so `unwrap_or` is just a guard;
        // the original collected the whole Vec only to index element 0.
        if line.split(' ').next().unwrap_or("") == kw::KW_SECTION {
            ret = LineType::SectStart;
        }
        // Global declaration: the first word up to ':' is one of the global keywords.
        let fword = line.split(':').next().unwrap_or("");
        if fword == kw::KW_GLOBAL || fword == kw::KW_VAR_GLOBAL {
            ret = LineType::GlobalDecl;
        }
        ret
    }
}
/// Interprets a source file and returns its compiled unit.
pub fn parse(file: &str) -> Unit {
    use std::fs;
    use std::io::{BufRead, BufReader};
    let f = match fs::File::open(file) {
        Ok(ff) => ff,
        Err(err) => {
            abort!("Não foi possivel abrir o arquivo \"{}\". Erro: {}",
                   file,
                   err)
        }
    };
    // The unit being built (the return value)
    let mut final_unit = Unit {
        sects: vec![],
        globals: vec![],
        glb_cmds: vec![],
    };
    let reader = BufReader::new(f);
    let mut lines = reader.lines();
    // Whether a section is currently being parsed, plus the lines gathered so far
    let mut parsing_section = false;
    let mut cur_section: Vec<String> = vec![];
    loop {
        let line = match lines.next() {
            Some(l) => {
                match l {
                    Ok(ll) => String::from(ll.trim()),
                    // NOTE(review): read errors silently become an empty line,
                    // which is then skipped below.
                    Err(_) => String::new(),
                }
            }
            None => break,
        };
        if line == "" {
            continue;
        }
        // Strips comments ('#' or ';') from the line
        let line = if line.contains('#') || line.contains(';') {
            let mut tmp = String::new();
            for c in line.chars() {
                // Keep characters until the first comment marker
                if c != '#' && c != ';' {
                    tmp.push(c);
                } else {
                    break;
                }
            }
            tmp.trim().to_string()
        } else {
            line.trim().to_string()
        };
        if line == "" {
            // After removing the comments the line became empty
            continue;
        }
        // Dispatch on what the first word of the line declares
        match parse_line_type(&line) {
            // A global declaration: push the parsed global onto the unit
            LineType::GlobalDecl if !parsing_section => {
                final_unit.globals.push(parse_global(&line))
            }
            // A section declaration: start collecting the section's lines
            LineType::SectStart if !parsing_section => {
                cur_section.push(line.clone());
                parsing_section = true;
            }
            LineType::SectEnd if parsing_section => {
                cur_section.push(line.clone());
                if line == kw::KW_SECTEND {
                    // Close the section and hand the buffered lines to the section parser
                    parsing_section = false;
                    final_unit.sects.push(parse_section(cur_section.clone()));
                    cur_section.clear();
                }
            }
            // Inside a section: buffer the command for the section
            LineType::Command if parsing_section => {
                // Normalize accents so accented command spellings are accepted
                cur_section.push(change_accents(&line));
            }
            // Outside any section: these commands run at program start
            LineType::Command => final_unit.glb_cmds.push(parse_cmd(&change_accents(&line))),
            _ => {
                // None of the valid combinations matched in this context
                // FIXME: produce a clearer error message
                abort!("Erro de sintaxe! Linha atual não reconhecida no contexto: {}",
                       line)
            }
        }
    }
    final_unit
}
#[derive(Clone)]
/// Represents a callable area (function) that can be executed.
pub struct Section {
    /// Name of the section
    pub name: String,
    /// The commands/lines executed by the section
    pub lines: Vec<Command>,
    /// The parameters that must be passed when calling the section
    pub param_list: Vec<ExpectedParameter>,
}
impl Section {
    /// Creates an empty, unnamed section with no commands and no parameters.
    pub fn new() -> Section {
        Section {
            name: String::new(),
            lines: Vec::new(),
            param_list: Vec::new(),
        }
    }
}
use value;
#[derive(Clone)]
/// A parameter a section expects to receive when called.
pub struct ExpectedParameter {
    /// Identifier of the parameter
    pub id: String,
    /// The type the parameter expects
    pub tp: value::ValueType,
}
/// Parses a single parameter declaration of the form `name:type`.
/// Aborts when the ':' separator is missing or the type name is invalid.
fn parse_parameter(param: &str) -> ExpectedParameter {
    // Position of the ':' separating name from type.
    let div_token = match param.find(':') {
        Some(pos) => pos,
        None => abort!("Parametro deve ter tipo declarado depois do nome, separado por um ':'"),
    };
    let param_id = &param[..div_token];
    let param_tp = match value::ValueType::try_parse(&param[div_token + 1..]) {
        Some(tp) => tp,
        None => {
            abort!("Tipo inválido para parâmetro: {}",
                   &param[div_token + 1..])
        }
    };
    ExpectedParameter {
        id: param_id.trim().to_string(),
        tp: param_tp,
    }
}
/// Parses the parameter list of a section declaration.
///
/// Declaration format with parameters: `JAULA name (PARAM1:TYPE, ...)`.
/// Returns an empty list when the declaration has no parentheses or the
/// parentheses are empty; aborts on malformed parenthesization.
fn parse_section_parameters(decl_line: &str) -> Vec<ExpectedParameter> {
    let decl_line = decl_line.trim();
    if !decl_line.contains('(') {
        vec![] // No arguments: return an empty list
    } else {
        // The '(' is known to exist, so `find` cannot fail here.
        let start_par = decl_line.find('(').unwrap();
        // Bug fix: `find` always returns an index < len, so the original check
        // `start_par >= decl_line.len()` could never fire. The intended case is
        // '(' being the last character, i.e. nothing (not even ')') after it.
        if start_par + 1 >= decl_line.len() {
            abort!("Parametros declarados de forma incorreta. Parêntese em aberto");
        }
        let fin_par = decl_line.find(')').expect("Parêntese de fechamento não encontrado na declaração dos parametros da seção");
        if fin_par < start_par {
            abort!("Erro na sintaxe! Parêntese de fechamento veio antes do de abertura");
        }
        // The raw text between the parentheses.
        let parameters = decl_line[start_par + 1..fin_par].trim();
        if parameters == "" {
            vec![] // Empty list: the declaration only has the bare parentheses
        } else if parameters.contains(',') {
            parameters.split(',').map(|param| parse_parameter(param.trim())).collect()
        } else {
            vec![parse_parameter(parameters)]
        }
    }
}
/// Parses a buffered section: the declaration line, the command lines, and
/// the closing line (the declaration/closing checks happen in `parse`).
fn parse_section(lines: Vec<String>) -> Section {
    // A valid section needs at least a declaration and a closing line.
    if lines.len() < 2 {
        abort!("Erro fazendo parsing da seção. Número incorreto de linhas: {}.",
               lines.len())
    } else {
        // Declaration format: KEYWORD name [(params)]
        if !lines[0].contains(' ') {
            abort!("Erro na declaração da seção! Falta nome depois da palavra chave")
        }
        let params = parse_section_parameters(&lines[0]);
        let first_space = lines[0].find(' ').unwrap(); // First space is known to exist
        let name = if lines[0].contains('(') {
            // The declaration has parameters: the name ends where they begin.
            let starting_par = lines[0].find('(').unwrap();
            lines[0][first_space + 1..starting_par].trim().to_string()
        } else {
            lines[0][first_space + 1..].trim().to_string()
        };
        let mut sect = Section {
            name: name,
            lines: vec![],
            param_list: params,
        };
        if lines.len() > 2 {
            // The `- 1` skips the final line, the section-closing keyword.
            for line in lines[1..lines.len() - 1].iter() {
                if line == "" {
                    continue;
                }
                // Skip lines that are nothing but a comment. `starts_with`
                // replaces the original full `chars().collect::<Vec<char>>()`
                // allocation; the line is known non-empty at this point.
                if line.starts_with('#') || line.starts_with(';') {
                    continue;
                }
                sect.lines.push(parse_cmd(&line));
            }
        }
        sect
    }
}
#[derive(Clone)]
/// Represents a global value (constant or variable).
pub struct Global {
    /// Identifier of the global
    pub identifier: String,
    /// Value of the global (still unparsed source text)
    pub value: String,
    /// Whether the global is constant
    pub is_const: bool,
}
/// Splits a global declaration (`KEYWORD: name: value`) into its three
/// trimmed parts. Aborts when either ':' is missing or has no content after it.
fn split_global<'a>(glb: &'a str) -> Vec<&'a str> {
    // Position of the first ':' (separates the keyword from the name).
    let index = match glb.find(':') {
        Some(i) => i,
        None => abort!("Numero incorreto de ':' na declaração de um global."),
    };
    if index >= glb.len() - 1 {
        abort!("Faltam informações depois do primeiro ':'")
    }
    // Position of the second ':' — relative to the slice after the first one.
    let nindex = match glb[index + 1..].find(':') {
        Some(i) => i,
        None => abort!("Numero incorreto de ':' na declaração de um global."),
    };
    // Bug fix: the original compared the *relative* index `nindex` against the
    // full string length, so a declaration ending in ':' (no value) slipped
    // through. The absolute position of the second ':' is `index + 1 + nindex`.
    if index + 1 + nindex >= glb.len() - 1 {
        abort!("Faltam informações após o segundo ':'")
    }
    // The extra `&` in the original produced `&&str` values that relied on
    // coercion; `trim` already returns `&str`.
    vec![glb[..index].trim(),
         glb[index + 1..nindex + index + 1].trim(),
         glb[nindex + index + 2..].trim()]
}
/// Parses a global declaration into a `Global`.
fn parse_global(glb: &str) -> Global {
    // Declaration structure: KEYWORD: name: value
    let words = split_global(glb.trim());
    if words.len() != 3 {
        abort!("Problema na declaração do global. Número incorreto de ':': {}",
               words.len())
    }
    // The keyword decides constness; `parse` only routes lines whose first
    // word is one of these two keywords here, hence the `unreachable!`.
    let is_const = match words[0].trim() {
        kw::KW_GLOBAL => true,
        kw::KW_VAR_GLOBAL => false,
        _ => unreachable!(),
    };
    // Separate the global's name and value
    let (glb_name, glb_value) = (words[1].clone(), String::from(words[2]));
    Global {
        identifier: String::from(glb_name),
        value: glb_value,
        is_const: is_const,
    }
}
|
/// Preprocesses a raw input string before interpretation.
/// Currently the only step is restoring escape sequences (see `replace_escapes`).
pub fn preprocess(string: &str) -> String {
    replace_escapes(string)
}
/// This replaces \\<escape> instances with \<escape>.
/// The readline functionality sanitizes these escapes with the double backslash, this returns them to the form they were entered with.
///
/// The replacement order matters: the `\\` collapse must run after the
/// named escapes so it does not consume their leading backslash.
fn replace_escapes(string: &str) -> String {
    // `str::replace` already returns an owned `String`, so the intermediate
    // `to_string()` allocation the original performed was unnecessary.
    string
        .replace("\\n", "\n")
        .replace("\\t", "\t")
        .replace("\\r", "\r")
        .replace("\\\\", "\\")
        .replace(r#"\""#, "\"")
}
#[cfg(test)]
mod test {
    use super::*;
    use testing::test_constants;
    // `test::Bencher` and the `#[bench]` attribute require a nightly toolchain.
    use test::Bencher;
    // Exercises every escape pair handled by `replace_escapes`.
    #[test]
    fn escape_escapes() {
        assert_eq!("\n".to_string(), replace_escapes("\\n"));
        assert_eq!("hello\n".to_string(), replace_escapes("hello\\n"));
        assert_eq!("hello\nworld", replace_escapes("hello\\nworld"));
        assert_eq!("hello\tworld", replace_escapes("hello\\tworld"));
        assert_eq!("hello\n\rworld", replace_escapes("hello\\n\\rworld"));
        assert_eq!("hello\\world", replace_escapes("hello\\\\world"));
        assert_eq!("hello\"world", replace_escapes(r#"hello\\"world"#));
    }
    #[bench]
    fn preprocess_simple_program_bench(b: &mut Bencher) {
        b.iter(|| preprocess(test_constants::SIMPLE_PROGRAM_INPUT_1));
    }
}
|
use image::{DynamicImage, GenericImage};
use rand::Rng;
use pbr::ProgressBar;
pub mod ray;
pub mod vector;
pub mod intersectable;
pub mod scene;
pub mod camera;
pub mod render;
pub mod color;
pub mod aabb;
pub mod texture;
use vector::Vec3;
use scene::{Scene, Sphere, MovingSphere, Material, Surface, SceneItem, Coloration};
use camera::Camera;
use render::get_color;
use color::Color;
use texture::{CheckerTexture, NoiseTexture};
/// Renders the demo scene to `output.png` with simple supersampling.
pub fn main() {
    // Image width, height and samples per pixel.
    let nx = 600;
    let ny = 300;
    let ns = 100;
    let mut rng = rand::thread_rng();
    // Progress is reported once per image column.
    let mut progress = ProgressBar::new(nx as u64);
    let camera_pos = Vec3::new(10., 4., 4.);
    let camera_look_at = Vec3::new(0., 0., 0.);
    // Focus exactly on the look-at point.
    let focus_dist = (camera_pos - camera_look_at).magn();
    let camera = Camera::new(
        camera_pos,
        camera_look_at,
        Vec3::new(0., 1., 0.),
        30., nx as f32 / ny as f32, 0.01, focus_dist, 0., 1.);
    let mut img = DynamicImage::new_rgb8(nx, ny);
    let scene = get_scene();
    for x in 0..nx {
        for y in 0..ny {
            let mut col = Color::black();
            // Average `ns` randomly jittered rays per pixel.
            for _s in 0..ns {
                let ru: f64 = rng.gen();
                let u = (x as f64 + ru) as f64 / nx as f64;
                let rv: f64 = rng.gen();
                // `ny - y` flips the row index so image rows map to world "up".
                let v = (ny as f64 - y as f64 + rv) as f64 / ny as f64;
                let r = camera.get_ray(u, v);
                col = col + get_color(&scene, &r, 1);
            }
            col = col / ns as f32;
            img.put_pixel(x, y, col.to_rgba());
        }
        progress.inc();
    }
    img.save("output.png").unwrap();
    progress.finish_print("done");
}
/// Builds the fixed demo scene: a huge noise-textured ground sphere with a
/// checker-textured sphere resting on it.
fn get_scene() -> Scene {
    // let tx = CheckerTexture::new(Color::blue(), Color::red());
    let tx = NoiseTexture::new();
    let diff_bottom_mat = Material {
        color: Coloration::NoiseTexture(tx),
        albedo: 0.3,
        surface: Surface::Diffuse
    };
    // Ground: radius-1000 sphere far below the origin approximates a plane.
    let big_sphere = Sphere::new(Vec3::new(0., -1000., -1.), 1000., diff_bottom_mat);
    let tx2 = CheckerTexture::new(Color::red(1.), Color::green(1.));
    let diff_sm = Material {
        color: Coloration::CheckerTexture(tx2),
        albedo: 0.4,
        surface: Surface::Diffuse
    };
    let sm_sphere = Sphere::new(Vec3::new(0., 2., 0.), 2., diff_sm);
    let mut items = Vec::new();
    items.push(SceneItem::Sphere(big_sphere));
    items.push(SceneItem::Sphere(sm_sphere));
    Scene::new(items)
}
/// Builds a randomized scene in the style of "Ray Tracing in One Weekend":
/// a checkered ground, a grid of small random spheres, and three big ones.
///
/// NOTE(review): not referenced by `main` in this file (which uses
/// `get_scene`); presumably kept for switching demo scenes.
fn random_scene() -> Scene {
    let tx = CheckerTexture::new(Color::blue(1.), Color::red(1.));
    let diff_bottom_mat = Material {
        color: Coloration::CheckerTexture(tx),
        albedo: 0.3,
        surface: Surface::Diffuse
    };
    // Ground sphere approximating a plane.
    let big_sphere = Sphere::new(Vec3::new(0., -1000., -1.), 1000., diff_bottom_mat);
    let mut items = Vec::new();
    items.push(SceneItem::Sphere(big_sphere));
    let mut rng = rand::thread_rng();
    // One randomly jittered small sphere per grid cell.
    for a in -11..11 {
        for b in -11..11 {
            // Picks the material class: diffuse / metal / glass.
            let mat_prob: f64 = rng.gen();
            let center = Vec3::new(a as f64 + 0.9 * rng.gen::<f64>(), 0.2, b as f64 + 0.9 * rng.gen::<f64>());
            // Skip spheres that would overlap the big glass sphere's spot.
            if (center - Vec3::new(4., 0.2, 0.)).magn() > 0.9 {
                if mat_prob < 0.8 {
                    // 80%: diffuse, animated upwards for motion blur.
                    let diff_mat = Material {
                        color: Coloration::Color(Color::new(rng.gen(), rng.gen(), rng.gen())),
                        albedo: rng.gen(),
                        surface: Surface::Diffuse
                    };
                    let sphere = MovingSphere::new(center, Vec3::new(center.x(), center.y() + rng.gen::<f64>(), center.z()), 0.2, diff_mat, 0., 1.);
                    // let sphere = Sphere::new(center, 0.2, diff_mat);
                    items.push(SceneItem::MovingSphere(sphere));
                } else if mat_prob < 0.95 {
                    // 15%: metal with random fuzz.
                    let metall_mat = Material {
                        color: Coloration::Color(Color::white()),
                        albedo: 0.8,
                        surface: Surface::Reflective {
                            reflectivity: rng.gen()
                        }
                    };
                    let metall_sphere = Sphere::new(center, 0.2, metall_mat);
                    items.push(SceneItem::Sphere(metall_sphere));
                } else {
                    // 5%: glass.
                    let glass_mat = Material {
                        color: Coloration::Color(Color::white()),
                        albedo: 1.,
                        surface: Surface::Refractive {
                            index: 1.5
                        }
                    };
                    let left_sphere = Sphere::new(center, 0.2, glass_mat);
                    items.push(SceneItem::Sphere(left_sphere));
                }
            }
        }
    }
    // The three signature large spheres: glass, diffuse, mirror.
    let s1 = Sphere::new(Vec3::new(0., 1., 0.), 1., Material {
        color: Coloration::Color(Color::white()),
        albedo: 0.8,
        surface: Surface::Refractive {
            index: 1.5
        }
    });
    let s2 = Sphere::new(Vec3::new(-4., 1., 0.), 1., Material {
        color: Coloration::Color(Color::new(0.4, 0.2, 0.1)),
        albedo: 0.8,
        surface: Surface::Diffuse
    });
    let s3 = Sphere::new(Vec3::new(4., 1., 0.), 1., Material {
        color: Coloration::Color(Color::new(0.4, 0.2, 0.1)),
        albedo: 0.8,
        surface: Surface::Reflective {
            reflectivity: 0.
        }
    });
    items.push(SceneItem::Sphere(s1));
    items.push(SceneItem::Sphere(s2));
    items.push(SceneItem::Sphere(s3));
    Scene::new(items)
}
|
//! The page for viewing the list of all repositories
use crate::registry::{self, RepoName};
use seed::browser::fetch;
use seed::error;
use seed::prelude::*;
use seed::{a, attrs, div, input, C};
/// Page state: the repository names currently displayed.
pub struct Model {
    repositories: Vec<RepoName>,
}
#[derive(Debug)]
/// Messages handled by this page.
pub enum Msg {
    /// Result of the async fetch of all repository names
    FetchedRepos(fetch::Result<Vec<RepoName>>),
    /// The user typed into the search box
    SearchInput(String),
}
/// Builds the page model and starts the async repository fetch.
/// The list stays empty until `Msg::FetchedRepos` arrives.
pub fn init(orders: &mut impl Orders<Msg>) -> Model {
    orders.perform_cmd(async { Msg::FetchedRepos(registry::get_repositories().await) });
    Model {
        repositories: vec![],
    }
}
/// Updates the model in response to a page message.
pub fn update(msg: Msg, model: &mut Model, _orders: &mut impl Orders<Msg>) {
    match msg {
        Msg::FetchedRepos(result) => match result {
            Ok(mut repos) => {
                repos.sort();
                model.repositories = repos;
            }
            Err(e) => {
                // Log the fetch error; the list simply stays empty.
                error!(e);
            }
        },
        // Re-rank the list so names matching the query come first.
        Msg::SearchInput(input) => sort_repos(input, &mut model.repositories),
    }
}
/// Reorders `repos` so that names containing `input` sort first
/// (`true > false`, and comparing `b`'s match against `a`'s puts matches
/// ahead), with ties broken by the repositories' natural ordering.
fn sort_repos(input: String, repos: &mut Vec<RepoName>) {
    repos.sort_by(|a, b| {
        b.name
            .contains(&input)
            .cmp(&a.name.contains(&input))
            .then(a.cmp(&b))
    })
}
/// Renders the search box followed by one card per repository.
pub fn view(model: &Model) -> Node<Msg> {
    // One clickable card linking to the repository's detail page.
    let view_card = |name: &RepoName| {
        a![
            C!["repo_list_card"],
            attrs! {
                At::Href => match name.namespace {
                    Some(_) => format!("/repo/{}", name),
                    // Repositories without a namespace live under the "_" segment.
                    None => format!("/repo/_/{}", name),
                }
            },
            div![C!["repo_card_header"], format!("{}", name),]
        ]
    };
    div![
        input![
            C!["repo_search"],
            attrs! {At::Placeholder => "🔍"},
            input_ev(Ev::Input, |input| Msg::SearchInput(input)),
        ],
        div![C!["list"], model.repositories.iter().map(view_card)]
    ]
}
|
extern crate extended_collections;
extern crate rand;
use extended_collections::lsm_tree::compaction::{LeveledStrategy, SizeTieredStrategy};
use extended_collections::lsm_tree::{LsmMap, Result};
use self::rand::{thread_rng, Rng};
use std::fs;
use std::panic;
use std::vec::Vec;
/// Removes the on-disk directory a test created, ignoring any error
/// (e.g. when the directory never existed).
fn teardown(test_name: &str) {
    // `test_name` already is the path; the original rebuilt it with a
    // pointless `format!("{}", test_name)` allocation (clippy: useless_format).
    fs::remove_dir_all(test_name).ok();
}
/// Runs `test` and then removes its on-disk directory, propagating the
/// test's result (even when it is an `Err`).
///
/// NOTE(review): if the closure panics, `teardown` is skipped and the
/// directory leaks — acceptable for tests, but worth knowing.
fn run_test<T>(test: T, test_name: &str) -> Result<()>
where
    T: FnOnce() -> Result<()>,
{
    let result = test();
    teardown(test_name);
    result
}
// End-to-end test of `LsmMap` with the size-tiered compaction strategy:
// bulk insert, persistence across flush/reopen, removal, and clear.
#[test]
fn int_test_lsm_map_size_tiered_strategy() -> Result<()> {
    let test_name = "int_test_lsm_map_size_tiered_strategy";
    run_test(
        || {
            // Fixed seed keeps the key/value stream deterministic.
            let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed([1, 1, 1, 1]);
            let mut sts = SizeTieredStrategy::new(test_name, 1000, 4, 4000, 0.5, 1.5)?;
            let mut map = LsmMap::new(sts);
            let mut expected = Vec::new();
            for _ in 0..10_000 {
                let key = rng.gen::<u32>();
                let val = rng.gen::<u64>();
                map.insert(key, val)?;
                expected.push((key, val));
            }
            // Reverse before the stable sort + dedup so the LAST insert for a
            // duplicated key is the one kept, matching the map's overwrite
            // semantics.
            expected.reverse();
            expected.sort_by(|l, r| l.0.cmp(&r.0));
            expected.dedup_by_key(|pair| pair.0);
            assert_eq!(map.len()?, expected.len());
            assert_eq!(map.len_hint()?, expected.len());
            assert_eq!(map.min()?, Some(expected[0].0));
            assert_eq!(map.max()?, Some(expected[expected.len() - 1].0));
            // Flush to disk, then reopen to verify persistence.
            map.flush()?;
            sts = SizeTieredStrategy::open(test_name)?;
            map = LsmMap::new(sts);
            for entry in &expected {
                assert!(map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, Some(entry.1));
            }
            // Remove everything in shuffled order, tracking the expected size.
            thread_rng().shuffle(&mut expected);
            let mut expected_len = expected.len();
            for (index, entry) in expected.iter().rev().enumerate() {
                assert!(map.contains_key(&entry.0)?);
                map.remove(entry.0)?;
                expected_len -= 1;
                assert!(!map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, None);
                assert!(map.len_hint()? >= expected_len);
                // Exact `len` is expensive, so only spot-check it.
                if index % 5000 == 0 {
                    assert_eq!(map.len()?, expected_len);
                }
            }
            expected.clear();
            // Refill with fresh pairs, then clear the whole map.
            for _ in 0..1000 {
                let key = rng.gen::<u32>();
                let val = rng.gen::<u64>();
                map.insert(key, val)?;
                expected.push((key, val));
            }
            map.clear()?;
            for entry in &expected {
                assert!(!map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, None);
            }
            assert_eq!(map.min()?, None);
            assert_eq!(map.max()?, None);
            map.flush()?;
            Ok(())
        },
        test_name,
    )
}
// Same end-to-end scenario as the size-tiered test above, but exercising
// the leveled compaction strategy.
#[test]
fn int_test_lsm_map_leveled_strategy() -> Result<()> {
    let test_name = "int_test_lsm_map_leveled_strategy";
    run_test(
        || {
            // Fixed seed keeps the key/value stream deterministic.
            let mut rng: rand::XorShiftRng = rand::SeedableRng::from_seed([1, 1, 1, 1]);
            let mut ls = LeveledStrategy::new(test_name, 1000, 4, 4000, 10, 10)?;
            let mut map = LsmMap::new(ls);
            let mut expected = Vec::new();
            for _ in 0..10_000 {
                let key = rng.gen::<u32>();
                let val = rng.gen::<u64>();
                map.insert(key, val)?;
                expected.push((key, val));
            }
            // Reverse before the stable sort + dedup so the LAST insert for a
            // duplicated key wins, matching the map's overwrite semantics.
            expected.reverse();
            expected.sort_by(|l, r| l.0.cmp(&r.0));
            expected.dedup_by_key(|pair| pair.0);
            assert_eq!(map.len()?, expected.len());
            assert_eq!(map.len_hint()?, expected.len());
            assert_eq!(map.min()?, Some(expected[0].0));
            assert_eq!(map.max()?, Some(expected[expected.len() - 1].0));
            // Flush to disk, then reopen to verify persistence.
            map.flush()?;
            ls = LeveledStrategy::open(test_name)?;
            map = LsmMap::new(ls);
            for entry in &expected {
                assert!(map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, Some(entry.1));
            }
            // Remove everything in shuffled order, tracking the expected size.
            thread_rng().shuffle(&mut expected);
            let mut expected_len = expected.len();
            for (index, entry) in expected.iter().rev().enumerate() {
                assert!(map.contains_key(&entry.0)?);
                map.remove(entry.0)?;
                expected_len -= 1;
                assert!(!map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, None);
                assert!(map.len_hint()? >= expected_len);
                // Exact `len` is expensive, so only spot-check it.
                if index % 5000 == 0 {
                    assert_eq!(map.len()?, expected_len);
                }
            }
            expected.clear();
            // Refill with fresh pairs, then clear the whole map.
            for _ in 0..1000 {
                let key = rng.gen::<u32>();
                let val = rng.gen::<u64>();
                map.insert(key, val)?;
                expected.push((key, val));
            }
            map.clear()?;
            for entry in &expected {
                assert!(!map.contains_key(&entry.0)?);
                assert_eq!(map.get(&entry.0)?, None);
            }
            assert_eq!(map.min()?, None);
            assert_eq!(map.max()?, None);
            map.flush()?;
            Ok(())
        },
        test_name,
    )
}
|
use std::io::Result;
use std::os::unix::io::RawFd;
use std::path::PathBuf;
/// Runs in the child after `fork`, before `exec`: starts a new session and
/// adopts fd 0 as the controlling terminal.
pub fn before_exec() -> Result<()> {
    use libc;
    // SAFETY: plain libc syscalls with no pointer arguments; their return
    // values are intentionally ignored (best effort before exec).
    unsafe {
        libc::setsid();
        libc::ioctl(0, libc::TIOCSCTTY, 1);
    }
    Ok(())
}
/// Thin wrapper over `libc::fork`: 0 in the child, the child's pid in the parent.
///
/// NOTE(review): `fork` returns `pid_t` with -1 on error; the `as usize` cast
/// turns that error into a huge value instead of signalling failure, so
/// callers cannot detect a failed fork — confirm this is acceptable.
pub fn fork() -> usize {
    use libc;
    // SAFETY: forking duplicates the process; callers handle post-fork setup.
    unsafe { libc::fork() as usize }
}
/// Sets the terminal window size of `fd` via the `TIOCSWINSZ` ioctl.
pub fn set_winsize(fd: RawFd, row: u16, col: u16, xpixel: u16, ypixel: u16) {
    use libc;
    unsafe {
        let size = libc::winsize {
            ws_row: row,
            ws_col: col,
            ws_xpixel: xpixel,
            ws_ypixel: ypixel,
        };
        // SAFETY: `size` is a fully initialized winsize that outlives the call.
        libc::ioctl(fd, libc::TIOCSWINSZ, &size as *const libc::winsize);
    }
}
/// Allocates a pseudo-terminal pair: opens the `/dev/ptmx` master, enables
/// packet mode, grants/unlocks the slave, and returns the master fd together
/// with the slave's path.
///
/// Panics when any of the pty ioctls fails.
pub fn getpty() -> (RawFd, PathBuf) {
    use libc;
    use std::ffi::CStr;
    use std::fs::OpenOptions;
    use std::io::Error;
    use std::os::unix::io::IntoRawFd;
    // NOTE(review): 0x5420 is the Linux value of TIOCPKT — presumably this
    // module is Linux-only; confirm before porting.
    const TIOCPKT: libc::c_ulong = 0x5420;
    extern "C" {
        fn ptsname(fd: libc::c_int) -> *const libc::c_char;
        fn grantpt(fd: libc::c_int) -> libc::c_int;
        fn unlockpt(fd: libc::c_int) -> libc::c_int;
        fn ioctl(fd: libc::c_int, request: libc::c_ulong, ...) -> libc::c_int;
    }
    let master_fd = OpenOptions::new()
        .read(true)
        .write(true)
        .open("/dev/ptmx")
        .unwrap()
        .into_raw_fd();
    unsafe {
        // Enable packet mode so reads are prefixed with control information.
        let mut flag: libc::c_int = 1;
        if ioctl(master_fd, TIOCPKT, &mut flag as *mut libc::c_int) < 0 {
            panic!("ioctl: {:?}", Error::last_os_error());
        }
        // Grant access to and unlock the slave side of the pty.
        if grantpt(master_fd) < 0 {
            panic!("grantpt: {:?}", Error::last_os_error());
        }
        if unlockpt(master_fd) < 0 {
            panic!("unlockpt: {:?}", Error::last_os_error());
        }
    }
    // SAFETY: `ptsname` returns a pointer to a static, NUL-terminated buffer
    // for a valid master fd; the contents are copied out immediately.
    let tty_path = unsafe {
        PathBuf::from(
            CStr::from_ptr(ptsname(master_fd))
                .to_string_lossy()
                .into_owned(),
        )
    };
    (master_fd, tty_path)
}
|
// NOTE(review): svd2rust-generated register accessor (read-only register with
// a single 32-bit MODER_RES field) — presumably regenerated from the device
// SVD; avoid hand edits.
#[doc = "Register `GPIOF_HWCFGR6` reader"]
pub type R = crate::R<GPIOF_HWCFGR6_SPEC>;
#[doc = "Field `MODER_RES` reader - MODER_RES"]
pub type MODER_RES_R = crate::FieldReader<u32>;
impl R {
    #[doc = "Bits 0:31 - MODER_RES"]
    #[inline(always)]
    pub fn moder_res(&self) -> MODER_RES_R {
        MODER_RES_R::new(self.bits)
    }
}
#[doc = "GPIO hardware configuration register 6\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`gpiof_hwcfgr6::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct GPIOF_HWCFGR6_SPEC;
impl crate::RegisterSpec for GPIOF_HWCFGR6_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`gpiof_hwcfgr6::R`](R) reader structure"]
impl crate::Readable for GPIOF_HWCFGR6_SPEC {}
#[doc = "`reset()` method sets GPIOF_HWCFGR6 to value 0xffff_ffff"]
impl crate::Resettable for GPIOF_HWCFGR6_SPEC {
    const RESET_VALUE: Self::Ux = 0xffff_ffff;
}
|
use std::fs;
/// Entry point: currently only day 1 is implemented.
fn main() {
    day1();
}
/// Day 1: reads the expense report and prints the product of every pair and
/// every triple of entries that sum to 2020.
fn day1() {
    let contents = fs::read_to_string("input/day1.input")
        .expect("Something went wrong reading the file");
    let numbers: Vec<i32> = contents
        .lines()
        .map(|s| s.parse().unwrap())
        .collect();
    // Part one: every unordered pair.
    for (i, &first) in numbers.iter().enumerate() {
        for &second in &numbers[i + 1..] {
            if first + second == 2020 {
                println!("Two numbers: {}", first * second);
            }
        }
    }
    // Part two: every unordered triple.
    for (i, &first) in numbers.iter().enumerate() {
        for (j, &second) in numbers.iter().enumerate().skip(i + 1) {
            for &third in &numbers[j + 1..] {
                if first + second + third == 2020 {
                    println!("Three numbers: {}", first * second * third);
                }
            }
        }
    }
}
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize)]
/// Server-list message exchanged with the backend.
// NOTE(review): the camelCase field names presumably mirror the JSON keys on
// the wire (no #[serde(rename)] attributes are present) — confirm against the
// server before renaming anything.
pub struct ReadableServersMessage {
    pub content: Vec<ReadableServersContent>,
    pub contentType: String,
}
#[derive(Serialize, Deserialize)]
/// One server entry inside a `ReadableServersMessage`.
pub struct ReadableServersContent {
    pub id: String,
    pub gameMode: String,
    pub permissions: u8,
    pub playerCount: String,
    pub provider: String,
    pub region: String,
    pub status: String,
}
#[derive(Serialize, Deserialize)]
/// Changelog message exchanged with the backend.
pub struct ChangelogMessage {
    pub content: Vec<ChangelogContent>,
    pub contentType: String,
}
#[derive(Serialize, Deserialize)]
/// One changelog entry inside a `ChangelogMessage`.
pub struct ChangelogContent {
    pub content: String,
    pub date: String,
    pub title: String,
    pub version: String,
    pub warn: bool,
}
|
use crate::{config::constants, utils::jwt::Token};
use actix_identity::RequestIdentity;
use actix_service::{Service, Transform};
use actix_web::{
dev::{ ServiceRequest, ServiceResponse},
web, Error, HttpResponse,
};
use futures::{
future::{ok, Ready},
Future,
};
use sqlx::{Pool, SqliteConnection};
use std::pin::Pin;
use std::task::{Context, Poll};
type DataPoolSqlite = web::Data<Pool<SqliteConnection>>;
/// Zero-sized middleware factory registered with actix-web; wraps every
/// service in an `AuthenticationMiddleware` (actix-web 2.x-era `Transform` API).
pub struct Authentication;
impl<S, B> Transform<S> for Authentication
where
    S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Request = ServiceRequest;
    type Response = ServiceResponse<B>;
    type Error = Error;
    type InitError = ();
    type Transform = AuthenticationMiddleware<S>;
    type Future = Ready<Result<Self::Transform, Self::InitError>>;
    fn new_transform(&self, service: S) -> Self::Future {
        ok(AuthenticationMiddleware { service })
    }
}
/// The middleware proper: rejects requests that do not carry a decodable JWT
/// identity, otherwise forwards them to the wrapped service.
pub struct AuthenticationMiddleware<S> {
    service: S,
}
impl<S, B> Service for AuthenticationMiddleware<S>
where
    S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
    S::Future: 'static,
    B: 'static,
{
    type Request = ServiceRequest;
    type Response = ServiceResponse<B>;
    type Error = Error;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>;
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.service.poll_ready(cx)
    }
    fn call(&mut self, req: ServiceRequest) -> Self::Future {
        // The request passes only when the identity decodes as a valid token.
        let mut authenticate_pass: bool = false;
        let identity = req.get_identity();
        if let Some(iden) = identity {
            if Token::decode(iden).is_ok() {
                authenticate_pass = true;
            }
        }
        if authenticate_pass {
            // Forward to the wrapped service.
            let fut = self.service.call(req);
            Box::pin(async move {
                let res = fut.await?;
                Ok(res)
            })
        } else {
            // Short-circuit with 401 Unauthorized without calling the inner service.
            Box::pin(async move {
                // let resp: ResponseBody<String> =
                //     ResponseBody::new(Status::SUCCESS, constants::MESSAGE_INVALID_TOKEN);
                Ok(req.into_response(
                    HttpResponse::Unauthorized()
                        .json(constants::MESSAGE_INVALID_TOKEN)
                        .into_body(),
                ))
            })
        }
    }
}
|
//! This crate contains a high-level abstraction for reading and manipulating
//! DICOM objects.
//! At this level, objects are comparable to a dictionary of elements,
//! in which some of them can have DICOM objects themselves.
//! The end user should prefer using this abstraction when dealing with DICOM
//! objects.
//!
//! # Examples
//!
//! Loading a DICOM file and reading some attributes by their standard alias:
//!
//! ```no_run
//! use dicom_object::open_file;
//! # fn foo() -> Result<(), Box<dyn std::error::Error>> {
//! let obj = open_file("0001.dcm")?;
//! let patient_name = obj.element_by_name("PatientName")?.to_str()?;
//! let modality = obj.element_by_name("Modality")?.to_str()?;
//! # Ok(())
//! # }
//! ```
//!
//! Elements can also be fetched by tag.
//! Methods are available for converting the element's DICOM value
//! into something more usable in Rust.
//!
//! ```
//! # use dicom_object::{DefaultDicomObject, Tag};
//! # fn something(obj: DefaultDicomObject) -> Result<(), Box<dyn std::error::Error>> {
//! let patient_date = obj.element(Tag(0x0010, 0x0030))?.to_date()?;
//! let pixel_data_bytes = obj.element(Tag(0x7FE0, 0x0010))?.to_bytes()?;
//! # Ok(())
//! # }
//! ```
//!
//! Finally, DICOM objects can be serialized back into DICOM encoded bytes.
//! A method is provided for writing a file DICOM object into a new DICOM file.
//!
//! ```no_run
//! # use dicom_object::{DefaultDicomObject, Tag};
//! # fn something(obj: DefaultDicomObject) -> Result<(), Box<dyn std::error::Error>> {
//! obj.write_to_file("0001_new.dcm")?;
//! # Ok(())
//! # }
//! ```
//!
//! This method requires you to write a [file meta table] first.
//! When creating a new DICOM object from scratch,
//! use a [`FileMetaTableBuilder`] to construct the file meta group,
//! then use `with_meta` or `with_exact_meta`:
//!
//! [file meta table]: crate::meta::FileMetaTable
//! [`FileMetaTableBuilder`]: crate::meta::FileMetaTableBuilder
//!
//! ```no_run
//! # use dicom_object::{InMemDicomObject, FileMetaTableBuilder};
//! # fn something(obj: InMemDicomObject) -> Result<(), Box<dyn std::error::Error>> {
//! let file_obj = obj.with_meta(
//! FileMetaTableBuilder::new()
//! // Implicit VR Little Endian
//! .transfer_syntax("1.2.840.10008.1.2")
//! // Computed Radiography image storage
//! .media_storage_sop_class_uid("1.2.840.10008.5.1.4.1.1.1")
//! )?;
//! file_obj.write_to_file("0001_new.dcm")?;
//! # Ok(())
//! # }
//! ```
//!
//! In order to write a plain DICOM data set,
//! use one of the various `write_dataset` methods.
//!
//! ```
//! # use dicom_object::InMemDicomObject;
//! # use dicom_core::{DataElement, Tag, VR, dicom_value};
//! # fn run() -> Result<(), Box<dyn std::error::Error>> {
//! // build your object
//! let mut obj = InMemDicomObject::create_empty();
//! let patient_name = DataElement::new(
//! Tag(0x0010, 0x0010),
//! VR::PN,
//! dicom_value!(Str, "Doe^John"),
//! );
//! obj.put(patient_name);
//!
//! // write the object's data set
//! let mut serialized = Vec::new();
//! let ts = dicom_transfer_syntax_registry::entries::EXPLICIT_VR_LITTLE_ENDIAN.erased();
//! obj.write_dataset_with_ts(&mut serialized, &ts)?;
//! assert!(!serialized.is_empty());
//! # Ok(())
//! # }
//! # run().unwrap();
//! ```
pub mod file;
pub mod loader;
pub mod mem;
pub mod meta;
#[deprecated(
since = "0.5.0",
note = "This is a stub, use the `dicom-pixeldata` crate instead"
)]
pub mod pixeldata;
pub mod tokens;
mod util;
pub use crate::file::{from_reader, open_file};
pub use crate::mem::InMemDicomObject;
pub use crate::meta::{FileMetaTable, FileMetaTableBuilder};
use dicom_core::DataDictionary;
pub use dicom_core::Tag;
pub use dicom_dictionary_std::StandardDataDictionary;
/// The default implementation of a root DICOM object.
pub type DefaultDicomObject = FileDicomObject<mem::InMemDicomObject<StandardDataDictionary>>;
use dicom_core::header::Header;
use dicom_encoding::adapters::{PixelDataObject, RawPixelData};
use dicom_encoding::{text::SpecificCharacterSet, transfer_syntax::TransferSyntaxIndex};
use dicom_parser::dataset::{DataSetWriter, IntoTokens};
use dicom_transfer_syntax_registry::TransferSyntaxRegistry;
use smallvec::SmallVec;
use snafu::{Backtrace, OptionExt, ResultExt, Snafu};
use std::fs::File;
use std::io::{BufWriter, Write};
use std::path::Path;
/// The current implementation class UID generically referring to DICOM-rs.
///
/// Automatically generated as per the standard, part 5, section B.2.
///
/// This UID is subject to changes in future versions.
pub const IMPLEMENTATION_CLASS_UID: &str = "2.25.137038125948464847900039011591283709926";
/// The current implementation version name generically referring to DICOM-rs.
///
/// This name is subject to change in future versions.
pub const IMPLEMENTATION_VERSION_NAME: &str = "DICOM-rs 0.3";
/// Trait type for a DICOM object.
/// This is a high-level abstraction where an object is accessed and
/// manipulated as dictionary of entries indexed by tags, which in
/// turn may contain a DICOM object.
///
/// This trait interface is experimental and prone to sudden changes.
pub trait DicomObject {
    /// The type of element yielded by the lookup methods below.
    type Element: Header;
    /// Retrieve a particular DICOM element by its tag.
    fn element(&self, tag: Tag) -> Result<Self::Element>;
    /// Retrieve a particular DICOM element by its name.
    fn element_by_name(&self, name: &str) -> Result<Self::Element>;
    /// Retrieve the processed meta information table, if available.
    ///
    /// This table will generally not be reachable from children objects
    /// in another object with a valid meta table. As such, it is recommended
    /// for this method to be called at the root of a DICOM object.
    fn meta(&self) -> Option<&FileMetaTable> {
        None
    }
}
/// The errors that may arise while reading, accessing, or writing
/// a DICOM object.
#[derive(Debug, Snafu)]
#[non_exhaustive]
pub enum Error {
    /// Could not open the file at the given path
    #[snafu(display("Could not open file '{}'", filename.display()))]
    OpenFile {
        filename: std::path::PathBuf,
        backtrace: Backtrace,
        source: std::io::Error,
    },
    /// Could not read from the file at the given path
    #[snafu(display("Could not read from file '{}'", filename.display()))]
    ReadFile {
        filename: std::path::PathBuf,
        backtrace: Backtrace,
        source: std::io::Error,
    },
    /// Could not parse the file meta group data set
    #[snafu(display("Could not parse meta group data set"))]
    ParseMetaDataSet {
        #[snafu(backtrace)]
        source: crate::meta::Error,
    },
    /// Could not create the data set parser
    #[snafu(display("Could not create data set parser"))]
    CreateParser {
        #[snafu(backtrace)]
        source: dicom_parser::dataset::read::Error,
    },
    /// Could not read a data set token from the source
    #[snafu(display("Could not read data set token"))]
    ReadToken {
        #[snafu(backtrace)]
        source: dicom_parser::dataset::read::Error,
    },
    /// Could not write to the file at the given path
    #[snafu(display("Could not write to file '{}'", filename.display()))]
    WriteFile {
        filename: std::path::PathBuf,
        backtrace: Backtrace,
        source: std::io::Error,
    },
    /// Could not write the 128-byte object preamble
    #[snafu(display("Could not write object preamble"))]
    WritePreamble {
        backtrace: Backtrace,
        source: std::io::Error,
    },
    /// Could not write the "DICM" magic code
    #[snafu(display("Could not write magic code"))]
    WriteMagicCode {
        backtrace: Backtrace,
        source: std::io::Error,
    },
    /// Could not create the data set printer (encoder)
    #[snafu(display("Could not create data set printer"))]
    CreatePrinter {
        #[snafu(backtrace)]
        source: dicom_parser::dataset::write::Error,
    },
    /// Could not print (serialize) the file meta group data set
    #[snafu(display("Could not print meta group data set"))]
    PrintMetaDataSet {
        #[snafu(backtrace)]
        source: crate::meta::Error,
    },
    /// Could not print (serialize) the main data set
    #[snafu(display("Could not print data set"))]
    PrintDataSet {
        #[snafu(backtrace)]
        source: dicom_parser::dataset::write::Error,
    },
    /// The transfer syntax UID is not in the registry
    #[snafu(display("Unsupported transfer syntax `{}`", uid))]
    UnsupportedTransferSyntax { uid: String, backtrace: Backtrace },
    /// A data element with the given tag does not exist
    #[snafu(display("No such data element with tag {}", tag))]
    NoSuchDataElementTag { tag: Tag, backtrace: Backtrace },
    /// A data element with the given alias (keyword) does not exist
    #[snafu(display("No such data element {} (with tag {})", alias, tag))]
    NoSuchDataElementAlias {
        tag: Tag,
        alias: String,
        backtrace: Backtrace,
    },
    /// The attribute name could not be resolved by the data dictionary
    #[snafu(display("Unknown data attribute named `{}`", name))]
    NoSuchAttributeName { name: String, backtrace: Backtrace },
    /// A value was expected for the element but none was present
    #[snafu(display("Missing element value"))]
    MissingElementValue { backtrace: Backtrace },
    /// The parser produced a token that was not expected at this point
    #[snafu(display("Unexpected token {:?}", token))]
    UnexpectedToken {
        token: dicom_parser::dataset::DataToken,
        backtrace: Backtrace,
    },
    /// The data set ended before it was complete
    #[snafu(display("Premature data set end"))]
    PrematureEnd { backtrace: Backtrace },
    /// Could not build file meta table
    BuildMetaTable {
        #[snafu(backtrace)]
        source: crate::meta::Error,
    },
    /// Could not prepare file meta table
    PrepareMetaTable {
        source: dicom_core::value::CastValueError,
        backtrace: Backtrace,
    },
}
/// Convenience alias for results produced by this crate.
pub type Result<T, E = Error> = std::result::Result<T, E>;
/// A root DICOM object contains additional meta information about the object
/// in a separate table.
#[deprecated(since = "0.4.0", note = "use `FileDicomObject` instead")]
pub type RootDicomObject<O> = FileDicomObject<O>;
/// A root DICOM object retrieved from a standard DICOM file,
/// containing additional information from the file meta group
/// in a separate table value.
#[derive(Debug, Clone, PartialEq)]
pub struct FileDicomObject<O> {
    /// the file meta group table (holds e.g. the transfer syntax UID)
    meta: FileMetaTable,
    /// the inner data set object
    obj: O,
}
impl<O> FileDicomObject<O> {
    /// Retrieve the processed meta header table.
    pub fn meta(&self) -> &FileMetaTable {
        &self.meta
    }
    /// Retrieve the inner DICOM object structure, discarding the meta table.
    pub fn into_inner(self) -> O {
        self.obj
    }
}
impl<O> FileDicomObject<O>
where
    for<'a> &'a O: IntoTokens,
{
    /// Write the entire object as a DICOM file
    /// into the given file path.
    /// Preamble, magic code, and file meta group will be included
    /// before the inner object.
    pub fn write_to_file<P: AsRef<Path>>(&self, path: P) -> Result<()> {
        let path = path.as_ref();
        let file = File::create(path).context(WriteFile { filename: path })?;
        let mut to = BufWriter::new(file);
        // write preamble
        to.write_all(&[0_u8; 128][..])
            .context(WriteFile { filename: path })?;
        // write magic sequence
        to.write_all(b"DICM")
            .context(WriteFile { filename: path })?;
        // write meta group
        self.meta.write(&mut to).context(PrintMetaDataSet)?;
        // write the main data set with the announced transfer syntax
        self.write_encoded_dataset(to)
    }
    /// Write the entire object as a DICOM file
    /// into the given writer.
    /// Preamble, magic code, and file meta group will be included
    /// before the inner object.
    pub fn write_all<W: Write>(&self, to: W) -> Result<()> {
        let mut to = BufWriter::new(to);
        // write preamble
        to.write_all(&[0_u8; 128][..]).context(WritePreamble)?;
        // write magic sequence
        to.write_all(b"DICM").context(WriteMagicCode)?;
        // write meta group
        self.meta.write(&mut to).context(PrintMetaDataSet)?;
        // write the main data set with the announced transfer syntax
        self.write_encoded_dataset(to)
    }
    /// Write the file meta group set into the given writer.
    ///
    /// This is equivalent to `self.meta().write(to)`.
    pub fn write_meta<W: Write>(&self, to: W) -> Result<()> {
        self.meta.write(to).context(PrintMetaDataSet)
    }
    /// Write the inner data set into the given writer,
    /// without preamble, magic code, nor file meta group.
    ///
    /// The transfer syntax is selected from the file meta table.
    pub fn write_dataset<W: Write>(&self, to: W) -> Result<()> {
        self.write_encoded_dataset(BufWriter::new(to))
    }
    /// Encode the inner data set into the given writer,
    /// using the transfer syntax announced in the file meta table.
    ///
    /// Shared by all public writing methods above; fails with
    /// `UnsupportedTransferSyntax` if the UID is not in the registry.
    fn write_encoded_dataset<W: Write>(&self, to: W) -> Result<()> {
        // prepare encoder
        let registry = TransferSyntaxRegistry::default();
        let ts = registry.get(&self.meta.transfer_syntax).with_context(|| {
            UnsupportedTransferSyntax {
                uid: self.meta.transfer_syntax.clone(),
            }
        })?;
        let cs = SpecificCharacterSet::Default;
        let mut dset_writer = DataSetWriter::with_ts_cs(to, ts, cs).context(CreatePrinter)?;
        // write object
        dset_writer
            .write_sequence((&self.obj).into_tokens())
            .context(PrintDataSet)?;
        Ok(())
    }
}
/// Delegate read access to the inner data set object.
impl<O> ::std::ops::Deref for FileDicomObject<O> {
    type Target = O;
    fn deref(&self) -> &Self::Target {
        &self.obj
    }
}
/// Delegate mutable access to the inner data set object.
impl<O> ::std::ops::DerefMut for FileDicomObject<O> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.obj
    }
}
/// A file DICOM object is itself a DICOM object,
/// delegating element access to the inner object and
/// exposing its own meta table.
impl<O> DicomObject for FileDicomObject<O>
where
    O: DicomObject,
{
    type Element = <O as DicomObject>::Element;
    fn element(&self, tag: Tag) -> Result<Self::Element> {
        self.obj.element(tag)
    }
    fn element_by_name(&self, name: &str) -> Result<Self::Element> {
        self.obj.element_by_name(name)
    }
    // unlike the trait default, a file object does have a meta table
    fn meta(&self) -> Option<&FileMetaTable> {
        Some(&self.meta)
    }
}
/// A shared reference to a file DICOM object is also a DICOM object.
///
/// NOTE(review): this impl does not override `meta`, so it falls back to
/// the trait default (`None`) unlike the owned impl above — confirm
/// whether that asymmetry is intended.
impl<'a, O: 'a> DicomObject for &'a FileDicomObject<O>
where
    O: DicomObject,
{
    type Element = <O as DicomObject>::Element;
    fn element(&self, tag: Tag) -> Result<Self::Element> {
        self.obj.element(tag)
    }
    fn element_by_name(&self, name: &str) -> Result<Self::Element> {
        self.obj.element_by_name(name)
    }
}
/// Iterate over the inner object's items, discarding the meta table.
impl<O> IntoIterator for FileDicomObject<O>
where
    O: IntoIterator,
{
    type Item = <O as IntoIterator>::Item;
    type IntoIter = <O as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.obj.into_iter()
    }
}
/// Implement basic pixeldata encoder/decoder functionality
impl<D> PixelDataObject for FileDicomObject<InMemDicomObject<D>>
where
    D: DataDictionary + Clone,
{
    /// Return the Rows attribute or None if it is not found
    fn rows(&self) -> Option<u16> {
        self.element(dicom_dictionary_std::tags::ROWS)
            .ok()?
            .uint16()
            .ok()
    }
    /// Return the Columns attribute or None if it is not found
    fn cols(&self) -> Option<u16> {
        self.element(dicom_dictionary_std::tags::COLUMNS)
            .ok()?
            .uint16()
            .ok()
    }
    /// Return the SamplesPerPixel attribute or None if it is not found
    fn samples_per_pixel(&self) -> Option<u16> {
        self.element(dicom_dictionary_std::tags::SAMPLES_PER_PIXEL)
            .ok()?
            .uint16()
            .ok()
    }
    /// Return the BitsAllocated attribute or None if it is not set
    fn bits_allocated(&self) -> Option<u16> {
        self.element(dicom_dictionary_std::tags::BITS_ALLOCATED)
            .ok()?
            .uint16()
            .ok()
    }
    /// Return the NumberOfFrames attribute or None if it is not set
    fn number_of_frames(&self) -> Option<u16> {
        self.element(dicom_dictionary_std::tags::NUMBER_OF_FRAMES)
            .ok()?
            .to_int()
            .ok()
    }
    /// Returns the number of fragments or None for native pixel data
    fn number_of_fragments(&self) -> Option<u32> {
        let pixel_data = self.element(dicom_dictionary_std::tags::PIXEL_DATA).ok()?;
        match pixel_data.value() {
            // native (unencapsulated) pixel data counts as one fragment
            dicom_core::DicomValue::Primitive(_p) => Some(1),
            dicom_core::DicomValue::PixelSequence {
                offset_table: _,
                fragments,
            } => Some(fragments.len() as u32),
            dicom_core::DicomValue::Sequence { items: _, size: _ } => None,
        }
    }
    /// Return a specific encoded pixel fragment by index as Vec<u8>,
    /// or None if no encapsulated pixel data is found
    /// or the fragment index is out of bounds
    fn fragment(&self, fragment: usize) -> Option<Vec<u8>> {
        let pixel_data = self.element(dicom_dictionary_std::tags::PIXEL_DATA).ok()?;
        match pixel_data.value() {
            dicom_core::DicomValue::PixelSequence {
                offset_table: _,
                fragments,
            } => {
                // checked lookup: an out-of-range index yields None
                // instead of panicking
                fragments.get(fragment).cloned()
            }
            _ => None,
        }
    }
    /// Should return either a byte slice/vector if native pixel data
    /// or byte fragments if encapsulated.
    /// Returns None if no pixel data is found
    fn raw_pixel_data(&self) -> Option<RawPixelData> {
        let pixel_data = self.element(dicom_dictionary_std::tags::PIXEL_DATA).ok()?;
        match pixel_data.value() {
            dicom_core::DicomValue::Primitive(p) => {
                // Create 1 fragment with all bytes
                let fragment = p.to_bytes().to_vec();
                let mut fragments = SmallVec::new();
                fragments.push(fragment);
                Some(RawPixelData {
                    fragments,
                    offset_table: SmallVec::new(),
                })
            }
            dicom_core::DicomValue::PixelSequence {
                offset_table,
                fragments,
            } => Some(RawPixelData {
                fragments: fragments.clone(),
                offset_table: offset_table.clone(),
            }),
            dicom_core::DicomValue::Sequence { items: _, size: _ } => None,
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::meta::FileMetaTableBuilder;
    use crate::FileDicomObject;
    /// Round-trip: write an empty object to a file and read it back,
    /// verifying that the result compares equal to the original.
    #[test]
    fn smoke_test() {
        const FILE_NAME: &str = ".smoke-test.dcm";
        let meta = FileMetaTableBuilder::new()
            .transfer_syntax(
                dicom_transfer_syntax_registry::entries::EXPLICIT_VR_LITTLE_ENDIAN.uid(),
            )
            .media_storage_sop_class_uid("1.2.840.10008.5.1.4.1.1.1")
            .media_storage_sop_instance_uid("1.2.3.456")
            .implementation_class_uid("1.2.345.6.7890.1.234")
            .build()
            .unwrap();
        let obj = FileDicomObject::new_empty_with_meta(meta);
        obj.write_to_file(FILE_NAME).unwrap();
        let obj2 = FileDicomObject::open_file(FILE_NAME).unwrap();
        assert_eq!(obj, obj2);
        // best-effort cleanup of the temporary file; failure is ignored
        let _ = std::fs::remove_file(FILE_NAME);
    }
}
|
use std::collections::{HashMap, VecDeque};
use std::str::FromStr;
use crate::util::lines_from_file;
/// Entry point for day 24: reads the puzzle input and prints both answers.
pub fn day24() {
    println!("== Day 24 ==");
    let input = lines_from_file("src/day24/input.txt");
    println!("Part A: {}", part_a(&input));
    println!("Part B: {}", part_b(&input));
}
/// Part A: the largest 14-digit model number accepted by MONAD.
///
/// Starts every digit at 9 and lets `alu2` pull paired digits down,
/// then sanity-checks the candidate with the full ALU interpreter.
fn part_a(input: &Vec<String>) -> i64 {
    let blocks = extract_blocks(input);
    let serial = alu2(&blocks, 9);
    // only z matters here; the other registers are intentionally ignored
    let (_, _, _, z) = alu(input, serial);
    if z != 0 { panic!("z is not zero! {}", z) }
    serial
}
/// Part B: the smallest 14-digit model number accepted by MONAD.
///
/// Same strategy as part A, but every digit starts at 1 so paired
/// digits are pushed up to the minimum valid value.
fn part_b(input: &Vec<String>) -> i64 {
    let blocks = extract_blocks(input);
    let serial = alu2(&blocks, 1);
    // only z matters here; the other registers are intentionally ignored
    let (_, _, _, z) = alu(input, serial);
    if z != 0 { panic!("z is not zero! {}", z) }
    serial
}
/// Split the instruction list into per-digit blocks, each starting at an
/// `inp w` instruction. Lines before the first `inp w` (if any) stay in
/// the first block.
fn extract_blocks(input: &Vec<String>) -> Vec<Vec<String>> {
    let mut blocks: Vec<Vec<String>> = vec![Vec::new()];
    for line in input {
        // an `inp w` opens a new block, unless the current one is still empty
        if line == "inp w" && !blocks.last().unwrap().is_empty() {
            blocks.push(Vec::new());
        }
        blocks.last_mut().unwrap().push(line.clone());
    }
    blocks
}
/// Exploration helper: runs `try_serial` on each instruction block and
/// prints the smallest/largest value it accepts.
///
/// NOTE(review): always returns `(0, 0)` and is not part of the final
/// solution path — it appears to be leftover from manual experimentation.
fn calculate_serial(blocks: Vec<Vec<String>>) -> (i64, i64) {
    for (bi, block) in blocks.iter().enumerate() {
        // println!("{:?}", block);
        // for n in 1..=9 {
        //     let number = concat(&vec![n; 14]);
        //     let (w,x,y,z) = alu(block, number);
        //     if z == 0 {
        //         println!("{}, {}: ({},{},{},{})", bi, n, w, x, y, z);
        //     }
        // }
        let (min, max) = try_serial(block);
        println!("{}: {} , {}", bi, min, max);
    }
    (0, 0)
}
/// Brute-force search: runs the ALU on every 14-digit combination of the
/// digits 1..=9, collecting each serial that is accepted (z == 0), and
/// returns the (min, max) accepted serial.
///
/// NOTE(review): 9^14 ≈ 2.3e13 iterations, each printing to stdout — this
/// is abandoned exploratory code; `alu2` is the practical solver. Panics
/// via `unwrap` if no serial at all is accepted.
fn try_serial(instructions: &Vec<String>) -> (i64, i64) {
    let mut numbers: Vec<i64> = Vec::new();
    // Too slow:
    // for n in 100_000_000_000_00..100_000_000_000_000{
    //     let digits: Vec<_> = n.to_string().chars().collect::<Vec<char>>();
    //     if digits.contains(&'0'){
    //         continue;
    //     }
    //     let (w,x,y,z) = alu(instructions, n);
    //     if z == 0 {
    //         numbers.push(n);
    //     }
    // }
    let valid = [1, 2, 3, 4, 5, 6, 7, 8, 9];
    for n1 in valid {
        println!("n1 is {}", n1);
        for n2 in valid {
            for n3 in valid {
                for n4 in valid {
                    for n5 in valid {
                        for n6 in valid {
                            for n7 in valid {
                                for n8 in valid {
                                    for n9 in valid {
                                        for n10 in valid {
                                            for n11 in valid {
                                                for n12 in valid {
                                                    for n13 in valid {
                                                        for n14 in valid {
                                                            let digits = vec![n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13, n14];
                                                            let number = concat(&digits);
                                                            println!("{}", number);
                                                            let (w, x, y, z) = alu(instructions, number);
                                                            if z == 0 {
                                                                numbers.push(number);
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    (*numbers.iter().min().unwrap(), *numbers.iter().max().unwrap())
}
/// Fold a digit sequence into a single integer, e.g. `[1, 2, 3]` -> `123`.
/// An empty sequence yields 0.
///
/// NOTE(review): assumes the result fits in i64 (true for the 14-digit
/// serials used here; would overflow beyond 18 digits).
fn concat(digits: &Vec<i64>) -> i64 {
    digits.iter().fold(0, |acc, elem| acc * 10 + elem)
}
/// Compute a valid 14-digit MONAD serial by exploiting the structure of
/// the puzzle program.
///
/// Each 18-line block either pushes its digit (plus the block's `add y`
/// constant, line 15) onto z as a base-26 stack (`div z 1`, line 4), or
/// pops one (`div z 26`). A popping block `i` only accepts when
/// `digit[i] == digit[j] + add_j + chk_i` for its pushing partner `j`
/// (`chk` is the `add x` constant, line 5). Starting every digit at
/// `start_with` (9 for the maximum serial, 1 for the minimum) and clamping
/// each pair into 1..=9 yields the extreme valid serial.
fn alu2(instructions: &Vec<Vec<String>>, start_with: i64) -> i64 {
    let mut number = [start_with; 14];
    // stack of (block index, `add y` constant) for pushed digits
    let mut stack: VecDeque<(usize, i64)> = VecDeque::new();
    for (i, block) in instructions.iter().enumerate() {
        let div = get_value_from_line(block.get(4).unwrap());
        let chk = get_value_from_line(block.get(5).unwrap());
        let add = get_value_from_line(block.get(15).unwrap());
        if div == 1 {
            stack.push_back((i, add));
        } else if div == 26 {
            let (j, sadd) = stack.pop_back().unwrap();
            // the pair constraint fixes digit i relative to digit j
            number[i] = number[j] + sadd + chk;
            // clamp digit i into 1..=9, shifting partner j by the same amount
            if number[i] > 9 {
                number[j] -= number[i] - 9;
                number[i] = 9;
            } else if number[i] < 1 {
                number[j] += 1 - number[i];
                number[i] = 1;
            }
        }
    }
    concat(&number.to_vec())
}
/// Parse the integer operand in the third word of an instruction line,
/// e.g. `"add x -4"` -> -4. Panics if the line has no third word or it
/// is not an integer.
fn get_value_from_line(line: &String) -> i64 {
    i64::from_str(line.split(' ').nth(2).unwrap()).unwrap()
}
/// Interpret an ALU program (`inp`/`add`/`mul`/`div`/`mod`/`eql`) and
/// return the final `(w, x, y, z)` register values.
///
/// Input handling: if the program contains exactly one `inp` instruction,
/// `number` is fed to it whole; otherwise each decimal digit of `number`
/// is fed to successive `inp` instructions in order.
///
/// Panics on malformed instructions, unknown opcodes, or when the program
/// reads more inputs than are available.
fn alu(instructions: &Vec<String>, number: i64) -> (i64, i64, i64, i64) {
    let mut registers: HashMap<&str, i64> =
        HashMap::from([("w", 0), ("x", 0), ("y", 0), ("z", 0)]);
    let inp_count = instructions.iter().filter(|l| l.starts_with("inp")).count();
    let mut inputs: VecDeque<i64> = if inp_count == 1 {
        VecDeque::from([number])
    } else {
        number
            .to_string()
            .chars()
            .map(|d| d.to_digit(10).unwrap() as i64)
            .collect()
    };
    for instruction in instructions {
        let parts: Vec<&str> = instruction.split(' ').collect();
        if parts[0] == "inp" {
            registers.insert(parts[1], inputs.pop_front().unwrap());
            continue;
        }
        // the second operand is either a register name or an integer literal
        let rhs = match registers.get(parts[2]) {
            Some(&v) => v,
            None => parts[2].parse().unwrap(),
        };
        let lhs = registers.entry(parts[1]).or_insert(0);
        match parts[0] {
            "add" => *lhs += rhs,
            "mul" => *lhs *= rhs,
            "div" => *lhs /= rhs,
            "mod" => *lhs %= rhs,
            "eql" => *lhs = (*lhs == rhs) as i64,
            _ => unreachable!(),
        }
    }
    (
        registers["w"],
        registers["x"],
        registers["y"],
        registers["z"],
    )
}
#[cfg(test)]
mod tests {
    use super::*;
    // Exercises add/mod/div over a single-`inp` program
    // (the whole number is fed as one input value).
    #[test]
    fn alu_t_0() {
        let number = 13579246899999;
        let input = vec![
            "inp w".to_string(),
            "add z w".to_string(),
            "mod z 2".to_string(),
            "div w 2".to_string(),
            "add y w".to_string(),
            "mod y 2".to_string(),
            "div w 2".to_string(),
            "add x w".to_string(),
            "mod x 2".to_string(),
            "div w 2".to_string(),
            "mod w 2".to_string(),
        ];
        let (w, x, y, z) = alu(&input, number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(1, w);
        assert_eq!(1, x);
        assert_eq!(1, y);
        assert_eq!(1, z);
    }
    // Multi-`inp` programs consume the number digit by digit.
    #[test]
    fn alu_t_1() {
        let number = 12345;
        let input = vec![
            "inp w",
            "inp x",
            "inp y",
            "inp z",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(1, w);
        assert_eq!(2, x);
        assert_eq!(3, y);
        assert_eq!(4, z);
    }
    // `add` with a register operand.
    #[test]
    fn alu_t_2() {
        let number = 12345;
        let input = vec![
            "inp w",
            "inp x",
            "add w x",
            "inp z",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(3, w);
        assert_eq!(2, x);
        assert_eq!(0, y);
        assert_eq!(3, z);
    }
    // `mul` with a register operand.
    #[test]
    fn alu_t_3() {
        let number = 12345;
        let input = vec![
            "inp w",
            "inp x",
            "mul w x",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(2, w);
        assert_eq!(2, x);
        assert_eq!(0, y);
        assert_eq!(0, z);
    }
    // Integer (truncating) division: 9 / 2 == 4.
    #[test]
    fn alu_t_4() {
        let number = 92;
        let input = vec![
            "inp w",
            "inp x",
            "div w x",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(4, w);
        assert_eq!(2, x);
        assert_eq!(0, y);
        assert_eq!(0, z);
    }
    // Remainder: 9 % 2 == 1.
    #[test]
    fn alu_t_5() {
        let number = 92;
        let input = vec![
            "inp w",
            "inp x",
            "mod w x",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(1, w);
        assert_eq!(2, x);
        assert_eq!(0, y);
        assert_eq!(0, z);
    }
    // `eql` writes 1 for equal operands, 0 otherwise.
    #[test]
    fn alu_t_6() {
        let number = 9244;
        let input = vec![
            "inp w",
            "inp x",
            "eql w x",
            "inp y",
            "inp z",
            "eql y z",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(0, w);
        assert_eq!(2, x);
        assert_eq!(1, y);
        assert_eq!(4, z);
    }
    // Negative literal operands are parsed correctly.
    #[test]
    fn alu_t_7() {
        let number = 9244;
        let input = vec![
            "inp w",
            "inp x",
            "add x -23",
        ];
        let (w, x, y, z) = alu(&input.iter().map(|s| s.to_string()).collect(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(9, w);
        assert_eq!(-21, x);
        assert_eq!(0, y);
        assert_eq!(0, z);
    }
    // Integration test: first MONAD block of the personal puzzle input.
    // NOTE(review): requires src/day24/input.txt; expected values are
    // specific to that input file.
    #[test]
    fn alu_t_8() {
        let number = 9;
        let filename = "src/day24/input.txt";
        let input = lines_from_file(filename);
        let blocks = extract_blocks(&input);
        let (w, x, y, z) = alu(blocks.get(0).unwrap(), number);
        println!("{} {} {} {}", w, x, y, z);
        assert_eq!(9, w);
        assert_eq!(1, x);
        assert_eq!(10, y);
        assert_eq!(10, z);
    }
    // Prints the per-block (div, chk, add) constants for manual inspection;
    // makes no assertions.
    #[test]
    fn alu_t_9() {
        let filename = "src/day24/input.txt";
        let input = lines_from_file(filename);
        let blocks = extract_blocks(&input);
        for b in blocks {
            println!("{}, {}, {}", b.get(4).unwrap(), b.get(5).unwrap(), b.get(15).unwrap());
            println!("{}, {}, {}", b.get(3).unwrap(), b.get(4).unwrap(), b.get(14).unwrap());
            println!()
        }
        // let mut number = 0;
        // loop{
        //     let (w, x, y, z) = alu(blocks.last().unwrap(), number);
        //     if z == 0 {
        //         println!("{} {} {} {}", w, x, y, z);
        //         break;
        //     }
        //     number += 1;
        // }
    }
    // The puzzle input always has exactly 14 digit blocks.
    #[test]
    fn extract_blocks_t() {
        let filename = "src/day24/input.txt";
        let input = lines_from_file(filename);
        let result = extract_blocks(&input);
        println!("{:?}", result);
        assert_eq!(14, result.len())
    }
    // Regression tests pinned to the personal puzzle answers.
    #[test]
    fn part_a_real() {
        let filename = "src/day24/input.txt";
        let input = lines_from_file(filename);
        let result = part_a(&input);
        assert_ne!(95649919999961, result);
        assert_eq!(true, 95649919999961 < result);
        assert_eq!(99299513899971, result);
    }
    #[test]
    fn part_b_real() {
        let filename = "src/day24/input.txt";
        let input = lines_from_file(filename);
        let result = part_b(&input);
        assert_eq!(93185111127911, result);
    }
}
|
/// Reader of register GPIO_STATUS
pub type R = crate::R<u32, super::GPIO_STATUS>;
/// Reader of field `IRQTOPROC`
pub type IRQTOPROC_R = crate::R<bool, bool>;
/// Reader of field `IRQFROMPAD`
pub type IRQFROMPAD_R = crate::R<bool, bool>;
/// Reader of field `INTOPERI`
pub type INTOPERI_R = crate::R<bool, bool>;
/// Reader of field `INFROMPAD`
pub type INFROMPAD_R = crate::R<bool, bool>;
/// Reader of field `OETOPAD`
pub type OETOPAD_R = crate::R<bool, bool>;
/// Reader of field `OEFROMPERI`
pub type OEFROMPERI_R = crate::R<bool, bool>;
/// Reader of field `OUTTOPAD`
pub type OUTTOPAD_R = crate::R<bool, bool>;
/// Reader of field `OUTFROMPERI`
pub type OUTFROMPERI_R = crate::R<bool, bool>;
impl R {
    /// Bit 26 - interrupt to processors, after override is applied
    #[inline(always)]
    pub fn irqtoproc(&self) -> IRQTOPROC_R {
        IRQTOPROC_R::new((self.bits >> 26) & 1 == 1)
    }
    /// Bit 24 - interrupt from pad before override is applied
    #[inline(always)]
    pub fn irqfrompad(&self) -> IRQFROMPAD_R {
        IRQFROMPAD_R::new((self.bits >> 24) & 1 == 1)
    }
    /// Bit 19 - input signal to peripheral, after override is applied
    #[inline(always)]
    pub fn intoperi(&self) -> INTOPERI_R {
        INTOPERI_R::new((self.bits >> 19) & 1 == 1)
    }
    /// Bit 17 - input signal from pad, before override is applied
    #[inline(always)]
    pub fn infrompad(&self) -> INFROMPAD_R {
        INFROMPAD_R::new((self.bits >> 17) & 1 == 1)
    }
    /// Bit 13 - output enable to pad after register override is applied
    #[inline(always)]
    pub fn oetopad(&self) -> OETOPAD_R {
        OETOPAD_R::new((self.bits >> 13) & 1 == 1)
    }
    /// Bit 12 - output enable from selected peripheral, before register override is applied
    #[inline(always)]
    pub fn oefromperi(&self) -> OEFROMPERI_R {
        OEFROMPERI_R::new((self.bits >> 12) & 1 == 1)
    }
    /// Bit 9 - output signal to pad after register override is applied
    #[inline(always)]
    pub fn outtopad(&self) -> OUTTOPAD_R {
        OUTTOPAD_R::new((self.bits >> 9) & 1 == 1)
    }
    /// Bit 8 - output signal from selected peripheral, before register override is applied
    #[inline(always)]
    pub fn outfromperi(&self) -> OUTFROMPERI_R {
        OUTFROMPERI_R::new((self.bits >> 8) & 1 == 1)
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.