text stringlengths 8 4.13M |
|---|
// Auto-generated peripheral description (svd2rust output) for an SDIO/SDMMC
// peripheral. Field order and the `_reserved*` paddings define the exact
// MMIO layout (`#[repr(C)]`); never reorder or resize fields by hand —
// regenerate from the SVD instead.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - power control register"]
    pub power: POWER,
    #[doc = "0x04 - SDI clock control register"]
    pub clkcr: CLKCR,
    #[doc = "0x08 - argument register"]
    pub arg: ARG,
    #[doc = "0x0c - command register"]
    pub cmd: CMD,
    #[doc = "0x10 - command response register"]
    pub respcmd: RESPCMD,
    #[doc = "0x14 - response 1..4 register"]
    pub resp1: RESP1,
    #[doc = "0x18 - response 1..4 register"]
    pub resp2: RESP2,
    #[doc = "0x1c - response 1..4 register"]
    pub resp3: RESP3,
    #[doc = "0x20 - response 1..4 register"]
    pub resp4: RESP4,
    #[doc = "0x24 - data timer register"]
    pub dtimer: DTIMER,
    #[doc = "0x28 - data length register"]
    pub dlen: DLEN,
    #[doc = "0x2c - data control register"]
    pub dctrl: DCTRL,
    #[doc = "0x30 - data counter register"]
    pub dcount: DCOUNT,
    #[doc = "0x34 - status register"]
    pub sta: STA,
    #[doc = "0x38 - interrupt clear register"]
    pub icr: ICR,
    #[doc = "0x3c - mask register"]
    pub mask: MASK,
    // Padding for the gap 0x40..0x48 — no registers at these offsets.
    _reserved16: [u8; 0x08],
    #[doc = "0x48 - FIFO counter register"]
    pub fifocnt: FIFOCNT,
    // Padding for the gap 0x4c..0x80 — no registers at these offsets.
    _reserved17: [u8; 0x34],
    #[doc = "0x80 - data FIFO register"]
    pub fifo: FIFO,
}
// Register accessor aliases: each `Reg<..._SPEC>` alias pairs with the module
// of the same (lowercase) name, which declares the register's fields and
// read/write capabilities.
#[doc = "POWER (rw) register accessor: power control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`power::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`power::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`power`]
module"]
pub type POWER = crate::Reg<power::POWER_SPEC>;
#[doc = "power control register"]
pub mod power;
#[doc = "CLKCR (rw) register accessor: SDI clock control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`clkcr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`clkcr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`clkcr`]
module"]
pub type CLKCR = crate::Reg<clkcr::CLKCR_SPEC>;
#[doc = "SDI clock control register"]
pub mod clkcr;
#[doc = "ARG (rw) register accessor: argument register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`arg::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`arg::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`arg`]
module"]
pub type ARG = crate::Reg<arg::ARG_SPEC>;
#[doc = "argument register"]
pub mod arg;
#[doc = "CMD (rw) register accessor: command register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cmd::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cmd::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`cmd`]
module"]
pub type CMD = crate::Reg<cmd::CMD_SPEC>;
#[doc = "command register"]
pub mod cmd;
#[doc = "RESPCMD (r) register accessor: command response register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`respcmd::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`respcmd`]
module"]
pub type RESPCMD = crate::Reg<respcmd::RESPCMD_SPEC>;
#[doc = "command response register"]
pub mod respcmd;
#[doc = "RESP1 (r) register accessor: response 1..4 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`resp1::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`resp1`]
module"]
pub type RESP1 = crate::Reg<resp1::RESP1_SPEC>;
#[doc = "response 1..4 register"]
pub mod resp1;
#[doc = "RESP2 (r) register accessor: response 1..4 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`resp2::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`resp2`]
module"]
pub type RESP2 = crate::Reg<resp2::RESP2_SPEC>;
#[doc = "response 1..4 register"]
pub mod resp2;
#[doc = "RESP3 (r) register accessor: response 1..4 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`resp3::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`resp3`]
module"]
pub type RESP3 = crate::Reg<resp3::RESP3_SPEC>;
#[doc = "response 1..4 register"]
pub mod resp3;
#[doc = "RESP4 (r) register accessor: response 1..4 register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`resp4::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`resp4`]
module"]
pub type RESP4 = crate::Reg<resp4::RESP4_SPEC>;
#[doc = "response 1..4 register"]
pub mod resp4;
#[doc = "DTIMER (rw) register accessor: data timer register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dtimer::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dtimer::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dtimer`]
module"]
pub type DTIMER = crate::Reg<dtimer::DTIMER_SPEC>;
#[doc = "data timer register"]
pub mod dtimer;
#[doc = "DLEN (rw) register accessor: data length register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dlen::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dlen::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dlen`]
module"]
pub type DLEN = crate::Reg<dlen::DLEN_SPEC>;
#[doc = "data length register"]
pub mod dlen;
#[doc = "DCTRL (rw) register accessor: data control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dctrl::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dctrl::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dctrl`]
module"]
pub type DCTRL = crate::Reg<dctrl::DCTRL_SPEC>;
#[doc = "data control register"]
pub mod dctrl;
#[doc = "DCOUNT (r) register accessor: data counter register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dcount::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`dcount`]
module"]
pub type DCOUNT = crate::Reg<dcount::DCOUNT_SPEC>;
#[doc = "data counter register"]
pub mod dcount;
#[doc = "STA (r) register accessor: status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sta::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`sta`]
module"]
pub type STA = crate::Reg<sta::STA_SPEC>;
#[doc = "status register"]
pub mod sta;
#[doc = "ICR (rw) register accessor: interrupt clear register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`icr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`icr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`icr`]
module"]
pub type ICR = crate::Reg<icr::ICR_SPEC>;
#[doc = "interrupt clear register"]
pub mod icr;
#[doc = "MASK (rw) register accessor: mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mask::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mask::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`mask`]
module"]
pub type MASK = crate::Reg<mask::MASK_SPEC>;
#[doc = "mask register"]
pub mod mask;
#[doc = "FIFOCNT (r) register accessor: FIFO counter register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fifocnt::R`]. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fifocnt`]
module"]
pub type FIFOCNT = crate::Reg<fifocnt::FIFOCNT_SPEC>;
#[doc = "FIFO counter register"]
pub mod fifocnt;
#[doc = "FIFO (rw) register accessor: data FIFO register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fifo::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fifo::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`fifo`]
module"]
pub type FIFO = crate::Reg<fifo::FIFO_SPEC>;
#[doc = "data FIFO register"]
pub mod fifo;
|
use crate::{
auth::UserDetail,
server::{
chancomms::ControlChanMsg,
controlchan::{
error::ControlChanError,
handler::{CommandContext, CommandHandler},
Reply, ReplyCode,
},
},
storage::{Metadata, StorageBackend},
};
use async_trait::async_trait;
use chrono::{offset::Utc, DateTime};
use std::{path::PathBuf, sync::Arc};
use tokio::sync::mpsc::Sender;
// Timestamp format mandated by RFC 3659 for MDTM replies: YYYYMMDDHHMMSS (UTC).
const RFC3659_TIME: &str = "%Y%m%d%H%M%S";
/// Handler for the FTP `MDTM` command (RFC 3659): report the last
/// modification time of the file named by `path`.
#[derive(Debug)]
pub struct Mdtm {
    // Target path as supplied by the client; resolved against the session's
    // current working directory when the command is handled.
    path: PathBuf,
}

impl Mdtm {
    /// Creates a handler for the given (possibly relative) target path.
    pub fn new(path: PathBuf) -> Self {
        Self { path }
    }
}
// MDTM (RFC 3659): reply with the file's modification time formatted as
// YYYYMMDDHHMMSS in UTC. The metadata fetch runs on a spawned task; `handle`
// returns `Reply::none()` immediately and the real FileStatus reply (or the
// storage error) is delivered asynchronously over the control channel.
#[async_trait]
impl<Storage, User> CommandHandler<Storage, User> for Mdtm
where
    User: UserDetail,
    Storage: StorageBackend<User> + 'static,
    Storage::Metadata: 'static + Metadata,
{
    #[tracing_attributes::instrument]
    async fn handle(&self, args: CommandContext<Storage, User>) -> Result<Reply, ControlChanError> {
        // Snapshot everything the spawned task needs while holding the
        // session lock; the task itself runs without the lock.
        let session = args.session.lock().await;
        let user = session.user.clone();
        let storage = Arc::clone(&session.storage);
        // Resolve the client-supplied path against the current working directory.
        let path = session.cwd.join(self.path.clone());
        let tx_success: Sender<ControlChanMsg> = args.tx_control_chan.clone();
        let tx_fail: Sender<ControlChanMsg> = args.tx_control_chan.clone();
        let logger = args.logger;
        tokio::spawn(async move {
            // NOTE(review): assumes a user is logged in by the time MDTM is
            // issued — the unwrap panics otherwise; confirm the command
            // dispatcher enforces authentication before routing here.
            match storage.metadata((*user).as_ref().unwrap(), &path).await {
                Ok(metadata) => {
                    // Metadata fetched; the modified time itself may still be
                    // unavailable on some backends.
                    let modification_time = match metadata.modified() {
                        Ok(v) => Some(v),
                        Err(err) => {
                            slog::warn!(
                                logger,
                                "MDTM: Could not get the modified time from the fetched metadata for path {:?}: {}",
                                path,
                                err
                            );
                            if let Err(err) = tx_fail.send(ControlChanMsg::StorageError(err)).await {
                                slog::warn!(logger, "MDTM: Could not send internal message to notify of MDTM failure: {}", err);
                            };
                            None
                        }
                    };
                    if let Some(mtime) = modification_time {
                        slog::info!(logger, "MDTM: Successfully fetched modification time for path {:?}", path);
                        if let Err(err) = tx_success
                            .send(ControlChanMsg::CommandChannelReply(Reply::new_with_string(
                                ReplyCode::FileStatus,
                                // RFC 3659 requires the timestamp in UTC.
                                DateTime::<Utc>::from(mtime).format(RFC3659_TIME).to_string(),
                            )))
                            .await
                        {
                            slog::warn!(logger, "MDTM: Could not send internal message to notify of MDTM success: {}", err);
                        }
                    }
                }
                Err(err) => {
                    // Storage error (e.g. file not found) — forward it so the
                    // control channel can produce the error reply.
                    if let Err(err) = tx_fail.send(ControlChanMsg::StorageError(err)).await {
                        slog::warn!(logger, "{}", err);
                    }
                }
            }
        });
        // The definitive reply is sent by the spawned task, not here.
        Ok(Reply::none())
    }
}
|
#![allow(
clippy::too_many_arguments,
clippy::new_without_default,
clippy::type_complexity
)]
use crate::ffi::*;
use crate::os::{HRESULT, LPCWSTR, LPWSTR, WCHAR};
use crate::utils::{from_wide, to_wide, HassleError};
use com_rs::ComPtr;
use libloading::{Library, Symbol};
use std::ffi::c_void;
use std::path::{Path, PathBuf};
use std::pin::Pin;
/// Converts a raw `HRESULT` into a `Result`: `Ok($v)` when `$hr` is zero
/// (S_OK), `Err($hr)` otherwise.
///
/// `$hr` is bound to a local first, so it is evaluated exactly once even
/// though it appears twice in the expansion.
#[macro_export]
macro_rules! check_hr {
    ($hr:expr, $v: expr) => {{
        let hr = $hr;
        if hr == 0 {
            Ok($v)
        } else {
            Err(hr)
        }
    }};
}
/// Crate-private variant of `check_hr!` that wraps a failing `HRESULT` in
/// `HassleError::Win32Error` instead of returning the raw code.
macro_rules! check_hr_wrapped {
    ($hr:expr, $v: expr) => {{
        let hr = $hr;
        if hr == 0 {
            Ok($v)
        } else {
            Err(HassleError::Win32Error(hr))
        }
    }};
}
/// Owned wrapper around an `IDxcBlob` COM pointer — a contiguous,
/// length-tagged byte buffer produced or consumed by DXC.
#[derive(Debug)]
pub struct DxcBlob {
    inner: ComPtr<IDxcBlob>,
}
impl DxcBlob {
    fn new(inner: ComPtr<IDxcBlob>) -> Self {
        Self { inner }
    }
    /// Views the blob's buffer as a slice of `T`.
    ///
    /// The element count is `buffer_size / size_of::<T>()`, so trailing bytes
    /// that do not fill a whole `T` are silently dropped.
    /// NOTE(review): a zero-sized `T` makes the division panic
    /// (divide-by-zero), and nothing here checks that the buffer pointer is
    /// suitably aligned for `T` — confirm callers only use byte/word types.
    pub fn as_slice<T>(&self) -> &[T] {
        unsafe {
            std::slice::from_raw_parts(
                self.inner.get_buffer_pointer() as *const T,
                self.inner.get_buffer_size() / std::mem::size_of::<T>(),
            )
        }
    }
    /// Mutable counterpart of [`as_slice`]; same truncation, ZST and
    /// alignment caveats apply.
    pub fn as_mut_slice<T>(&mut self) -> &mut [T] {
        unsafe {
            std::slice::from_raw_parts_mut(
                self.inner.get_buffer_pointer() as *mut T,
                self.inner.get_buffer_size() / std::mem::size_of::<T>(),
            )
        }
    }
    /// Copies the buffer into an owned `Vec<T>`.
    pub fn to_vec<T>(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }
}
// Byte-level views of the blob, for APIs that want plain `[u8]`.
impl AsRef<[u8]> for DxcBlob {
    fn as_ref(&self) -> &[u8] {
        self.as_slice()
    }
}
impl AsMut<[u8]> for DxcBlob {
    fn as_mut(&mut self) -> &mut [u8] {
        self.as_mut_slice()
    }
}
/// Owned wrapper around an `IDxcBlobEncoding` COM pointer — a blob that also
/// carries a code-page/encoding tag (e.g. UTF-8 text output).
#[derive(Debug)]
pub struct DxcBlobEncoding {
    inner: ComPtr<IDxcBlobEncoding>,
}
impl DxcBlobEncoding {
    fn new(inner: ComPtr<IDxcBlobEncoding>) -> Self {
        Self { inner }
    }
}
// Upcast: every IDxcBlobEncoding is also an IDxcBlob.
impl From<DxcBlobEncoding> for DxcBlob {
    fn from(encoded_blob: DxcBlobEncoding) -> Self {
        // The `(&ComPtr).into()` conversion produces the base-interface
        // pointer; presumably it also bumps the COM refcount — confirm
        // against com_rs's From impl.
        DxcBlob::new((&encoded_blob.inner).into())
    }
}
/// Owned wrapper around an `IDxcOperationResult` COM pointer — the outcome of
/// a compile/preprocess/validate call, bundling a status code, the result
/// blob, and an error buffer.
#[derive(Debug)]
pub struct DxcOperationResult {
    inner: ComPtr<IDxcOperationResult>,
}
impl DxcOperationResult {
    fn new(inner: ComPtr<IDxcOperationResult>) -> Self {
        Self { inner }
    }
    /// Returns the operation's status code (0 on success).
    pub fn get_status(&self) -> Result<u32, HRESULT> {
        let mut status: u32 = 0;
        check_hr!(unsafe { self.inner.get_status(&mut status) }, status)
    }
    /// Returns the primary output blob (e.g. compiled DXIL).
    pub fn get_result(&self) -> Result<DxcBlob, HRESULT> {
        let mut blob: ComPtr<IDxcBlob> = ComPtr::new();
        check_hr!(
            unsafe { self.inner.get_result(blob.as_mut_ptr()) },
            DxcBlob::new(blob)
        )
    }
    /// Returns the diagnostics buffer (compiler errors/warnings as text).
    pub fn get_error_buffer(&self) -> Result<DxcBlobEncoding, HRESULT> {
        let mut blob: ComPtr<IDxcBlobEncoding> = ComPtr::new();
        check_hr!(
            unsafe { self.inner.get_error_buffer(blob.as_mut_ptr()) },
            DxcBlobEncoding::new(blob)
        )
    }
}
/// User-supplied callback for resolving `#include` directives during
/// compilation/preprocessing.
pub trait DxcIncludeHandler {
    /// Returns the source text for `filename`, or `None` if it cannot be
    /// found (reported to DXC as ERROR_FILE_NOT_FOUND).
    fn load_source(&self, filename: String) -> Option<String>;
}
// Hand-rolled COM vtable for the include-handler object we pass to DXC.
// Slot order must match the binary interface DXC expects and must not be
// reordered. NOTE(review): the two extra destructor slots under
// `cfg(not(windows))` appear to account for the Itanium C++ ABI's two
// virtual-destructor entries (absent in the MSVC ABI) — confirm against the
// DXC headers for the targeted platforms.
#[repr(C)]
struct DxcIncludeHandlerWrapperVtbl {
    query_interface: extern "system" fn(
        *const com_rs::IUnknown,
        &com_rs::IID,
        *mut *mut core::ffi::c_void,
    ) -> com_rs::HResult,
    add_ref: extern "system" fn(*const com_rs::IUnknown) -> HRESULT,
    release: extern "system" fn(*const com_rs::IUnknown) -> HRESULT,
    #[cfg(not(windows))]
    complete_object_destructor: extern "system" fn(*const com_rs::IUnknown) -> HRESULT,
    #[cfg(not(windows))]
    deleting_destructor: extern "system" fn(*const com_rs::IUnknown) -> HRESULT,
    load_source:
        extern "system" fn(*mut com_rs::IUnknown, LPCWSTR, *mut *mut IDxcBlob) -> com_rs::HResult,
}
// COM-shaped object: `repr(C)` with the vtable pointer (`Box<...>` is
// pointer-sized) as the FIRST field, so a pointer to this struct can be
// handed to DXC as an interface pointer.
#[repr(C)]
struct DxcIncludeHandlerWrapper<'a> {
    vtable: Box<DxcIncludeHandlerWrapperVtbl>,
    handler: Box<dyn DxcIncludeHandler>,
    // Keeps the source strings returned by `load_source` alive, because the
    // blobs created from them reference (not copy) their bytes.
    pinned: Vec<Pin<String>>,
    library: &'a DxcLibrary,
}
impl<'a> DxcIncludeHandlerWrapper<'a> {
    // Minimal IUnknown stubs: DXC only ever calls load_source on this object,
    // so QueryInterface/AddRef/Release are no-ops returning success/0.
    extern "system" fn query_interface(
        _me: *const com_rs::IUnknown,
        _rrid: &com_rs::IID,
        _ppv_obj: *mut *mut core::ffi::c_void,
    ) -> com_rs::HResult {
        0 // dummy impl
    }
    extern "system" fn dummy(_me: *const com_rs::IUnknown) -> HRESULT {
        0 // dummy impl
    }
    /// vtable entry DXC invokes to resolve an include; bridges into the
    /// user's `DxcIncludeHandler`.
    extern "system" fn load_source(
        me: *mut com_rs::IUnknown,
        filename: LPCWSTR,
        include_source: *mut *mut IDxcBlob,
    ) -> com_rs::HResult {
        // Reinterpret the interface pointer back to our wrapper (valid
        // because the vtable pointer is the wrapper's first field).
        let me = me as *mut DxcIncludeHandlerWrapper;
        let filename = crate::utils::from_wide(filename as *mut _);
        let source = unsafe { (*me).handler.load_source(filename) };
        if let Some(source) = source {
            let source = Pin::new(source);
            let mut blob = unsafe {
                (*me)
                    .library
                    .create_blob_with_encoding_from_str(&source)
                    .unwrap()
            };
            unsafe {
                // Extra reference so the blob outlives our local ComPtr once
                // ownership of the raw pointer passes to DXC.
                blob.inner.add_ref();
                *include_source = *blob.inner.as_mut_ptr();
                // The blob borrows `source`'s bytes — keep the String alive
                // for the wrapper's lifetime.
                (*me).pinned.push(source);
            }
            0
        } else {
            -2_147_024_894 // ERROR_FILE_NOT_FOUND / 0x80070002
        }
    }
}
/// Wrapper around `IDxcCompiler2`, plus the `DxcLibrary` needed to build
/// blobs for include handling.
#[derive(Debug)]
pub struct DxcCompiler {
    inner: ComPtr<IDxcCompiler2>,
    library: DxcLibrary,
}
impl DxcCompiler {
    fn new(inner: ComPtr<IDxcCompiler2>, library: DxcLibrary) -> Self {
        Self { inner, library }
    }
    /// Converts `(name, value)` define pairs to UTF-16 `DxcDefine`s.
    ///
    /// `wide_defines` OWNS the UTF-16 buffers; `dxc_defines` holds raw
    /// pointers into them, so the caller must keep `wide_defines` alive for
    /// as long as `dxc_defines` is used (the compile methods do this by
    /// keeping both locals in scope across the FFI call).
    fn prep_defines(
        defines: &[(&str, Option<&str>)],
        wide_defines: &mut Vec<(Vec<WCHAR>, Vec<WCHAR>)>,
        dxc_defines: &mut Vec<DxcDefine>,
    ) {
        for (name, value) in defines {
            if value.is_none() {
                // A define with no value is conventionally treated as "1".
                wide_defines.push((to_wide(name), to_wide("1")));
            } else {
                wide_defines.push((to_wide(name), to_wide(value.unwrap())));
            }
        }
        for (ref name, ref value) in wide_defines {
            dxc_defines.push(DxcDefine {
                name: name.as_ptr(),
                value: value.as_ptr(),
            });
        }
    }
    /// Converts argument strings to UTF-16; same ownership split as
    /// `prep_defines` (`wide_args` owns, `dxc_args` borrows raw pointers).
    fn prep_args(args: &[&str], wide_args: &mut Vec<Vec<WCHAR>>, dxc_args: &mut Vec<LPCWSTR>) {
        for a in args {
            wide_args.push(to_wide(a));
        }
        for a in wide_args {
            dxc_args.push(a.as_ptr());
        }
    }
    /// Wraps a user include handler in the COM-shaped wrapper object, or
    /// returns `None` when no handler was supplied.
    fn prep_include_handler(
        library: &DxcLibrary,
        include_handler: Option<Box<dyn DxcIncludeHandler>>,
    ) -> Option<Box<DxcIncludeHandlerWrapper>> {
        if let Some(include_handler) = include_handler {
            let vtable = DxcIncludeHandlerWrapperVtbl {
                query_interface: DxcIncludeHandlerWrapper::query_interface,
                add_ref: DxcIncludeHandlerWrapper::dummy,
                release: DxcIncludeHandlerWrapper::dummy,
                #[cfg(not(windows))]
                complete_object_destructor: DxcIncludeHandlerWrapper::dummy,
                #[cfg(not(windows))]
                deleting_destructor: DxcIncludeHandlerWrapper::dummy,
                load_source: DxcIncludeHandlerWrapper::load_source,
            };
            Some(Box::new(DxcIncludeHandlerWrapper {
                vtable: Box::new(vtable),
                handler: include_handler,
                library,
                pinned: vec![],
            }))
        } else {
            None
        }
    }
    /// Compiles HLSL `blob` for `entry_point`/`target_profile`.
    ///
    /// Returns the operation result on success, or `(result, hresult)` when
    /// either the call failed or the shader had compile errors (inspect the
    /// error buffer for diagnostics).
    pub fn compile(
        &self,
        blob: &DxcBlobEncoding,
        source_name: &str,
        entry_point: &str,
        target_profile: &str,
        args: &[&str],
        include_handler: Option<Box<dyn DxcIncludeHandler>>,
        defines: &[(&str, Option<&str>)],
    ) -> Result<DxcOperationResult, (DxcOperationResult, HRESULT)> {
        let mut wide_args = vec![];
        let mut dxc_args = vec![];
        Self::prep_args(args, &mut wide_args, &mut dxc_args);
        let mut wide_defines = vec![];
        let mut dxc_defines = vec![];
        Self::prep_defines(defines, &mut wide_defines, &mut dxc_defines);
        // Must stay alive across the FFI call: DXC calls back into it.
        let handler_wrapper = Self::prep_include_handler(&self.library, include_handler);
        let mut result: ComPtr<IDxcOperationResult> = ComPtr::new();
        // The `to_wide(...)` temporaries live until the end of this `let`
        // statement, so their pointers remain valid for the duration of the
        // call.
        let result_hr = unsafe {
            self.inner.compile(
                blob.inner.as_ptr(),
                to_wide(source_name).as_ptr(),
                to_wide(entry_point).as_ptr(),
                to_wide(target_profile).as_ptr(),
                dxc_args.as_ptr(),
                dxc_args.len() as u32,
                dxc_defines.as_ptr(),
                dxc_defines.len() as u32,
                handler_wrapper
                    .as_ref()
                    .map_or(std::ptr::null(), |v| &**v as *const _ as *const _),
                result.as_mut_ptr(),
            )
        };
        let mut compile_error = 0u32;
        // NOTE(review): the HRESULT of get_status is ignored here; if the
        // compile call itself failed, `result` may not hold a valid object —
        // confirm ComPtr/DXC tolerate this.
        unsafe {
            result.get_status(&mut compile_error);
        }
        if result_hr == 0 && compile_error == 0 {
            Ok(DxcOperationResult::new(result))
        } else {
            Err((DxcOperationResult::new(result), result_hr))
        }
    }
    /// Like [`compile`], but additionally returns the debug (PDB) blob and
    /// its suggested file name.
    pub fn compile_with_debug(
        &self,
        blob: &DxcBlobEncoding,
        source_name: &str,
        entry_point: &str,
        target_profile: &str,
        args: &[&str],
        include_handler: Option<Box<dyn DxcIncludeHandler>>,
        defines: &[(&str, Option<&str>)],
    ) -> Result<(DxcOperationResult, String, DxcBlob), (DxcOperationResult, HRESULT)> {
        let mut wide_args = vec![];
        let mut dxc_args = vec![];
        Self::prep_args(args, &mut wide_args, &mut dxc_args);
        let mut wide_defines = vec![];
        let mut dxc_defines = vec![];
        Self::prep_defines(defines, &mut wide_defines, &mut dxc_defines);
        let handler_wrapper = Self::prep_include_handler(&self.library, include_handler);
        let mut result: ComPtr<IDxcOperationResult> = ComPtr::new();
        let mut debug_blob: ComPtr<IDxcBlob> = ComPtr::new();
        let mut debug_filename: LPWSTR = std::ptr::null_mut();
        let result_hr = unsafe {
            self.inner.compile_with_debug(
                blob.inner.as_ptr(),
                to_wide(source_name).as_ptr(),
                to_wide(entry_point).as_ptr(),
                to_wide(target_profile).as_ptr(),
                dxc_args.as_ptr(),
                dxc_args.len() as u32,
                dxc_defines.as_ptr(),
                dxc_defines.len() as u32,
                handler_wrapper
                    .as_ref()
                    .map_or(std::ptr::null(), |v| &**v as *const _ as *const _),
                result.as_mut_ptr(),
                &mut debug_filename,
                debug_blob.as_mut_ptr(),
            )
        };
        let mut compile_error = 0u32;
        // See the note in `compile` about the ignored get_status HRESULT.
        unsafe {
            result.get_status(&mut compile_error);
        }
        if result_hr == 0 && compile_error == 0 {
            Ok((
                DxcOperationResult::new(result),
                from_wide(debug_filename),
                DxcBlob::new(debug_blob),
            ))
        } else {
            Err((DxcOperationResult::new(result), result_hr))
        }
    }
    /// Runs only the preprocessor over `blob` (macro expansion, includes).
    pub fn preprocess(
        &self,
        blob: &DxcBlobEncoding,
        source_name: &str,
        args: &[&str],
        include_handler: Option<Box<dyn DxcIncludeHandler>>,
        defines: &[(&str, Option<&str>)],
    ) -> Result<DxcOperationResult, (DxcOperationResult, HRESULT)> {
        let mut wide_args = vec![];
        let mut dxc_args = vec![];
        Self::prep_args(args, &mut wide_args, &mut dxc_args);
        let mut wide_defines = vec![];
        let mut dxc_defines = vec![];
        Self::prep_defines(defines, &mut wide_defines, &mut dxc_defines);
        let handler_wrapper = Self::prep_include_handler(&self.library, include_handler);
        let mut result: ComPtr<IDxcOperationResult> = ComPtr::new();
        let result_hr = unsafe {
            self.inner.preprocess(
                blob.inner.as_ptr(),
                to_wide(source_name).as_ptr(),
                dxc_args.as_ptr(),
                dxc_args.len() as u32,
                dxc_defines.as_ptr(),
                dxc_defines.len() as u32,
                handler_wrapper
                    .as_ref()
                    .map_or(std::ptr::null(), |v| &**v as *const _ as *const _),
                result.as_mut_ptr(),
            )
        };
        let mut compile_error = 0u32;
        // See the note in `compile` about the ignored get_status HRESULT.
        unsafe {
            result.get_status(&mut compile_error);
        }
        if result_hr == 0 && compile_error == 0 {
            Ok(DxcOperationResult::new(result))
        } else {
            Err((DxcOperationResult::new(result), result_hr))
        }
    }
    /// Disassembles a compiled blob into textual assembly.
    pub fn disassemble(&self, blob: &DxcBlob) -> Result<DxcBlobEncoding, HRESULT> {
        let mut result_blob: ComPtr<IDxcBlobEncoding> = ComPtr::new();
        check_hr!(
            unsafe {
                self.inner
                    .disassemble(blob.inner.as_ptr(), result_blob.as_mut_ptr())
            },
            DxcBlobEncoding::new(result_blob)
        )
    }
}
/// Wrapper around `IDxcLibrary`, used to create and convert blobs.
#[derive(Debug)]
pub struct DxcLibrary {
    inner: ComPtr<IDxcLibrary>,
}
impl DxcLibrary {
    fn new(inner: ComPtr<IDxcLibrary>) -> Self {
        Self { inner }
    }
    /// Wraps raw bytes in an encoding blob with no code page (binary data).
    ///
    /// NOTE(review): `create_blob_with_encoding_from_pinned` suggests the
    /// blob references `data` rather than copying it; this wrapper does not
    /// extend `data`'s lifetime — confirm callers keep the buffer alive.
    pub fn create_blob_with_encoding(&self, data: &[u8]) -> Result<DxcBlobEncoding, HRESULT> {
        let mut blob: ComPtr<IDxcBlobEncoding> = ComPtr::new();
        check_hr!(
            unsafe {
                self.inner.create_blob_with_encoding_from_pinned(
                    data.as_ptr() as *const c_void,
                    data.len() as u32,
                    0, // Binary; no code page
                    blob.as_mut_ptr(),
                )
            },
            DxcBlobEncoding::new(blob)
        )
    }
    /// Wraps a UTF-8 string in an encoding blob tagged CP_UTF8.
    ///
    /// Same lifetime caveat as [`create_blob_with_encoding`]: the blob
    /// appears to reference `text`'s bytes.
    pub fn create_blob_with_encoding_from_str(
        &self,
        text: &str,
    ) -> Result<DxcBlobEncoding, HRESULT> {
        let mut blob: ComPtr<IDxcBlobEncoding> = ComPtr::new();
        const CP_UTF8: u32 = 65001; // UTF-8 translation
        check_hr!(
            unsafe {
                self.inner.create_blob_with_encoding_from_pinned(
                    text.as_ptr() as *const c_void,
                    text.len() as u32,
                    CP_UTF8,
                    blob.as_mut_ptr(),
                )
            },
            DxcBlobEncoding::new(blob)
        )
    }
    /// Converts a blob to UTF-8 and copies it into an owned `String`.
    pub fn get_blob_as_string(&self, blob: &DxcBlobEncoding) -> String {
        let mut blob_utf8: ComPtr<IDxcBlobEncoding> = ComPtr::new();
        // NOTE(review): the HRESULT is ignored and the buffer is read
        // unconditionally below; a failed conversion would read through an
        // uninitialized pointer — confirm this can't fail in practice.
        unsafe {
            self.inner
                .get_blob_as_utf8(blob.inner.as_ptr(), blob_utf8.as_mut_ptr())
        };
        let slice = unsafe {
            std::slice::from_raw_parts(
                blob_utf8.get_buffer_pointer() as *const u8,
                blob_utf8.get_buffer_size(),
            )
        };
        String::from_utf8(slice.to_vec()).unwrap()
    }
}
/// Handle to the loaded `dxcompiler` shared library; factory for
/// [`DxcCompiler`] and [`DxcLibrary`] instances.
#[derive(Debug)]
pub struct Dxc {
    dxc_lib: Library,
}
/// Platform-specific file name of the DXC shared library.
#[cfg(target_os = "windows")]
fn dxcompiler_lib_name() -> &'static Path {
    Path::new("dxcompiler.dll")
}

/// Platform-specific file name of the DXC shared library.
///
/// The leading `./` makes the loader also consider the current working
/// directory rather than only the standard search paths.
#[cfg(target_os = "linux")]
fn dxcompiler_lib_name() -> &'static Path {
    Path::new("./libdxcompiler.so")
}

/// Platform-specific file name of the DXC shared library.
///
/// macOS dynamic libraries use the `.dylib` extension; the previous
/// `.dynlib` spelling was a typo and could never match a real library file.
#[cfg(target_os = "macos")]
fn dxcompiler_lib_name() -> &'static Path {
    Path::new("./libdxcompiler.dylib")
}
impl Dxc {
    /// `dxc_path` can point to a library directly or the directory containing the library,
    /// in which case the appended filename depends on the platform.
    pub fn new(lib_path: Option<PathBuf>) -> Result<Self, HassleError> {
        // Resolve to a concrete file path: explicit file, directory + default
        // name, or just the default name (found via the loader search path).
        let lib_path = if let Some(lib_path) = lib_path {
            if lib_path.is_file() {
                lib_path
            } else {
                lib_path.join(&dxcompiler_lib_name())
            }
        } else {
            dxcompiler_lib_name().to_owned()
        };
        let dxc_lib =
            unsafe { Library::new(&lib_path) }.map_err(|e| HassleError::LoadLibraryError {
                filename: lib_path,
                inner: e,
            })?;
        Ok(Self { dxc_lib })
    }
    /// Looks up the `DxcCreateInstance` entry point in the loaded library.
    pub(crate) fn get_dxc_create_instance(
        &self,
    ) -> Result<Symbol<DxcCreateInstanceProc>, HassleError> {
        Ok(unsafe { self.dxc_lib.get(b"DxcCreateInstance\0")? })
    }
    /// Instantiates an `IDxcCompiler2` (plus the library object it needs).
    pub fn create_compiler(&self) -> Result<DxcCompiler, HassleError> {
        let mut compiler: ComPtr<IDxcCompiler2> = ComPtr::new();
        check_hr_wrapped!(
            self.get_dxc_create_instance()?(
                &CLSID_DxcCompiler,
                &IID_IDxcCompiler2,
                compiler.as_mut_ptr(),
            ),
            DxcCompiler::new(compiler, self.create_library()?)
        )
    }
    /// Instantiates an `IDxcLibrary`.
    pub fn create_library(&self) -> Result<DxcLibrary, HassleError> {
        let mut library: ComPtr<IDxcLibrary> = ComPtr::new();
        check_hr_wrapped!(
            self.get_dxc_create_instance()?(
                &CLSID_DxcLibrary,
                &IID_IDxcLibrary,
                library.as_mut_ptr(),
            ),
            DxcLibrary::new(library)
        )
    }
}
/// Wrapper around `IDxcValidator`, used to validate (and sign) DXIL.
#[derive(Debug)]
pub struct DxcValidator {
    inner: ComPtr<IDxcValidator>,
}
// (major, minor) validator version pair.
pub type DxcValidatorVersion = (u32, u32);
impl DxcValidator {
    fn new(inner: ComPtr<IDxcValidator>) -> Self {
        Self { inner }
    }
    /// Queries the validator's (major, minor) version via `IDxcVersionInfo`.
    pub fn version(&self) -> Result<DxcValidatorVersion, HRESULT> {
        let mut version: ComPtr<IDxcVersionInfo> = ComPtr::new();
        let result_hr = unsafe {
            self.inner
                .query_interface(&IID_IDxcVersionInfo, version.as_mut_ptr())
        };
        // The validator may not expose IDxcVersionInfo; surface the failure.
        if result_hr != 0 {
            return Err(result_hr);
        }
        let mut major = 0;
        let mut minor = 0;
        check_hr! {
            unsafe { version.get_version(&mut major, &mut minor) },
            (major, minor)
        }
    }
    /// Validates `blob` in place (IN_PLACE_EDIT: the input blob itself is
    /// updated/signed) and returns it on success.
    pub fn validate(&self, blob: DxcBlob) -> Result<DxcBlob, (DxcOperationResult, HRESULT)> {
        let mut result: ComPtr<IDxcOperationResult> = ComPtr::new();
        let result_hr = unsafe {
            self.inner.validate(
                blob.inner.as_ptr(),
                DXC_VALIDATOR_FLAGS_IN_PLACE_EDIT,
                result.as_mut_ptr(),
            )
        };
        let mut validate_status = 0u32;
        // NOTE(review): get_status's HRESULT is ignored here, as in the
        // compiler methods — confirm `result` is always valid at this point.
        unsafe { result.get_status(&mut validate_status) };
        if result_hr == 0 && validate_status == 0 {
            Ok(blob)
        } else {
            Err((DxcOperationResult::new(result), result_hr))
        }
    }
}
/// Handle to the loaded `dxil.dll`, which provides the DXIL validator/signer.
#[derive(Debug)]
pub struct Dxil {
    dxil_lib: Library,
}
impl Dxil {
    // Signing requires the proprietary dxil.dll, which only exists on Windows.
    #[cfg(not(windows))]
    pub fn new(_: Option<PathBuf>) -> Result<Self, HassleError> {
        Err(HassleError::WindowsOnly(
            "DXIL Signing is only supported on Windows".to_string(),
        ))
    }
    /// `dxil_path` can point to a library directly or the directory containing the library,
    /// in which case `dxil.dll` is appended.
    #[cfg(windows)]
    pub fn new(lib_path: Option<PathBuf>) -> Result<Self, HassleError> {
        let lib_path = if let Some(lib_path) = lib_path {
            if lib_path.is_file() {
                lib_path
            } else {
                lib_path.join("dxil.dll")
            }
        } else {
            PathBuf::from("dxil.dll")
        };
        let dxil_lib =
            unsafe { Library::new(&lib_path) }.map_err(|e| HassleError::LoadLibraryError {
                // NOTE(review): `to_owned()` on an owned PathBuf is a
                // redundant clone; `filename: lib_path` would move it.
                filename: lib_path.to_owned(),
                inner: e,
            })?;
        Ok(Self { dxil_lib })
    }
    /// Looks up the `DxcCreateInstance` entry point in dxil.dll.
    fn get_dxc_create_instance(&self) -> Result<Symbol<DxcCreateInstanceProc>, HassleError> {
        Ok(unsafe { self.dxil_lib.get(b"DxcCreateInstance\0")? })
    }
    /// Instantiates an `IDxcValidator`.
    pub fn create_validator(&self) -> Result<DxcValidator, HassleError> {
        let mut validator: ComPtr<IDxcValidator> = ComPtr::new();
        check_hr_wrapped!(
            self.get_dxc_create_instance()?(
                &CLSID_DxcValidator,
                &IID_IDxcValidator,
                validator.as_mut_ptr(),
            ),
            DxcValidator::new(validator)
        )
    }
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use crypto::address::AddressImpl;
use crypto::dsa::DsaImpl;
use node_consensus_base::ConsensusInMessage;
use node_executor::module;
use primitives::codec::Decode;
use primitives::Address;
use utils_test::test_accounts;
mod base;
// End-to-end test: on a standalone PoA chain, deploy a token contract and
// read back its metadata (name/symbol/decimals/total_supply) and the
// issuer's balance via read-only `execute` calls pinned to block 1.
#[tokio::test]
async fn test_poa_contract_token_read() {
    let _ = env_logger::try_init();
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa, address);
    let (account1, _account2) = (&test_accounts[0], &test_accounts[1]);
    // account1 is both the sole PoA authority and the contract issuer.
    let authority_accounts = [account1];
    let (chain, txpool, consensus) = base::get_standalone_service(&authority_accounts, &account1);
    // NOTE(review): `get_code` is not defined in this chunk — presumably the
    // contract WASM is provided elsewhere in this file/module.
    let ori_code = get_code().to_vec();
    // Deploy the contract with an `init` call minting the full supply.
    let tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 10)),
                chain.build_call("contract".to_string(),
                "create".to_string(),
                module::contract::CreateParams {
                    code: ori_code.clone(),
                    init_pay_value: 0,
                    init_method: "init".to_string(),
                    init_params: r#"{"name":"Bitcoin","symbol":"BTC","decimals":8,"total_supply":2100000000000000}"#.as_bytes().to_vec(),
                }).unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 1
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 1).await;
    // The create receipt's result is the SCALE-encoded contract address.
    let tx1_receipt = chain.get_receipt(&tx1_hash).unwrap().unwrap();
    let tx1_result = tx1_receipt.result.unwrap();
    let contract_address: Address = Decode::decode(&mut &tx1_result[..]).unwrap();
    log::info!("contract_address: {:x?}", contract_address);
    // name
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &1,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "name".to_string(),
                params: r#""#.as_bytes().to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    // Contract return values are JSON-encoded, hence the quoted string.
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#""Bitcoin""#.to_string(),);
    // symbol
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &1,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "symbol".to_string(),
                params: r#""#.as_bytes().to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#""BTC""#.to_string(),);
    // decimals
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &1,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "decimals".to_string(),
                params: r#""#.as_bytes().to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"8"#.to_string(),);
    // total supply
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &1,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "total_supply".to_string(),
                params: r#""#.as_bytes().to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"2100000000000000"#.to_string(),);
    // issuer balance: the full supply was minted to account1 by `init`.
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &1,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "balance".to_string(),
                params: format!(
                    r#"{{"address":"{}"}}"#,
                    Address((account1.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"2100000000000000"#.to_string(),);
}
/// End-to-end PoA test of the token contract's `transfer` method:
/// block 1 deploys the contract, block 2 executes a transfer from
/// account1 to account2, then both balances are queried read-only at
/// block 2 and checked against the expected post-transfer amounts.
#[tokio::test]
async fn test_poa_contract_token_transfer() {
    // Ignore the error if another test already installed a logger.
    let _ = env_logger::try_init();
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa, address);
    let (account1, account2) = (&test_accounts[0], &test_accounts[1]);
    // account1 is the sole PoA authority and runs the standalone service.
    let authority_accounts = [account1];
    let (chain, txpool, consensus) = base::get_standalone_service(&authority_accounts, account1);
    let ori_code = get_code().to_vec();
    // tx1: create the token contract, initialized via its `init` method.
    let tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 10)),
                chain.build_call("contract".to_string(),
                    "create".to_string(),
                    module::contract::CreateParams {
                        code: ori_code.clone(),
                        init_pay_value: 0,
                        init_method: "init".to_string(),
                        init_params: r#"{"name":"Bitcoin","symbol":"BTC","decimals":8,"total_supply":2100000000000000}"#.as_bytes().to_vec(),
                    }).unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 1
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 1).await;
    // The create receipt's result is the SCALE-encoded contract address.
    let tx1_receipt = chain.get_receipt(&tx1_hash).unwrap().unwrap();
    let tx1_result = tx1_receipt.result.unwrap();
    let contract_address: Address = Decode::decode(&mut &tx1_result[..]).unwrap();
    log::info!("contract_address: {:x?}", contract_address);
    // tx2: account1 transfers 100000000000000 units to account2.
    let _tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 10)),
                chain
                    .build_call(
                        "contract".to_string(),
                        "execute".to_string(),
                        module::contract::ExecuteParams {
                            contract_address: contract_address.clone(),
                            pay_value: 0,
                            method: "transfer".to_string(),
                            params: format!(
                                r#"{{"recipient":"{}","value":100000000000000}}"#,
                                Address((account2.address).0.clone())
                            )
                            .as_bytes()
                            .to_vec(),
                        },
                    )
                    .unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 2
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 2).await;
    // sender balance: total supply minus the transferred amount
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &2,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "balance".to_string(),
                params: format!(
                    r#"{{"address":"{}"}}"#,
                    Address((account1.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"2000000000000000"#.to_string(),);
    // recipient balance: exactly the transferred amount
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &2,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "balance".to_string(),
                params: format!(
                    r#"{{"address":"{}"}}"#,
                    Address((account2.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"100000000000000"#.to_string(),);
}
/// End-to-end PoA test of the token contract's allowance flow:
/// block 1 deploys, block 2 has account1 `approve` account2 for
/// 100000000000000, block 3 has account2 `transfer_from` 100000000 of
/// account1's funds to itself. Allowance and both balances are checked
/// via read-only calls after each relevant block.
#[tokio::test]
async fn test_poa_contract_token_transfer_from() {
    // Ignore the error if another test already installed a logger.
    let _ = env_logger::try_init();
    let dsa = Arc::new(DsaImpl::Ed25519);
    let address = Arc::new(AddressImpl::Blake2b160);
    let test_accounts = test_accounts(dsa, address);
    let (account1, account2) = (&test_accounts[0], &test_accounts[1]);
    // account1 is the sole PoA authority and runs the standalone service.
    let authority_accounts = [account1];
    let (chain, txpool, consensus) = base::get_standalone_service(&authority_accounts, account1);
    let ori_code = get_code().to_vec();
    // tx1: create the token contract, initialized via its `init` method.
    let tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 10)),
                chain.build_call("contract".to_string(),
                    "create".to_string(),
                    module::contract::CreateParams {
                        code: ori_code.clone(),
                        init_pay_value: 0,
                        init_method: "init".to_string(),
                        init_params: r#"{"name":"Bitcoin","symbol":"BTC","decimals":8,"total_supply":2100000000000000}"#.as_bytes().to_vec(),
                    }).unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 1
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 1).await;
    // The create receipt's result is the SCALE-encoded contract address.
    let tx1_receipt = chain.get_receipt(&tx1_hash).unwrap().unwrap();
    let tx1_result = tx1_receipt.result.unwrap();
    let contract_address: Address = Decode::decode(&mut &tx1_result[..]).unwrap();
    log::info!("contract_address: {:x?}", contract_address);
    // tx2: account1 approves account2 to spend 100000000000000.
    let _tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account1.secret_key.clone(), 0, 10)),
                chain
                    .build_call(
                        "contract".to_string(),
                        "execute".to_string(),
                        module::contract::ExecuteParams {
                            contract_address: contract_address.clone(),
                            pay_value: 0,
                            method: "approve".to_string(),
                            params: format!(
                                r#"{{"spender":"{}","value":100000000000000}}"#,
                                Address((account2.address).0.clone())
                            )
                            .as_bytes()
                            .to_vec(),
                        },
                    )
                    .unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 2
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 2).await;
    // check allowance after approving
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &2,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "allowance".to_string(),
                params: format!(
                    r#"{{"owner":"{}","spender":"{}"}}"#,
                    Address((account1.address).0.clone()),
                    Address((account2.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"100000000000000"#.to_string(),);
    // tx3: the spender (account2) pulls 100000000 from account1;
    // note the transaction is signed with account2's key.
    let _tx1_hash = base::insert_tx(
        &chain,
        &txpool,
        chain
            .build_transaction(
                Some((account2.secret_key.clone(), 0, 10)),
                chain
                    .build_call(
                        "contract".to_string(),
                        "execute".to_string(),
                        module::contract::ExecuteParams {
                            contract_address: contract_address.clone(),
                            pay_value: 0,
                            method: "transfer_from".to_string(),
                            params: format!(
                                r#"{{"sender":"{}","recipient":"{}","value":100000000}}"#,
                                Address((account1.address).0.clone()),
                                Address((account2.address).0.clone())
                            )
                            .as_bytes()
                            .to_vec(),
                        },
                    )
                    .unwrap(),
            )
            .unwrap(),
    )
    .await;
    base::wait_txpool(&txpool, 1).await;
    // generate block 3
    consensus
        .in_message_tx()
        .unbounded_send(ConsensusInMessage::Generate)
        .unwrap();
    base::wait_block_execution(&chain, 3).await;
    // allowance after transferring from: reduced by the spent amount
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &3,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "allowance".to_string(),
                params: format!(
                    r#"{{"owner":"{}","spender":"{}"}}"#,
                    Address((account1.address).0.clone()),
                    Address((account2.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"99999900000000"#.to_string(),);
    // sender balance: total supply minus the amount pulled by the spender
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &3,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "balance".to_string(),
                params: format!(
                    r#"{{"address":"{}"}}"#,
                    Address((account1.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"2099999900000000"#.to_string(),);
    // recipient balance: exactly the amount pulled
    let result: Vec<u8> = chain
        .execute_call_with_block_number(
            &3,
            Some(&account1.address),
            "contract".to_string(),
            "execute".to_string(),
            module::contract::ExecuteParams {
                contract_address: contract_address.clone(),
                method: "balance".to_string(),
                params: format!(
                    r#"{{"address":"{}"}}"#,
                    Address((account2.address).0.clone())
                )
                .as_bytes()
                .to_vec(),
                pay_value: 0,
            },
        )
        .unwrap()
        .unwrap();
    let result = String::from_utf8(result).unwrap();
    log::info!("result: {}", result);
    assert_eq!(result, r#"100000000"#.to_string(),);
}
/// Returns the compiled token sample contract wasm, embedded at build time.
fn get_code() -> &'static [u8] {
    // `include_bytes!` yields a `&'static [u8; N]`, which coerces to the
    // `&'static [u8]` return type directly — no intermediate binding needed.
    include_bytes!("../../../vm/contract-samples/token/release/contract_samples_token_bg.wasm")
}
|
use alloc::fmt;
use alloc::string::String;
/// Errors for [`WritableAsset`]
#[derive(Clone, Debug)]
pub enum WritableAssetError {
    /// Raised when failed to close an asset
    CloseFailed(String),
    /// Raised when failed to write to an asset
    WriteFailed(String),
}

impl fmt::Display for WritableAssetError {
    /// Renders the error as "<operation description>: <reason>".
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::CloseFailed(reason) => write!(f, "failed to close asset: {}", reason),
            Self::WriteFailed(reason) => write!(f, "failed to save changes to asset: {}", reason),
        }
    }
}
/// Interface for writing data to an asset.
///
/// see [`Resolver::open_asset_for_write()`] for how to retrieve instances of
/// this object.
pub trait WritableAsset {
    /// Close this asset, performing any necessary finalization or commits
    /// of data that was previously written. Returns [`WritableAssetError`]
    /// on failure.
    ///
    /// If successful, reads to the written asset in the same process should
    /// reflect the fully written state by the time this function returns.
    /// Also, further calls to any functions on this interface are invalid.
    fn close(&mut self) -> Result<(), WritableAssetError>;
    /// Writes `count` bytes from `buffer` at `offset` from the beginning
    /// of the asset. Returns number of bytes written, or [`WritableAssetError`].
    fn write(
        &mut self,
        buffer: &[u8],
        count: usize,
        offset: usize,
    ) -> Result<usize, WritableAssetError>;
}
|
// Copyright 2018-2019 Mozilla
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
use crate::backend::traits::BackendStat;
/// Newtype adapter exposing an LMDB [`lmdb::Stat`] through the
/// backend-agnostic [`BackendStat`] trait.
pub struct StatImpl(pub(crate) lmdb::Stat);

impl BackendStat for StatImpl {
    /// Size of a database page in bytes, widened from the LMDB native type.
    fn page_size(&self) -> usize {
        self.0.page_size() as usize
    }
    /// Depth (height) of the B-tree.
    fn depth(&self) -> usize {
        self.0.depth() as usize
    }
    /// Number of internal (non-leaf) pages.
    fn branch_pages(&self) -> usize {
        self.0.branch_pages()
    }
    /// Number of leaf pages.
    fn leaf_pages(&self) -> usize {
        self.0.leaf_pages()
    }
    /// Number of overflow pages (values too large for a leaf page).
    fn overflow_pages(&self) -> usize {
        self.0.overflow_pages()
    }
    /// Number of data items in the database.
    fn entries(&self) -> usize {
        self.0.entries()
    }
}
|
// use futures::stream::TryStreamExt;
// use futures::Stream;
// use parity_tokio_ipc::Endpoint as IpcEndpoint;
// use std::convert::TryFrom;
// use std::{
// pin::Pin,
// task::{Context, Poll},
// time::Duration,
// };
// use tokio::io::{AsyncRead, AsyncWrite};
// use tokio::sync::mpsc;
// use tonic::transport::server::Connected;
// use tonic::transport::{Endpoint, Uri};
// use tonic::{transport::Server, Request, Response, Status, Streaming};
// use tower::service_fn;
// use pahkat_client::{PackageAction, PackageKey};
// mod pb {
// tonic::include_proto!("/pahkat");
// }
// impl From<PackageAction> for pb::PackageAction {
// fn from(action: PackageAction) -> pb::PackageAction {
// pb::PackageAction {
// id: action.id.to_string(),
// action: action.action.to_u8() as u32,
// target: action.target as u8 as u32,
// }
// }
// }
// // #[cffi::marshal]
// // pub extern "C" fn pahkat_rpc_new() {}
// // #[cffi::marshal]
// // pub extern "C" fn pahkat_rpc_notifications(handle: TODO) {}
// // #[cffi::marshal]
// // pub extern "C" fn pahkat_rpc_repo_indexes(handle: TODO) {}
// // #[cffi::marshal]
// // pub extern "C" fn pahkat_rpc_status(handle: TODO) {}
// // #[cffi::marshal]
// // pub extern "C" fn pahkat_rpc_process_transaction(handle: TODO) {}
// #[tokio::main]
// async fn main() -> Result<(), Box<dyn std::error::Error>> {
// env_logger::init();
// let channel = Endpoint::try_from("file://tmp/pahkat")?
// .connect_with_connector(service_fn(|_: Uri| {
// let path = if cfg!(windows) {
// format!("//./pipe/pahkat")
// } else {
// format!("/tmp/pahkat")
// };
// IpcEndpoint::connect(path)
// }))
// .await?;
// let mut client = pb::pahkat_client::PahkatClient::new(channel);
// let stream = client
// .notifications(tonic::Request::new(pb::NotificationsRequest {}))
// .await?;
// let mut stream = stream.into_inner();
// tokio::spawn(async move {
// while let Ok(Some(response)) = stream.message().await {
// println!("RESPONSE={:?}", response);
// }
// });
// let request = tonic::Request::new(pb::TransactionRequest {
// actions: vec![PackageAction::install(
// PackageKey::try_from("https://x.brendan.so/divvun-pahkat-repo/packages/speller-smj")
// .unwrap(),
// Default::default(),
// )
// .into()],
// });
// let stream = client.process_transaction(request).await?;
// let mut stream = stream.into_inner();
// while let Ok(Some(response)) = stream.message().await {
// println!("RESPONSE={:?}", response);
// }
// // let req = tonic::Request::new(pb::RefreshRequest {});
// // let response = client.refresh(req).await?;
// // let req = tonic::Request::new(pb::RefreshRequest {});
// // let response = client.refresh(req).await?;
// // let req = tonic::Request::new(pb::RefreshRequest {});
// // let response = client.refresh(req).await?;
// Ok(())
// }
/// Entry point: runs the pahkat RPC client loop on the Tokio runtime,
/// propagating any error it returns.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    pahkat_rpc::client::run().await
}
|
use crate::utils::lines_from_file;
use lazy_static::lazy_static;
use regex::{Error, Regex};
use std::{str::FromStr, time::Instant};
/// Runs day 2 part 2 against the puzzle input and reports the answer
/// together with the elapsed wall-clock time.
pub fn main() {
    let timer = Instant::now();
    let input = lines_from_file("src/day_02/input.txt");
    let answer = part_2(input);
    println!("valid_count {:?}", answer);
    println!("Finished after {:?}", timer.elapsed());
}
/// Counts passwords valid under the part-2 policy: the policy letter must
/// appear at exactly one of the two 1-based positions `min` and `max`.
///
/// Panics (like the original) if a line fails to parse or a position is
/// out of range of the password.
fn part_2(entries: Vec<String>) -> i32 {
    entries
        .iter()
        .filter(|entry| {
            let line = Line::from_str(entry.as_str()).unwrap();
            let char_a = line.password.chars().nth((line.min - 1) as usize).unwrap();
            let char_b = line.password.chars().nth((line.max - 1) as usize).unwrap();
            // Exactly one of the two positions holds the letter: XOR,
            // expressed as inequality of the two boolean checks.
            (char_a == line.letter) != (char_b == line.letter)
        })
        .count() as i32
}
fn part_1(entries: Vec<String>) -> i32 {
let mut valid_count = 0;
for entry in entries {
let line = Line::from_str(entry.as_str()).unwrap();
let re = Regex::new(line.letter.to_string().as_str()).unwrap();
let caps = re.captures_iter(line.password.as_str());
let match_count = caps.into_iter().collect::<Vec<_>>().len() as u8;
if match_count >= line.min && match_count <= line.max {
valid_count += 1;
}
}
valid_count
}
/// One parsed password-policy line of the form `1-3 a: abcde`.
struct Line {
    // First number of the policy (minimum count / first position).
    min: u8,
    // Second number of the policy (maximum count / second position).
    max: u8,
    // The policy letter.
    letter: char,
    // The password being validated.
    password: String,
}
impl FromStr for Line {
    type Err = Error;
    /// Parses `"<min>-<max> <letter>: <password>"` via a lazily-compiled
    /// regex shared across calls.
    ///
    /// NOTE(review): despite returning `Result`, this panics (never returns
    /// `Err`) when the input does not match the pattern or a number does not
    /// fit in `u8` — the `unwrap()`s below fire instead.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        lazy_static! {
            // Compiled once on first use and reused for every line.
            static ref RE: Regex = Regex::new(
                r"(?P<min>\d{1,2})-(?P<max>\d{1,2})\s(?P<letter>\w):\s(?P<password>\w*)"
            )
            .unwrap();
        }
        let caps = RE.captures(s).unwrap();
        Ok(Line {
            min: caps["min"].parse::<u8>().unwrap(),
            max: caps["max"].parse::<u8>().unwrap(),
            letter: caps["letter"].chars().next().unwrap(),
            password: caps["password"].to_string(),
        })
    }
}
|
extern crate hyphenated_name;
/// Prints the constant `NAME` exported by the `hyphenated-name` crate.
fn main() {
    println!("Hyphenated: {}", hyphenated_name::NAME);
}
|
//! An example of generating constant valued noise
extern crate noise;
use noise::Checkerboard;
use noise::utils::*;
/// Renders a checkerboard noise function to `checkerboard.png`.
fn main() {
    let checker = Checkerboard::new();
    // Sample the noise over the default plane extent and write the image.
    PlaneMapBuilder::new(&checker)
        .build()
        .write_to_file("checkerboard.png");
}
|
use super::parser::LvarCollector;
use crate::util::{Annot, IdentId, Loc};
/// The kind of an AST [`Node`]: every syntactic construct the parser
/// can produce, from literals through definitions and method sends.
#[derive(Debug, Clone, PartialEq)]
pub enum NodeKind {
    // --- literals and simple values ---
    SelfValue,
    Nil,
    Integer(i64),
    Float(f64),
    Bool(bool),
    String(String),
    // String with embedded interpolations, each segment its own node.
    InterporatedString(Vec<Node>),
    Symbol(IdentId),
    Range {
        start: Box<Node>,
        end: Box<Node>,
        exclude_end: bool,
    }, // start, end, exclude_end
    Array(NodeVec),
    Hash(Vec<(Node, Node)>),
    RegExp(Vec<Node>),
    // --- variables and constants ---
    LocalVar(IdentId),
    // Bare identifier; may resolve to a local variable or a method call.
    Ident(IdentId),
    InstanceVar(IdentId),
    GlobalVar(IdentId),
    Const {
        // true when written with a leading `::` (top-level lookup).
        toplevel: bool,
        id: IdentId,
    },
    // Scope resolution `parent::id`.
    Scope(Box<Node>, IdentId),
    // --- operators, indexing, assignment ---
    BinOp(BinOp, Box<Node>, Box<Node>),
    UnOp(UnOp, Box<Node>),
    ArrayMember {
        array: Box<Node>,
        index: Vec<Node>,
    },
    Splat(Box<Node>),
    Assign(Box<Node>, Box<Node>),
    // Compound assignment such as `+=` (op, lhs, rhs).
    AssignOp(BinOp, Box<Node>, Box<Node>),
    // Multiple assignment `a, b = x, y` (lhs list, rhs list).
    MulAssign(Vec<Node>, Vec<Node>),
    // Sequence of statements.
    CompStmt(NodeVec),
    // --- control flow ---
    If {
        cond: Box<Node>,
        then_: Box<Node>,
        else_: Box<Node>,
    },
    For {
        param: Box<Node>,
        iter: Box<Node>,
        body: Box<Node>,
    },
    While {
        cond: Box<Node>,
        body: Box<Node>,
    },
    Case {
        cond: Box<Node>,
        when_: Vec<CaseBranch>,
        else_: Box<Node>,
    },
    Begin {
        body: Box<Node>,
        rescue: Vec<(Node, Node)>, // (ex_class_list, ex_param)
        else_: Box<Node>,
        ensure: Box<Node>,
    },
    Proc {
        params: NodeVec,
        body: Box<Node>,
        lvar: LvarCollector,
    },
    Break(Box<Node>),
    Next(Box<Node>),
    Return(Box<Node>),
    Yield(SendArgs),
    // --- parameter kinds in method/proc definitions ---
    Param(IdentId),
    PostParam(IdentId),
    OptionalParam(IdentId, Box<Node>),
    RestParam(IdentId),
    KeywordParam(IdentId, Box<Option<Node>>),
    BlockParam(IdentId),
    // --- definitions and method calls ---
    MethodDef(IdentId, NodeVec, Box<Node>, LvarCollector), // id, params, body
    SingletonMethodDef(Box<Node>, IdentId, NodeVec, Box<Node>, LvarCollector), // singleton_class, id, params, body
    ClassDef {
        id: IdentId,
        superclass: Box<Node>,
        body: Box<Node>,
        lvar: LvarCollector,
        is_module: bool,
    },
    Send {
        receiver: Box<Node>,
        method: IdentId,
        send_args: SendArgs,
        completed: bool,
    }, //receiver, method_name, args
}
/// The argument list of a method send or `yield`: positional arguments,
/// keyword arguments, and an optional block argument.
#[derive(Debug, Clone, PartialEq)]
pub struct SendArgs {
    // Positional arguments. NOTE(review): `Node::new_send`/`new_yield`
    // reverse this vec before storing it — confirm consumer expectations.
    pub args: NodeVec,
    // Keyword arguments as (name, value) pairs.
    pub kw_args: Vec<(IdentId, Node)>,
    // Block argument (`&blk` / literal block), if any.
    pub block: Option<Box<Node>>,
}
/// Standard-trait `Default` instead of the inherent `default()` the block
/// had before (clippy: `should_implement_trait`); call sites using
/// `SendArgs::default()` resolve to this trait method unchanged.
impl Default for SendArgs {
    /// An empty argument list: no positional args, no keyword args, no block.
    fn default() -> Self {
        SendArgs {
            args: vec![],
            kw_args: vec![],
            block: None,
        }
    }
}
/// One `when` branch of a `case` expression: the guard expressions and
/// the body executed when any of them matches.
#[derive(Debug, Clone, PartialEq)]
pub struct CaseBranch {
    // The expressions after `when` (a match succeeds on any of them).
    pub when: Vec<Node>,
    // The branch body.
    pub body: Box<Node>,
}
impl CaseBranch {
pub fn new(when: Vec<Node>, body: Node) -> Self {
CaseBranch {
when,
body: Box::new(body),
}
}
}
/// Binary operators recognized by the parser.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum BinOp {
    // arithmetic
    Add,
    Sub,
    Mul,
    Div,
    Rem,
    Exp,
    // bit shifts and bitwise logic
    Shr,
    Shl,
    BitAnd,
    BitOr,
    BitXor,
    // comparison; TEq is the case-equality operator `===`,
    // Cmp is the three-way comparison `<=>`
    Eq,
    Ne,
    TEq,
    Gt,
    Ge,
    Lt,
    Le,
    Cmp,
    // short-circuit logic and pattern match `=~`
    LAnd,
    LOr,
    Match,
}
/// Unary operators: bitwise complement `~` and logical negation `!`.
#[derive(Debug, Clone, PartialEq)]
pub enum UnOp {
    BitNot,
    Not,
}
/// An AST node: a [`NodeKind`] annotated with its source location.
pub type Node = Annot<NodeKind>;
/// Convenience alias for a list of nodes.
pub type NodeVec = Vec<Node>;
impl Node {
    // Constructors: each wraps a NodeKind with a source location. Several
    // widen the given `loc` by merging the locations of child nodes so the
    // resulting node spans its whole textual extent.
    pub fn new_nil(loc: Loc) -> Self {
        Node::new(NodeKind::Nil, loc)
    }
    pub fn new_integer(num: i64, loc: Loc) -> Self {
        Node::new(NodeKind::Integer(num), loc)
    }
    pub fn new_bool(b: bool, loc: Loc) -> Self {
        Node::new(NodeKind::Bool(b), loc)
    }
    pub fn new_float(num: f64, loc: Loc) -> Self {
        Node::new(NodeKind::Float(num), loc)
    }
    pub fn new_string(s: String, loc: Loc) -> Self {
        Node::new(NodeKind::String(s), loc)
    }
    /// Array literal; the location is extended to cover the last element.
    pub fn new_array(nodes: Vec<Node>, loc: Loc) -> Self {
        let loc = match nodes.last() {
            Some(node) => loc.merge(node.loc()),
            None => loc,
        };
        Node::new(NodeKind::Array(nodes), loc)
    }
    pub fn new_range(start: Node, end: Node, exclude_end: bool, loc: Loc) -> Self {
        Node::new(
            NodeKind::Range {
                start: Box::new(start),
                end: Box::new(end),
                exclude_end,
            },
            loc,
        )
    }
    pub fn new_hash(key_value: Vec<(Node, Node)>, loc: Loc) -> Self {
        Node::new(NodeKind::Hash(key_value), loc)
    }
    pub fn new_regexp(regex: Vec<Node>, loc: Loc) -> Self {
        Node::new(NodeKind::RegExp(regex), loc)
    }
    pub fn new_self(loc: Loc) -> Self {
        Node::new(NodeKind::SelfValue, loc)
    }
    pub fn new_interporated_string(nodes: Vec<Node>, loc: Loc) -> Self {
        Node::new(NodeKind::InterporatedString(nodes), loc)
    }
    /// Statement sequence; when non-empty, the location is recomputed to
    /// span from the first statement to the last, ignoring the given `loc`'s
    /// start if there is a first statement.
    pub fn new_comp_stmt(nodes: Vec<Node>, mut loc: Loc) -> Self {
        if let Some(node) = nodes.first() {
            loc = node.loc();
        };
        if let Some(node) = nodes.last() {
            loc = loc.merge(node.loc());
        };
        Node::new(NodeKind::CompStmt(nodes), loc)
    }
    /// A no-op: an empty statement sequence.
    pub fn new_nop(loc: Loc) -> Self {
        Node::new(NodeKind::CompStmt(vec![]), loc)
    }
    /// Binary operation; the location spans both operands.
    pub fn new_binop(op: BinOp, lhs: Node, rhs: Node) -> Self {
        let loc = (lhs.loc()).merge(rhs.loc());
        let kind = NodeKind::BinOp(op, Box::new(lhs), Box::new(rhs));
        Node::new(kind, loc)
    }
    /// Unary operation; `loc` is the operator's location, merged with the
    /// operand's.
    pub fn new_unop(op: UnOp, lhs: Node, loc: Loc) -> Self {
        let loc = loc.merge(lhs.loc());
        let kind = NodeKind::UnOp(op, Box::new(lhs));
        Node::new(kind, loc)
    }
    pub fn new_array_member(array: Node, index: Vec<Node>, loc: Loc) -> Self {
        let kind = NodeKind::ArrayMember {
            array: Box::new(array),
            index,
        };
        Node::new(kind, loc)
    }
    pub fn new_splat(array: Node, loc: Loc) -> Self {
        let loc = loc.merge(array.loc());
        Node::new(NodeKind::Splat(Box::new(array)), loc)
    }
    pub fn new_lvar(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::LocalVar(id), loc)
    }
    // Parameter-kind constructors for method/proc definitions.
    pub fn new_param(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::Param(id), loc)
    }
    pub fn new_optional_param(id: IdentId, default: Node, loc: Loc) -> Self {
        Node::new(NodeKind::OptionalParam(id, Box::new(default)), loc)
    }
    pub fn new_splat_param(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::RestParam(id), loc)
    }
    pub fn new_post_param(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::PostParam(id), loc)
    }
    pub fn new_keyword_param(id: IdentId, default: Option<Node>, loc: Loc) -> Self {
        Node::new(NodeKind::KeywordParam(id, Box::new(default)), loc)
    }
    pub fn new_block_param(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::BlockParam(id), loc)
    }
    pub fn new_identifier(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::Ident(id), loc)
    }
    pub fn new_symbol(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::Symbol(id), loc)
    }
    pub fn new_instance_var(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::InstanceVar(id), loc)
    }
    pub fn new_global_var(id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::GlobalVar(id), loc)
    }
    pub fn new_const(id: IdentId, toplevel: bool, loc: Loc) -> Self {
        Node::new(NodeKind::Const { toplevel, id }, loc)
    }
    pub fn new_scope(parent: Node, id: IdentId, loc: Loc) -> Self {
        Node::new(NodeKind::Scope(Box::new(parent), id), loc)
    }
    /// Multiple assignment; spans from the first lhs to the last rhs.
    /// NOTE(review): indexes `lhs[0]` and `rhs[len-1]` — panics if either
    /// side is empty; callers presumably guarantee non-empty lists.
    pub fn new_mul_assign(lhs: Vec<Node>, rhs: Vec<Node>) -> Self {
        let loc = lhs[0].loc().merge(rhs[rhs.len() - 1].loc());
        Node::new(NodeKind::MulAssign(lhs, rhs), loc)
    }
    pub fn new_method_decl(
        id: IdentId,
        params: Vec<Node>,
        body: Node,
        lvar: LvarCollector,
    ) -> Self {
        let loc = body.loc();
        Node::new(NodeKind::MethodDef(id, params, Box::new(body), lvar), loc)
    }
    pub fn new_singleton_method_decl(
        singleton: Node,
        id: IdentId,
        params: Vec<Node>,
        body: Node,
        lvar: LvarCollector,
    ) -> Self {
        let loc = body.loc();
        Node::new(
            NodeKind::SingletonMethodDef(Box::new(singleton), id, params, Box::new(body), lvar),
            loc,
        )
    }
    pub fn new_class_decl(
        id: IdentId,
        superclass: Node,
        body: Node,
        lvar: LvarCollector,
        is_module: bool,
        loc: Loc,
    ) -> Self {
        Node::new(
            NodeKind::ClassDef {
                id,
                superclass: Box::new(superclass),
                body: Box::new(body),
                is_module,
                lvar,
            },
            loc,
        )
    }
    /// Method send. The location extends to the last positional argument,
    /// and the positional argument list is REVERSED before being stored.
    pub fn new_send(
        receiver: Node,
        method: IdentId,
        mut send_args: SendArgs,
        completed: bool,
        loc: Loc,
    ) -> Self {
        // NOTE(review): the block component of the tuple is matched but
        // never used — only the last positional arg influences `loc`.
        let loc = match (send_args.args.last(), &send_args.block) {
            (Some(arg), _) => loc.merge(arg.loc),
            _ => loc,
        };
        send_args.args.reverse();
        Node::new(
            NodeKind::Send {
                receiver: Box::new(receiver),
                method,
                send_args,
                completed,
            },
            loc,
        )
    }
    /// Method send with no arguments at all.
    pub fn new_send_noarg(receiver: Node, method: IdentId, completed: bool, loc: Loc) -> Self {
        let send_args = SendArgs {
            args: vec![],
            kw_args: vec![],
            block: None,
        };
        Node::new(
            NodeKind::Send {
                receiver: Box::new(receiver),
                method,
                send_args,
                completed,
            },
            loc,
        )
    }
    pub fn new_if(cond: Node, then_: Node, else_: Node, loc: Loc) -> Self {
        let loc = loc.merge(then_.loc()).merge(else_.loc());
        Node::new(
            NodeKind::If {
                cond: Box::new(cond),
                then_: Box::new(then_),
                else_: Box::new(else_),
            },
            loc,
        )
    }
    pub fn new_while(cond: Node, body: Node, loc: Loc) -> Self {
        let loc = loc.merge(body.loc());
        Node::new(
            NodeKind::While {
                cond: Box::new(cond),
                body: Box::new(body),
            },
            loc,
        )
    }
    pub fn new_case(cond: Node, when_: Vec<CaseBranch>, else_: Node, loc: Loc) -> Self {
        let loc = loc.merge(else_.loc());
        Node::new(
            NodeKind::Case {
                cond: Box::new(cond),
                when_,
                else_: Box::new(else_),
            },
            loc,
        )
    }
    pub fn new_begin(
        body: Node,
        rescue: Vec<(Node, Node)>,
        else_: Node,
        ensure: Node,
        loc: Loc,
    ) -> Self {
        Node::new(
            NodeKind::Begin {
                body: Box::new(body),
                rescue,
                else_: Box::new(else_),
                ensure: Box::new(ensure),
            },
            loc,
        )
    }
    pub fn new_break(val: Node, loc: Loc) -> Self {
        Node::new(NodeKind::Break(Box::new(val)), loc)
    }
    pub fn new_next(val: Node, loc: Loc) -> Self {
        Node::new(NodeKind::Next(Box::new(val)), loc)
    }
    pub fn new_return(val: Node, loc: Loc) -> Self {
        Node::new(NodeKind::Return(Box::new(val)), loc)
    }
    /// `yield` with arguments; like `new_send`, reverses the positional args.
    pub fn new_yield(mut args: SendArgs, loc: Loc) -> Self {
        args.args.reverse();
        Node::new(NodeKind::Yield(args), loc)
    }
    pub fn new_proc(params: NodeVec, body: Node, lvar: LvarCollector, loc: Loc) -> Self {
        let loc = loc.merge(body.loc());
        Node::new(
            NodeKind::Proc {
                params,
                body: Box::new(body),
                lvar,
            },
            loc,
        )
    }
    /// True when this node is a bare identifier (possible method call).
    pub fn is_operation(&self) -> bool {
        match self.kind {
            NodeKind::Ident(_) => true,
            _ => false,
        }
    }
    /// True when this node can act as a local-variable reference.
    pub fn is_lvar(&self) -> bool {
        match self.kind {
            NodeKind::Ident(_) | NodeKind::LocalVar(_) => true,
            _ => false,
        }
    }
    /// The identifier usable as a method name, if this node has one.
    pub fn as_method_name(&self) -> Option<IdentId> {
        match self.kind {
            NodeKind::Const { id, .. } | NodeKind::Ident(id) | NodeKind::LocalVar(id) => Some(id),
            _ => None,
        }
    }
}
impl std::fmt::Display for Node {
    /// Debug-ish rendering of a node tree: a few common kinds get custom
    /// bracketed forms, everything else falls back to `{:?}` of the kind.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.kind {
            NodeKind::BinOp(op, lhs, rhs) => write!(f, "({:?}: {}, {})", op, lhs, rhs),
            NodeKind::Ident(id) => write!(f, "(Ident {:?})", id),
            NodeKind::LocalVar(id) => write!(f, "(LocalVar {:?})", id),
            NodeKind::Send {
                receiver,
                method,
                send_args,
                ..
            } => {
                // Receiver and method name, then each positional argument.
                write!(f, "[ Send [{}]: [{:?}]", receiver, method)?;
                for node in &send_args.args {
                    write!(f, "({}) ", node)?;
                }
                write!(f, "]")?;
                Ok(())
            }
            NodeKind::CompStmt(nodes) => {
                write!(f, "[ CompStmt ")?;
                for node in nodes {
                    write!(f, "({}) ", node)?;
                }
                write!(f, "]")?;
                Ok(())
            }
            NodeKind::MethodDef(id, args, body, _) => {
                write!(f, "[ MethodDef {:?}: PARAM(", id)?;
                for arg in args {
                    write!(f, "({}) ", arg)?;
                }
                write!(f, ") BODY({})]", body)?;
                Ok(())
            }
            NodeKind::If { cond, then_, else_ } => {
                write!(f, "[ If COND({}) THEN({}) ELSE({}) ]", cond, then_, else_)
            }
            _ => write!(f, "[{:?}]", self.kind),
        }
    }
}
|
#![allow(dead_code)]
use crate::*;
use num::Integer;
use std::collections::HashSet;
use std::str::FromStr;
use ndarray::Array2;
use itertools::Itertools;
const DAY: usize = 10;
/// One cell of the asteroid map: `#` is an asteroid, `.` is empty space.
#[derive(Clone, PartialEq, Debug)]
pub enum GridField {
    Asteroid,
    Empty,
}
impl FromStr for Grid<GridField> {
    type Err = AocErr;
    /// Parses the puzzle map. Characters are read row by row into a
    /// (height, width) array, then the axes are swapped so the grid is
    /// indexed as `(x, y)` — x = column, y = row — matching the puzzle's
    /// coordinate convention.
    ///
    /// Panics if a row length disagrees with the first row (shape error)
    /// or on any character other than `#` / `.`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Width is taken from the first line; 0 for empty input.
        let width = s.lines().next().map(|l| l.chars().count()).unwrap_or(0);
        let height = s.lines().count();
        let v = s
            .lines()
            .flat_map(|l| l.chars())
            .map(|c| match c {
                '#' => GridField::Asteroid,
                '.' => GridField::Empty,
                _ => unreachable!()
            })
            .collect_vec();
        let mut arr = Array2::from_shape_vec((height, width), v)
            .unwrap();
        // Row-major (y, x) -> (x, y) indexing.
        arr.swap_axes(1, 0);
        Ok(Grid{
            arr
        })
    }
}
/// A 2-D map of fields, indexed `(x, y)` after parsing (see `FromStr`).
pub struct Grid<T> {
    arr: Array2<T>
}
impl Grid<GridField> {
    /// Iterates over every asteroid cell with its `(x, y)` position.
    pub fn asteroids(&self) -> impl Iterator<Item = ((usize, usize), &GridField)> + '_ {
        self.arr.indexed_iter()
            .filter(|f| match f.1 {
                &GridField::Empty => false,
                &GridField::Asteroid => true,
            })
    }
    /// Asteroids with an unobstructed line of sight from `from`
    /// (excludes `from` itself — see `is_visible`).
    fn visible_asteroids(&self, from: (usize, usize)) -> impl Iterator<Item = ((usize, usize), &GridField)> + '_ {
        self.asteroids()
            .filter(move |(to, _)| self.is_visible(from, *to))
    }
    /// Part 1: the asteroid that can see the most other asteroids,
    /// returned with its visibility count. `None` for an empty grid.
    fn best_place(&self) -> Option<((usize, usize), usize)> {
        self.asteroids()
            .map(|(from, _)| {
                let n = self
                    .asteroids()
                    .filter(|(to, _)| self.is_visible(from, *to))
                    .count();
                (from, n)
            })
            .max_by_key(|(_, count)| *count)
    }
    /// Angle from `from` to `to` in degrees, rotated so 0 points "up"
    /// (negative y) and angles increase clockwise — the laser's sweep
    /// order. The piecewise remapping converts atan2's (-PI, PI] range.
    fn calc_degree(from: (usize, usize), to: (usize, usize)) -> f64 {
        use std::f64::consts::*;
        let slope_x = (to.0 as f64) - (from.0 as f64);
        let slope_y = (to.1 as f64) - (from.1 as f64);
        let rad = slope_y.atan2(slope_x);
        let rad = match rad {
            rad if rad.is_sign_negative() => {
                let rad = PI + rad;
                match rad {
                    rad if rad >= FRAC_PI_2 => rad - FRAC_PI_2,
                    rad if rad == PI => PI + FRAC_PI_2,
                    rad => PI + FRAC_PI_2 + rad,
                }
            }
            rad => rad + FRAC_PI_2,
        };
        rad * 180. / PI
    }
    /// Part 2: simulates the rotating laser at `from`, returning asteroid
    /// positions in vaporization order. Each loop iteration is one full
    /// rotation: all currently visible asteroids are destroyed in
    /// clockwise-angle order, then removed from the grid before the next
    /// sweep can see what was behind them.
    fn vaporize(&mut self, from: (usize, usize)) -> Vec<(usize, usize)> {
        let mut vap_seq = Vec::new();
        let mut vaporized = HashSet::new();
        loop {
            // Angle is floored at 1/100-degree resolution to get a stable
            // integer sort key.
            let mut ast: Vec<_> = self
                .visible_asteroids(from)
                .filter(|(pt, _)| !vaporized.contains(pt))
                .map(|(pt, _)| (pt, (Self::calc_degree(from, pt).floor() * 100.) as usize))
                .collect();
            if ast.is_empty() {
                return vap_seq;
            }
            ast.sort_by_key(|(_, deg)| deg.clone());
            for (pt, _) in ast.iter().cloned() {
                vaporized.insert(pt);
                vap_seq.push(pt);
            }
            // Clear destroyed asteroids so the next rotation sees past them.
            for vap in vaporized.iter().cloned() {
                self.arr[vap] = GridField::Empty;
            }
        }
    }
    /// True when no asteroid sits strictly between `from` and `to`.
    /// Intermediate lattice points are stepped with the delta reduced by
    /// the gcd of its components; `from == to` is never visible.
    fn is_visible(&self, from: (usize, usize), to: (usize, usize)) -> bool {
        if from == to {
            return false;
        }
        let x_delta = (to.0 as isize) - (from.0 as isize);
        let y_delta = (to.1 as isize) - (from.1 as isize);
        let div = match (x_delta, y_delta) {
            (0, y) => y.abs(),
            (x, 0) => x.abs(),
            (x, y) => x.gcd(&y),
        };
        let x_delta = x_delta / div;
        let y_delta = y_delta / div;
        (1..div)
            .map(|k| {
                let x = (from.0 as isize) + x_delta * k;
                let y = (from.1 as isize) + y_delta * k;
                (x as usize, y as usize)
            })
            .all(|pt| self.arr[pt] != GridField::Asteroid)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Part 1 against the five worked examples from the puzzle statement,
    /// then the real input.
    #[test]
    fn part1() -> AocResult<()> {
        let grid: Grid<GridField> = parse_file(FileType::Example, DAY, 1)?;
        // (4,0) is blocked from (4,4) by the asteroid between them.
        assert!(!grid.is_visible((4, 4), (4, 0)));
        assert_eq!(((3, 4), 8), grid.best_place().unwrap());
        let grid: Grid<GridField> = parse_file(FileType::Example, DAY, 2)?;
        assert_eq!(((5, 8), 33), grid.best_place().unwrap());
        let grid: Grid<GridField> = parse_file(FileType::Example, DAY, 3)?;
        assert_eq!(((1, 2), 35), grid.best_place().unwrap());
        let grid: Grid<GridField> = parse_file(FileType::Example, DAY, 4)?;
        assert_eq!(((6, 3), 41), grid.best_place().unwrap());
        let grid: Grid<GridField> = parse_file(FileType::Example, DAY, 5)?;
        assert_eq!(((11, 13), 210), grid.best_place().unwrap());
        let grid: Grid<GridField> = parse_file(FileType::Input, DAY, 1)?;
        assert_eq!(((23, 29), 263), grid.best_place().unwrap());
        Ok(())
    }
    /// Part 2: angle function sanity checks (cardinals and diagonals),
    /// then vaporization order on the examples and the real input.
    #[test]
    fn part2() -> AocResult<()> {
        //TODO use nearly equal
        assert_eq!(0., Grid::calc_degree((1, 1), (1, 0)));
        assert_eq!(90., Grid::calc_degree((1, 1), (2, 1)));
        assert_eq!(180., Grid::calc_degree((1, 1), (1, 2)));
        assert_eq!(270., Grid::calc_degree((1, 1), (0, 1)));
        assert_eq!(90. + 45., Grid::calc_degree((2, 2), (3, 3)));
        assert_eq!(45., Grid::calc_degree((2, 2), (3, 1)));
        assert_eq!(180. + 45., Grid::calc_degree((2, 2), (1, 3)));
        assert_eq!(270. + 45., Grid::calc_degree((2, 2), (1, 1)));
        let mut grid: Grid<GridField> = parse_file(FileType::Example, DAY, 6)?;
        itertools::assert_equal(
            [(8, 1), (9, 0), (9, 1)].iter(),
            grid.vaporize((8, 3)).iter().take(3),
        );
        let mut grid: Grid<GridField> = parse_file(FileType::Example, DAY, 5)?;
        let vaps = grid.vaporize((11, 13));
        // The 200th vaporized asteroid (0-based index 199) is (8, 2).
        assert_eq!(vaps.iter().position(|pt| *pt == (8, 2)).unwrap(), 198);
        let mut grid: Grid<GridField> = parse_file(FileType::Input, DAY, 1)?;
        let vaps = grid.vaporize((23, 29));
        assert_eq!(vaps[199], (11, 10));
        Ok(())
    }
}
|
use std::collections::HashMap;
use std::sync::Arc;
use std::sync::mpsc::Receiver;
use std::time::{Duration, Instant};
use libdeflater::{CompressionLvl, Compressor, Decompressor};
use mio::{Events, Poll};
use packet_transformation::handling::{HandlingContext, UnparsedPacket};
use packet_transformation::TransformationResult;
use utils::buffer_helpers::{buffer_read, copy_slice_to, read_frame, write_socket, write_socket0};
use utils::buffer_helpers::{compress_packet, decompress_packet, get_needed_data};
use utils::buffers::{VarInts, VarIntsMut};
use utils::contexts::{ConnectionContext, Message, NetworkThreadContext};
use utils::contexts::Message::{NewConnection, Threads};
use utils::indexed_vec::IndexedVec;
/// Start network thread loop.
/// Responsible for parsing and transforming every out/incoming packets.
///
/// `rx` carries control messages: the FIRST message must be `Threads` (the
/// full worker-thread list, indexed by `id`); later messages announce new
/// connections. `handler` supplies the packet-transformation callbacks.
/// The function never returns — it alternates between socket polling and
/// draining the message queue.
pub fn thread_loop(rx: Receiver<Message>, handler: Arc<HandlingContext>, id: usize) {
    // Create thread context
    let mut thread_ctx = {
        let connections = HashMap::new();
        // Block until the coordinator sends the thread list; any other
        // message at this point is a protocol violation.
        let threads = match rx.recv().unwrap() {
            Threads(threads) => {
                threads
            }
            _ => panic!("unexpected message")
        };
        let thread = threads[id].clone();
        NetworkThreadContext {
            connections,
            threads,
            thread,
        }
    };
    // todo adjust?
    let mut events = Events::with_capacity(1000);
    let mut poll = Poll::new().expect("could not unwrap poll");
    // max and min interval for polling the message queue
    let max_delay = Duration::from_millis(200);
    let min_delay = Duration::from_millis(10);
    //Per thread buffers, reused across iterations to avoid re-allocating
    let mut packet_buf = IndexedVec::new();
    utils::set_vec_len(&mut packet_buf.vec, 2048);
    let mut compression_buf = IndexedVec::new();
    utils::set_vec_len(&mut compression_buf.vec, 2048);
    let mut caching_buf = IndexedVec::new();
    utils::set_vec_len(&mut caching_buf.vec, 2048);
    let mut decompressor = Decompressor::new();
    let mut compressor = Compressor::new(CompressionLvl::fastest());
    // Monotonically increasing id handed to each new connection pair
    let mut id_counter = 0;
    // Start parsing loop
    loop {
        let ins = Instant::now();
        poll.poll(&mut events, Some(max_delay)).expect("couldn't poll");
        for event in events.iter() {
            // FIXME: I used remove to get around the borrow checker hopefully there is a better way. also i assume this is slower.
            // The connection is taken OUT of the map while being processed and
            // re-inserted afterwards (unless it should close).
            if let Some(mut player) = thread_ctx.connections.remove(&event.token()) {
                if event.is_writable() {
                    process_write(&mut player);
                }
                if event.is_readable() {
                    // The paired connection (the other side of the proxy) is
                    // also removed so both halves can be borrowed mutably.
                    let mut other = thread_ctx.connections.remove(&player.token_other).unwrap();
                    process_read(&mut thread_ctx, &mut player, &mut other, &mut packet_buf, &mut caching_buf, handler.clone(), &mut compression_buf, &mut decompressor, &mut compressor);
                    thread_ctx.connections.insert(player.token_other.clone(), other);
                }
                if player.should_close {
                    // Connection socket is not active anymore, remove context
                    // (drops `player` here; its pair is removed as well)
                    thread_ctx.connections.remove(&player.token_other);
                    continue;
                }
                thread_ctx.connections.insert(player.token_self.clone(), player);
            }
        }
        // Process all incoming messages
        for msg in rx.try_iter() {
            match msg {
                NewConnection(c2s, s2c) => {
                    // New connection has been associated to this thread
                    println!("Player connection");
                    // Create connection context
                    ConnectionContext::create_pair(id_counter, c2s, s2c, &poll, &mut thread_ctx.connections);
                    id_counter += 1;
                }
                _ => { println!("got unexpected message"); }
            }
        }
        // Throttle the loop so the message queue is polled at most every
        // `min_delay` even when sockets are busy.
        let elapsed = ins.elapsed();
        if elapsed < min_delay {
            std::thread::sleep(min_delay - elapsed);
        }
    }
}
/// Flushes any buffered outbound data for this connection.
///
/// The connection is optimistically marked writable; the flag is cleared
/// again when `write_socket0` reports that the flush did not complete.
fn process_write(ctx: &mut ConnectionContext) {
    ctx.is_writable = true;
    let flushed = write_socket0(&mut ctx.stream, &mut ctx.write_buffering, &mut ctx.should_close);
    if !flushed {
        ctx.is_writable = false;
    }
}
// todo handle protocol state switching. right now we only check packet ids
// todo handle encryption
// todo handle compression
/// Reads all available frames from `connection_ctx`, runs each packet through
/// `handler`, and buffers the (possibly rewritten) bytes for `other_ctx`.
///
/// A frame is a VarInt length prefix followed by the packet body; when the
/// connection's compression threshold is active the body starts with a VarInt
/// uncompressed size (0 = stored uncompressed). Partially received frames are
/// left in `read_buf` for the next read cycle.
fn process_read(mut thread_ctx: &mut NetworkThreadContext,
                connection_ctx: &mut ConnectionContext,
                other_ctx: &mut ConnectionContext,
                read_buf: &mut IndexedVec<u8>,
                caching_buf: &mut IndexedVec<u8>,
                handler: Arc<HandlingContext>,
                compression_buffer: &mut IndexedVec<u8>,
                decompressor: &mut Decompressor,
                compressor: &mut Compressor) {
    // `pointer` walks frame starts; `next` is the end of the current frame.
    let mut pointer = 0;
    let mut next;
    read_buf.reset();
    caching_buf.reset();
    // read new packets
    get_needed_data(read_buf, connection_ctx);
    if connection_ctx.should_close {
        return;
    }
    let readable = read_buf.readable_bytes();
    // read all the packets
    while readable > pointer {
        // packet_len_bytes_red = number of bytes the length VarInt occupied
        if let Some((packet_len, packet_len_bytes_red)) = read_frame(read_buf, pointer, readable, connection_ctx) {
            let offset = pointer + packet_len_bytes_red;
            next = offset + packet_len as usize;
            // the full packet is available
            if readable >= next {
                let mut working_buf = &read_buf.vec[offset..offset + packet_len];
                let compression_threshold = connection_ctx.compression_threshold;
                if compression_threshold > 0 {
                    // First VarInt of the body is the uncompressed size; a
                    // positive value means the payload is actually compressed.
                    let real_length = working_buf.get_var_i32();
                    if real_length.0 > 0 {
                        compression_buffer.reset();
                        decompress_packet(real_length.0 as usize, &mut working_buf, decompressor, compression_buffer);
                    }
                }
                let (id, _id_bytes) = working_buf.get_var_i32();
                let unparsed_packet = UnparsedPacket::new(id, working_buf);
                let processing_result =
                    handler.handle_packet(&mut thread_ctx, connection_ctx, other_ctx, unparsed_packet, connection_ctx.inbound);
                match processing_result.0 {
                    TransformationResult::Unchanged => {
                        // Forward the original frame bytes untouched.
                        copy_slice_to(&read_buf.vec[pointer..next], caching_buf);
                    }
                    TransformationResult::Modified => {
                        // The handler produced a replacement packet; re-frame
                        // (and re-compress if over the threshold) before
                        // forwarding.
                        let buffer = processing_result.1.unwrap();
                        let mut final_buffer = buffer.as_slice();
                        let mut is_uncompressed = false;
                        if compression_threshold > 0 {
                            let length = final_buffer.len();
                            if length > compression_threshold as usize {
                                compression_buffer.reset();
                                compress_packet(&mut final_buffer, compressor, compression_buffer);
                            } else {
                                is_uncompressed = true;
                            }
                        }
                        // write in 2 steps to avoid extra copy
                        // (+1 accounts for the single-byte VarInt 0 marker
                        // written below for uncompressed packets)
                        let len = final_buffer.len() as i32 + if is_uncompressed { 1 } else { 0 };
                        let mut frame = IndexedVec::new();
                        frame.ensure_writable(4);
                        frame.put_var_i32(len);
                        if is_uncompressed {
                            frame.put_var_i32(0);
                        }
                        copy_slice_to(frame.as_slice(), caching_buf);
                        copy_slice_to(final_buffer, caching_buf);
                    }
                    TransformationResult::Canceled => {
                        // NOOP
                    }
                }
                if connection_ctx.should_close {
                    // NOTE(review): on close the buffered data is flushed to
                    // `connection_ctx`, while the normal path below sends it
                    // to `other_ctx` — confirm this asymmetry is intended.
                    write_socket(connection_ctx, caching_buf);
                    return;
                }
                pointer = next;
                read_buf.set_reader_index(pointer);
            } else {
                // Partial packet: wait for more data.
                break;
            }
        } else {
            if connection_ctx.should_close {
                return;
            }
            break;
        }
    }
    // Remember any unconsumed partial frame for the next read cycle.
    read_buf.set_reader_index(pointer);
    buffer_read(connection_ctx, read_buf);
    write_socket(other_ctx, caching_buf);
} |
use std::env;
use std::fs::File;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::process::Command;
/// Build script: writes the current short git commit hash to
/// `$OUT_DIR/githash` and verifies that git-lfs assets were actually
/// downloaded (a common setup mistake).
fn main() {
    // Get the current githash
    match Command::new("git")
        .args(&["rev-parse", "--short", "HEAD"])
        .output()
    {
        Ok(output) => {
            // `git` may spawn successfully yet still fail (e.g. not inside a
            // repository). Previously that case silently wrote an EMPTY hash
            // file; fail the build loudly instead.
            if !output.status.success() {
                panic!(
                    "git rev-parse exited with {}: {}",
                    output.status,
                    String::from_utf8_lossy(&output.stderr)
                );
            }
            match String::from_utf8(output.stdout) {
                Ok(hash) => {
                    let mut target = File::create(
                        Path::new(
                            &env::var("OUT_DIR").expect("failed to query OUT_DIR environment variable"),
                        )
                        .join("githash"),
                    )
                    .expect("failed to create git hash file!");
                    // Trim the trailing newline git appends to its output.
                    target
                        .write_all(hash.trim().as_bytes())
                        .expect("failed to write to file!");
                }
                Err(e) => panic!("failed to convert git output to UTF-8: {}", e),
            }
        }
        Err(e) => panic!("failed to retrieve current git commit hash: {}", e),
    }
    // Check if git-lfs is working (opt-out via DISABLE_GIT_LFS_CHECK)
    if env::var("DISABLE_GIT_LFS_CHECK").is_err() {
        let asset_path: PathBuf = ["..", "assets", "voxygen", "background", "bg_main.png"]
            .iter()
            .collect();
        let asset_file = match File::open(&asset_path) {
            Ok(file) => file,
            Err(e) => panic!(
                "failed to open asset file {}: {}",
                asset_path.to_str().unwrap(),
                e
            ),
        };
        // A file still starting with the LFS pointer text means the real
        // binary was never fetched.
        const LFS_MARKER: &[u8] = b"version https://git-lfs.github.com/spec/";
        let mut buffer = Vec::new();
        // Read at most the marker's length — enough to decide either way.
        let bytes_read = asset_file
            .take(LFS_MARKER.len() as u64)
            .read_to_end(&mut buffer)
            .expect("failed to read asset file");
        if bytes_read == LFS_MARKER.len() && buffer == LFS_MARKER {
            panic!(
                "\n\nGit Large File Storage (git-lfs) has not been set up correctly.\n\
                Most common reasons:\n\
                \t- git-lfs was not installed before cloning this repository\n\
                \t- this repository was not cloned from the primary gitlab mirror.\n\
                \t The github mirror does not support lfs.\n\
                See the book at https://book.veloren.net/ for details.\n\n"
            );
        }
    }
}
|
#![allow(dead_code)]
use pwasm_abi::eth::EndpointInterface;
use pwasm_abi_derive::eth_abi;
/// Test contract with a single string-taking method. The `eth_abi` macro
/// generates `StringsEndpoint` (dispatcher, used in the test below) and
/// `StringsClient` from this trait.
#[eth_abi(StringsEndpoint, StringsClient)]
pub trait StringsContract {
    fn string(&mut self, v: String);
}
// Raw ABI-encoded call payload, laid out as:
//   bytes 0..4   : 4-byte selector (0x3fcf74c6) — presumably the selector for
//                  `string`; confirm against the eth_abi macro output
//   bytes 4..36  : head word, offset of the string data = 0x20
//   bytes 36..68 : length word = 0x2f (47 bytes of UTF-8)
//   bytes 68..   : 47 data bytes, zero-padded to a 64-byte boundary
//                  ("Ash nazg thrakatulûk agh burzum-ishi krimpatul")
const PAYLOAD_SAMPLE_1: &[u8] = &[
    0x3F, 0xCF, 0x74, 0xC6,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2f,
    0x41, 0x73, 0x68, 0x20, 0x6e, 0x61, 0x7a, 0x67, 0x20, 0x74, 0x68, 0x72, 0x61, 0x6b, 0x61, 0x74,
    0x75, 0x6c, 0xc3, 0xbb, 0x6b, 0x20, 0x61, 0x67, 0x68, 0x20, 0x62, 0x75, 0x72, 0x7a, 0x75, 0x6d,
    0x2d, 0x69, 0x73, 0x68, 0x69, 0x20, 0x6b, 0x72, 0x69, 0x6d, 0x70, 0x61, 0x74, 0x75, 0x6c, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
];
#[test]
fn strings() {
    /// Minimal contract implementation that just records the decoded string.
    #[derive(Default)]
    pub struct Instance {
        pub s1: String,
    }
    impl StringsContract for Instance {
        fn string(&mut self, value: String) {
            self.s1 = value;
        }
    }
    // Dispatch the raw ABI payload and verify the decoded string landed in s1.
    let expected = String::from("Ash nazg thrakatulûk agh burzum-ishi krimpatul");
    let mut endpoint = StringsEndpoint::new(Instance::default());
    endpoint.dispatch(PAYLOAD_SAMPLE_1);
    assert_eq!(endpoint.inner.s1, expected);
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use super::super::utils::build_proof_options;
#[test]
fn fib8_test_basic_proof_verification() {
    // 64-term trace, default (non-extended) proof options.
    crate::tests::test_basic_proof_verification(Box::new(super::Fib8Example::new(
        64,
        build_proof_options(false),
    )));
}
#[test]
fn fib8_test_basic_proof_verification_extension() {
    // 64-term trace, extended proof options (field-extension enabled).
    crate::tests::test_basic_proof_verification(Box::new(super::Fib8Example::new(
        64,
        build_proof_options(true),
    )));
}
#[test]
fn fib8_test_basic_proof_verification_fail() {
    // Negative path: verification must reject a tampered proof.
    crate::tests::test_basic_proof_verification_fail(Box::new(super::Fib8Example::new(
        64,
        build_proof_options(false),
    )));
}
|
//! Music file tagging for an artist and song approch, favouring em dashes
use crate::utils::{cap_filename_ext, format_name, ResponseModel};
use serde::Serialize;
/// A single song that is pretty printed for tagging, used in the [song] path
#[derive(Debug, PartialEq, Clone, Serialize)]
pub struct SingleSong {
    /// Original file path of this song before being formatted
    file_path: String,
    /// Name of the song on its own
    name: String,
    /// Completed/rendered name, combining [SingleSong::name] with
    /// [SingleSong::artist] and [SingleSong::album], separated by em dashes
    /// to "pretty print" the song
    render: String,
    /// File extension of the song file (if any), stemming from
    /// [SingleSong::file_path]
    ext: Option<String>,
    /// Optional artist if found. If not, the render falls back to the
    /// "Unknown artist" placeholder
    artist: Option<String>,
    /// Optional album name if found. If not, it is omitted from
    /// [SingleSong::render] altogether
    album: Option<String>,
}
impl SingleSong {
    /// Creates a new [SingleSong] from the given arguments. The `artist`
    /// and/or `album` context, when present, is stored verbatim and woven
    /// into the rendered name.
    pub fn new(
        file_path: impl AsRef<str>,
        artist: impl Into<Option<String>>,
        album: impl Into<Option<String>>,
    ) -> Self {
        let artist = artist.into();
        let album = album.into();
        let (filename, ext) = cap_filename_ext(file_path.as_ref());
        let name = format_name(filename);
        // Artist segment: formatted artist or a fixed placeholder.
        let artist_part = match &artist {
            Some(a) => format_name(a),
            None => "Unknown artist".to_string(),
        };
        // Album segment: included (em-dash prefixed) only when present and
        // different from the song name itself.
        let album_part = match &album {
            Some(album) if album != &name => format!(" — {}", format_name(album)),
            _ => String::new(),
        };
        let render = format!("{}{} — {}", artist_part, album_part, name);
        Self {
            file_path: file_path.as_ref().to_string(),
            name,
            render,
            ext,
            artist,
            album,
        }
    }
}
/// Gives help by providing available endpoints (to [song] and [album])
#[get("/music")]
pub fn help() -> &'static str {
    // User-facing typos fixed: "albumn" -> "album", "methodoloy" -> "methodology".
    "ROUTE /music\n\n\nAbout\n Allows music tagging with a static/strong artist + album + song methodology\n of tagging. Formatting uses an em dash to differentiate these layers.\n\nChild routes/endpoints\n - /song: Tags a single song and allows optional context for artist/album"
}
/// Gives help for how to use the [song] path: lists the query parameters the
/// POST endpoint accepts and what they do
#[get("/music/song")]
pub fn song_help() -> &'static str {
    "POST /music/song?<name>&<album>&<artist>\n\n\nAbout\n Tags a single song path into the typical artist + album + song view. Some\n optional url parameters may be passed like `album` and `artist` in order to\n give explicit context for tagging the song."
}
/// Tags a single song into a song, album and artist. This is typically used for
/// playlists where songs are not in any exact order
#[post("/music/song?<name>&<album>&<artist>")]
pub fn song(
    name: String,
    album: Option<String>,
    artist: Option<String>,
) -> ResponseModel<SingleSong> {
    // Note the argument order: SingleSong::new takes (file_path, artist, album).
    ResponseModel::new(200, "Success", SingleSong::new(name, artist, album))
}
|
use pest_derive::*;
/// Parser type generated by pest from the grammar file named in the
/// `grammar` attribute (`zeroconf.pest`); the derive also emits the
/// matching `Rule` enum.
#[derive(Parser)]
#[grammar = "zeroconf.pest"]
pub struct ZeroConfParser;
|
/*!
```rudra-poc
[target]
crate = "cassandra-proto"
version = "0.1.2"
[report]
issue_url = "https://github.com/AlexPikalov/cassandra-proto/issues/3"
issue_date = 2021-01-05
[[bugs]]
analyzer = "UnsafeDataflow"
bug_class = "UninitExposure"
rudra_report_locations = ["src/frame/parser_async.rs:19:1: 97:2"]
```
!*/
#![forbid(unsafe_code)]
fn main() {
    // Placeholder PoC binary: the referenced Rudra report was filed without a
    // proof of concept, so this only records that fact at runtime.
    panic!("This issue was reported without PoC");
}
|
use actix_web::{
web::{self, HttpRequest, HttpResponse},
Error, Result, ResponseError
};
use crate::AppData;
use mysql_utils::{Db, MyLibError};
use serde::{Serialize, Deserialize};
/// Serializable error payload returned to clients as JSON.
#[derive(Serialize)]
pub struct DbError {
    /// Human-readable description of what went wrong
    msg: String
}
impl DbError {
    /// Wraps an arbitrary message into a serializable error payload.
    pub fn msg<M: Into<String>>(msg: M) -> Self {
        Self { msg: msg.into() }
    }
    /// Converts a [MyLibError] into a serializable payload via its Debug text.
    pub fn e(e: MyLibError) -> Self {
        Self {
            msg: format!("{:?}", e),
        }
    }
}
// Often we want to handle or show client-side
/// Converts mysql_util error
pub fn my_lib_wrap(e: MyLibError) -> Error {
Error::from(MyLibErrorWrapper {
e
})
}
/// Newtype that adapts [MyLibError] to actix-web's error machinery.
pub struct MyLibErrorWrapper {
    pub e: MyLibError
}
// Uses ResponseError's default implementation for the HTTP response.
impl ResponseError for MyLibErrorWrapper {}
impl std::fmt::Debug for MyLibErrorWrapper {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "MyLibError: {:?}", self.e)
    }
}
impl std::fmt::Display for MyLibErrorWrapper {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Keep Display in lockstep with Debug instead of duplicating the
        // format string (the two impls were previously copy-pasted).
        std::fmt::Debug::fmt(self, f)
    }
}
/// JSON request body for the [query] endpoint
#[derive(Deserialize)]
pub struct QueryInput {
    /// Raw SQL text, executed verbatim
    query: String
}
/// Executes the SQL supplied in the JSON body and returns the rendered result.
/// Database failures are reported as HTTP 200 with a serialized [DbError] body.
///
/// NOTE(review): this executes caller-provided SQL verbatim — acceptable only
/// for an authenticated admin tool; confirm this route is never exposed to
/// untrusted clients.
pub fn query(data: web::Data<AppData>, params: web::Json<QueryInput>, _req: HttpRequest) -> Result<HttpResponse> {
    // A fresh connection per request, built from the app-wide credentials.
    let mut db = match Db::new(&data.host, data.port, &data.db, &data.user, &data.pass) {
        Ok(db) => db,
        Err(e) => return Ok(HttpResponse::Ok().json(DbError::e(e)))
    };
    let query_res = match Db::query(&mut db.conn, &params.query) {
        Ok(query_res) => query_res,
        Err(e) => return Ok(HttpResponse::Ok().json(DbError::e(e)))
    };
    let display_result = match Db::display_result(query_res) {
        Ok(display_result) => display_result,
        Err(e) => return Ok(HttpResponse::Ok().json(DbError::e(e)))
    };
    Ok(HttpResponse::Ok().json(display_result))
}
/// JSON payload for [structure]; borrows straight from the open `Db` handle
/// to avoid copying the table metadata
#[derive(Serialize)]
struct Structure<'a> {
    tablenames: &'a Vec<String>,
    table_defs: &'a mysql_utils::TableDefMap,
}
/// Returns the database structure (table names + definitions) as JSON.
///
/// NOTE(review): error handling here mixes two conventions — connection/init
/// failures come back as HTTP 200 with a [DbError] JSON body, while
/// `table_defs`/`tablenames` failures propagate as an actix error response via
/// [my_lib_wrap]. Confirm which convention clients expect.
pub fn structure(data: web::Data<AppData>, _req: HttpRequest) -> Result<HttpResponse> {
    let mut db = match Db::new(&data.host, data.port, &data.db, &data.user, &data.pass) {
        Ok(db) => db,
        Err(e) => return Ok(HttpResponse::Ok().json(DbError::e(e)))
    };
    // Populate the cached table definitions before borrowing them below.
    match db.init_table_defs() {
        Ok(_) => (),
        Err(e) => return Ok(HttpResponse::Ok().json(DbError::e(e)))
    }
    let table_defs = db.table_defs().map_err(my_lib_wrap)?;
    let tablenames = db.tablenames().map_err(my_lib_wrap)?;
    Ok(HttpResponse::Ok().json(Structure {
        tablenames,
        table_defs
    }))
} |
use std::mem::replace;
use std::fmt::Debug;
/// Demo driver for [LinkedList]: exercises add/remove/change_order and prints
/// the list contents after each mutation.
///
/// (A large block of commented-out scratch code was removed; the `#[test]`
/// functions below cover those scenarios.)
fn main() {
    let mut ll: LinkedList<i32> = LinkedList::new();
    // The Results from the mutating calls were previously ignored; fail fast
    // instead so a broken list invariant becomes visible immediately.
    ll.add(2).expect("add failed");
    ll.add(3).expect("add failed");
    ll.add(4).expect("add failed");
    println!("0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4));
    ll.remove(2).expect("remove failed");
    println!("remove: 2, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4));
    ll.add(5).expect("add failed");
    ll.add(6).expect("add failed");
    println!(" 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4));
    ll.change_order(ll.get_data_from_position(0).unwrap(), ll.len() - 1).expect("change_order failed");
    println!("change_order: 0 -> last, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4));
    ll.add(20).expect("add failed");
    println!("0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4));
    ll.add(40).expect("add failed");
    println!("add: 40 , 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.change_order(ll.get_data_from_position(ll.len() - 1).unwrap(), 0).expect("change_order failed");
    println!("change_order: last -> 0, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.change_order(ll.get_data_from_position(0).unwrap(), ll.len() - 1).expect("change_order failed");
    println!("change_order: 0 -> last, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.add(70).expect("add failed");
    println!("add: 70, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.remove(5).expect("remove failed");
    println!("remove: 5 , 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.change_order(ll.get_data_from_position(1).unwrap(), ll.len() - 3).expect("change_order failed");
    println!("change_order: 1 -> {:?}, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.len() - 3, ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.remove(70).expect("remove failed");
    println!("remove: 70 , 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.change_order(ll.get_data_from_position(ll.len() - 1).unwrap(), ll.len() - 3).expect("change_order failed");
    println!("change_order: {:?} -> {:?}, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.len() - 1, ll.len() - 3, ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    ll.change_order(ll.get_data_from_position(ll.len() - 1).unwrap(), ll.len() - 3).expect("change_order failed");
    println!("change_order: {:?} -> {:?}, 0: {:?}, 1: {:?}, 2: {:?}, 3: {:?}, 4: {:?}, 5: {:?}, 6: {:?}", ll.len() - 1, ll.len() - 3, ll.get_data_from_position(0), ll.get_data_from_position(1), ll.get_data_from_position(2), ll.get_data_from_position(3), ll.get_data_from_position(4), ll.get_data_from_position(5), ll.get_data_from_position(6));
    // A second, tiny list to show the two-element case.
    let mut ll2: LinkedList<i32> = LinkedList::new();
    ll2.add(2).expect("add failed");
    ll2.add(3).expect("add failed");
    println!("change_order: 0: {:?}, 1: {:?}", ll2.get_data_from_position(0), ll2.get_data_from_position(1));
}
#[test]
fn new() {
    // A freshly constructed list contains no elements.
    let list: LinkedList<i32> = LinkedList::new();
    assert_eq!(0, list.len());
}
#[test]
fn add() {
    let mut ll: LinkedList<i32> = LinkedList::new();
    // add() appends at the tail; unwrap the Results (previously ignored) so
    // a failed insert aborts the test immediately.
    ll.add(2).unwrap();
    assert_eq!(ll.get_data_from_position(0).unwrap(), 2);
    ll.add(3).unwrap();
    assert_eq!(ll.get_data_from_position(1).unwrap(), 3);
}
#[test]
fn remove() {
    let mut ll: LinkedList<i32> = LinkedList::new();
    ll.add(2);
    ll.add(3);
    ll.add(2);
    // remove(2) deletes the FIRST node whose data equals 2, so [2, 3, 2]
    // becomes [3, 2].
    ll.remove(2);
    assert_eq!(ll.len(), 2);
    assert_eq!(ll.get_data_from_position(0).unwrap(), 3);
    assert_eq!(ll.get_data_from_position(1).unwrap(), 2);
    ll.add(20);
    assert_eq!(ll.get_data_from_position(2).unwrap(), 20);
    ll.add(100);
    // Tail-to-head rotation after adds: the old tail (d8) must become the
    // head and the old second-to-last (d9) the new tail.
    let d8 = ll.get_data_from_position(ll.len() - 1).unwrap();
    let d9 = ll.get_data_from_position(ll.len() - 2).unwrap();
    ll.change_order(ll.get_data_from_position(ll.len() - 1).unwrap(), 0);
    assert_eq!(ll.get_data_from_position(0).unwrap(), d8);
    assert_eq!(ll.get_data_from_position(ll.len() - 1).unwrap(), d9);
    // The same rotation must still hold after removing an element.
    ll.remove(3);
    let d10 = ll.get_data_from_position(ll.len() - 1).unwrap();
    let d11 = ll.get_data_from_position(ll.len() - 2).unwrap();
    ll.change_order(ll.get_data_from_position(ll.len() - 1).unwrap(), 0);
    assert_eq!(ll.get_data_from_position(0).unwrap(), d10);
    assert_eq!(ll.get_data_from_position(ll.len() - 1).unwrap(), d11);
}
#[test]
fn change_order() {
    let mut ll: LinkedList<i32> = LinkedList::new();
    ll.add(2);
    ll.add(3);
    ll.add(10);
    // change_order(data, idx) moves the node holding `data` to position idx.
    ll.change_order(3, 0);
    assert_eq!(ll.get_data_from_position(0).unwrap(), 3);
    assert_eq!(ll.get_data_from_position(1).unwrap(), 2);
    ll.change_order(10, 0);
    assert_eq!(ll.get_data_from_position(0).unwrap(), 10);
    assert_eq!(ll.get_data_from_position(2).unwrap(), 2);
    ll.add(25);
    assert_eq!(ll.get_data_from_position(3).unwrap(), 25);
    // Moving a node one slot forward swaps it with its neighbour (d1).
    let d1 = ll.get_data_from_position(2).unwrap();
    ll.change_order(ll.get_data_from_position(1).unwrap(), 2);
    assert_eq!(ll.get_data_from_position(1).unwrap(), d1);
    // Moving the tail into the middle promotes the second-to-last (d2).
    let d2 = ll.get_data_from_position(ll.len() - 2).unwrap();
    ll.change_order(ll.get_data_from_position(ll.len() - 1).unwrap(), 2);
    assert_eq!(ll.get_data_from_position(ll.len() - 1).unwrap(), d2);
    ll.add(40);
    // Moving an interior node to the tail demotes the old tail (d3) by one.
    let d3 = ll.get_data_from_position(ll.len() - 1).unwrap();
    ll.change_order(ll.get_data_from_position(1).unwrap(), ll.len() - 1);
    assert_eq!(ll.get_data_from_position(ll.len() - 2).unwrap(), d3);
    // Head-to-tail move: head (d4) becomes tail, old second (d5) becomes head.
    let d4 = ll.get_data_from_position(0).unwrap();
    let d5 = ll.get_data_from_position(1).unwrap();
    ll.change_order(ll.get_data_from_position(0).unwrap(), 4);
    assert_eq!(ll.get_data_from_position(ll.len() - 1).unwrap(), d4);
    assert_eq!(ll.get_data_from_position(0).unwrap(), d5);
    // Tail-to-head move: the inverse of the above.
    let d6 = ll.get_data_from_position(ll.len() - 1).unwrap();
    let d7 = ll.get_data_from_position(ll.len() - 2).unwrap();
    ll.change_order(ll.get_data_from_position(4).unwrap(), 0);
    assert_eq!(ll.get_data_from_position(0).unwrap(), d6);
    assert_eq!(ll.get_data_from_position(ll.len() - 1).unwrap(), d7);
}
use std::boxed;
/// One doubly linked node carrying a single `data` value and raw pointers to
/// its neighbours; `None` marks the ends of the chain.
///
/// NOTE(review): the struct derives `Copy`, so node values can be freely
/// bit-copied even though they are normally addressed through raw pointers —
/// confirm that is intentional.
#[derive(Copy, Clone, Debug)]
struct LinkedListNode<T>
where
    T: Copy,
    T: PartialEq,
    T: Debug,
{
    // Previous node in the chain (None when this node is the head)
    prev: Option<*mut LinkedListNode<T>>,
    // Next node in the chain (None when this node is the tail)
    next: Option<*mut LinkedListNode<T>>,
    // Payload; `T: Copy` lets accessors return it by value
    data: T,
}
impl<T> LinkedListNode<T>
where
T: Copy,
T: PartialEq,
T: Debug,
{
pub fn new(data: T) -> Self {
LinkedListNode {
prev: None,
next: None,
data,
}
}
}
/// A doubly linked list over `Copy` data, built from raw pointers obtained
/// via `Box::into_raw`.
///
/// NOTE(review): nodes are never returned to `Box::from_raw` and there is no
/// `Drop` impl, so every node allocation is leaked when removed or when the
/// list is dropped — confirm whether that is acceptable here.
#[derive(Debug)]
pub struct LinkedList<T>
where
    T: Copy,
    T: PartialEq,
    T: Debug,
{
    // First node, or None when the list is empty
    head: Option<*mut LinkedListNode<T>>,
    // Last node, or None when the list is empty
    tail: Option<*mut LinkedListNode<T>>,
    // Number of elements currently linked
    count: usize,
}
impl<T> LinkedList<T>
where
T: Copy,
T: PartialEq,
T: Debug,
{
pub fn new() -> Self {
LinkedList {
head: None,
tail: None,
count: 0,
}
}
pub fn len(&self) -> usize {
self.count
}
pub fn push_front(&mut self, data: T) -> Result<(), String> {
unsafe {
let node: *mut LinkedListNode<T> = boxed::Box::into_raw(boxed::Box::new(LinkedListNode::new(data)));
if self.len() == 0 {
self.head = Some(node);
self.tail = Some(node);
(*node).prev = None;
(*node).next = None;
} else {
let head = self.head.ok_or("LinkedList's head is none.".to_string())?;
self.head = Some(node);
(*head).prev = Some(node);
(*node).next = Some(head);
(*node).prev = None;
}
self.count += 1;
return Ok(());
}
}
// addするときは一番最後に入れる
pub fn add(&mut self, data: T) -> Result<(), String> {
unsafe {
let node: *mut LinkedListNode<T> = boxed::Box::into_raw(boxed::Box::new(LinkedListNode::new(data)));
if self.len() == 0 {
self.head = Some(node);
self.tail = Some(node);
(*node).prev = None;
(*node).next = None;
} else {
let tail = self.tail.ok_or("LinkedList's tail is none.".to_string())?;
self.tail = Some(node);
(*tail).next = Some(node);
(*node).prev = Some(tail);
(*node).next = None;
}
self.count += 1;
return Ok(());
}
}
pub fn remove(&mut self, data: T) -> Result<(), String> {
if self.len() == 0 {
return Err("LinkedList length is 0.".to_string());
}
if self.get_position_from_data(data).is_none() {
return Err("Data is not existing.".to_string());
}
self.count -= 1;
// 残った最後の一つの要素だった場合
if self.count == 0 {
self.head = None;
self.tail = None;
return Ok(());
}
unsafe {
let pointer: *mut LinkedListNode<T> = self.get_pointer_from_data(data).ok_or("data is not existing in LinkedList.".to_string())?;
if self.head.is_some() && self.tail.is_some() { // 基本的に要素が存在する場合は、headとtailは存在するはず
let head: *mut LinkedListNode<T> = self.head.unwrap();
let tail: *mut LinkedListNode<T> = self.tail.unwrap();
if pointer != head && pointer != tail { // headとtailの要素が今回の削除対象ではない場合
if let Some(prev) = (*pointer).prev {
replace(&mut (*prev).next, (*pointer).next);
}
if let Some(next) = (*pointer).next {
replace(&mut (*next).prev, (*pointer).prev);
}
(*pointer).prev = None;
(*pointer).next = None;
} else if pointer == head { // headが削除対象の場合
replace(&mut self.head, replace(&mut (*pointer).next, None));
replace(&mut (*self.head.unwrap()).prev, None);
} else if pointer == tail { // tailが削除対象の場合
replace(&mut self.tail, replace(&mut (*pointer).prev, None));
replace(&mut (*self.tail.unwrap()).next, None);
} else {
self.head = None;
self.tail = None;
}
return Ok(());
} else {
return Err("Element in LinkedList is null.".to_string());
}
}
}
pub fn change_order(&mut self, data: T, idx: usize) -> Result<(), String> {
let src_node_ptr: *mut LinkedListNode<T> = self.get_pointer_from_data(data).ok_or("data is not existing in LinkedList".to_string())?;
let position = self.get_position_from_data(data).ok_or("In chane_order, data's position is not found.".to_string())?;
// println!("{:?}", unsafe { (&mut *src_node_ptr) });
let order: usize =
if idx >= self.len() { self.len() - 1 }
else { idx };
unsafe {
// はじめに対象のNodeの前後の紐付きを更新する
if let Some(prev) = (*src_node_ptr).prev {
replace(&mut (*prev).next,(*src_node_ptr).next);
}
if let Some(next) = (*src_node_ptr).next {
replace(&mut (*next).prev, (*src_node_ptr).prev);
}
if order == 0 { // headになる場合
if let Some(head_node_ptr) = self.head {
if position == self.len() - 1 { // ポジションを変更したいNodeの元の位置がtailだった場合
self.tail = (*src_node_ptr).prev;
}
(*head_node_ptr).prev = Some(src_node_ptr);
(*src_node_ptr).next = Some(head_node_ptr);
(*src_node_ptr).prev = None;
self.head = Some(src_node_ptr);
return Ok(());
} else {
// もしheadが存在しない場合は、追加する(エラーとして終了でも良いが、、)
return self.add(data);
}
} else if order == self.len() - 1 { // tailになる場合
if position == 0 { // ポジションを変更したいNodeの元の位置がheadだった場合
self.head = (*src_node_ptr).next;
}
let tail_node_ptr: *mut LinkedListNode<T> = self.tail.ok_or("LinkedList is broken.".to_string())?;
(*tail_node_ptr).next = Some(src_node_ptr);
(*src_node_ptr).prev = Some(tail_node_ptr);
(*src_node_ptr).next = None;
self.tail = Some(src_node_ptr);
return Ok(());
} else {
// 挿入したい場所における入れ替え操作
// 1. 入れ替えたい先のNodeとその前の順番のNodeを取得
let dest_order_node_ptr: *mut LinkedListNode<T> = self.get_pointer_from_index(order)
.ok_or("idx argument to change_order may be out of bound.".to_string())?;
let dest_order_prev_node_ptr = (*dest_order_node_ptr).prev
.ok_or("change_order target order node's state is broken in LinkedList.".to_string())?;
// 2. 入れ替えたいNodeの紐付けを行う
(*dest_order_node_ptr).prev = Some(src_node_ptr);
(*dest_order_prev_node_ptr).next = Some(src_node_ptr);
// 3. 入れ替えたいNodeの元の位置がheadもしくはtailだった場合の処置
if position == self.len() - 1 { // ポジションを変更したいNodeの元の位置がtailだった場合
self.tail = (*src_node_ptr).prev;
} else if position == 0 { // ポジションを変更したいNodeの元の位置がheadだった場合
self.head = (*src_node_ptr).next;
}
// 4. 入れ替えたいNodeの前後の紐付けを行う
(*src_node_ptr).next = Some(dest_order_node_ptr);
(*src_node_ptr).prev = Some(dest_order_prev_node_ptr);
return Ok(());
}
}
}
/// Returns the zero-based position of the first node whose payload equals
/// `data`, or `None` when the list is empty or no node matches.
pub fn get_position_from_data(&self, data: T) -> Option<usize> {
    if self.len() == 0 { return None; }
    // `.or(None)` on an Option is a no-op; `?` alone propagates the None.
    let mut node: *mut LinkedListNode<T> = self.head?;
    let mut idx: usize = 0;
    loop {
        // SAFETY: `node` comes from this list's head/next chain, which the
        // list owns for the lifetime of `&self`.
        unsafe {
            if data == (*node).data {
                return Some(idx);
            }
            match (*node).next {
                Some(next) => {
                    idx += 1;
                    node = next;
                }
                None => break,
            }
        }
    }
    None
}
/// Returns the payload stored at position `idx`, or `None` when `idx` is
/// out of bounds.
///
/// NOTE(review): the bound check uses `self.count` while sibling accessors
/// use `self.len()`; presumably these agree — confirm.
pub fn get_data_from_position(&self, idx: usize) -> Option<T> {
    if idx >= self.count {
        return None;
    }
    // `.or(None)` was a redundant no-op; `?` alone propagates the None.
    let data_ptr: *mut LinkedListNode<T> = self.get_pointer_from_index(idx)?;
    // SAFETY: the pointer was just obtained from this list's node chain.
    unsafe { Some((*data_ptr).data) }
}
/// Returns the payload of the node immediately after the first node holding
/// `data`, or `None` if `data` is absent or its node is the tail.
pub fn get_next_data(&self, data: T) -> Option<T> {
    // `?` replaces the explicit `match`/`.or(None)` on the Options; the
    // control flow is identical.
    let idx = self.get_position_from_data(data)?;
    if idx + 1 >= self.len() {
        return None;
    }
    let next_data_ptr: *mut LinkedListNode<T> = self.get_pointer_from_index(idx + 1)?;
    // SAFETY: the pointer was just obtained from this list's node chain.
    unsafe { Some((*next_data_ptr).data) }
}
/// Returns the payload of the node immediately before the first node holding
/// `data`, or `None` if `data` is absent or its node is the head.
pub fn get_prev_data(&self, data: T) -> Option<T> {
    let idx = self.get_position_from_data(data)?;
    // `idx <= 0` on an unsigned value is just `idx == 0`.
    if idx == 0 {
        return None;
    }
    let prev_data_ptr: *mut LinkedListNode<T> = self.get_pointer_from_index(idx - 1)?;
    // SAFETY: the pointer was just obtained from this list's node chain.
    unsafe { Some((*prev_data_ptr).data) }
}
/// Returns a raw pointer to the first node whose payload equals `data`,
/// or `None` when no node matches.
fn get_pointer_from_data(&self, data: T) -> Option<*mut LinkedListNode<T>> {
    if self.len() == 0 { return None }
    // `.or(None)` was a redundant no-op; `?` alone propagates the None.
    let mut node: *mut LinkedListNode<T> = self.head?;
    loop {
        // SAFETY: `node` comes from this list's head/next chain, which the
        // list owns for the lifetime of `&self`.
        unsafe {
            if data == (*node).data {
                return Some(node);
            }
            match (*node).next {
                Some(next) => node = next,
                None => break,
            }
        }
    }
    None
}
/// Walks `idx` hops from the head and returns that node's pointer, or `None`
/// when `idx` is out of bounds or the chain ends early (broken list).
fn get_pointer_from_index(&self, idx: usize) -> Option<*mut LinkedListNode<T>> {
    if idx >= self.len() {
        return None;
    }
    // `.or(None)` was a redundant no-op; `?` alone propagates the None.
    let mut node: *mut LinkedListNode<T> = self.head?;
    for _ in 0..idx {
        // SAFETY: `node` comes from this list's head/next chain, which the
        // list owns for the lifetime of `&self`.
        unsafe {
            match (*node).next {
                Some(next) => node = next,
                None => return None,
            }
        }
    }
    Some(node)
}
}
//#[cfg(test)]
//mod test {
// use self::util::linked_list::LinkedList;
//
// #[test]
// fn new() {
// let ll: LinkedList<i32> = LinkedList::new();
// assert_eq!(ll.len(), 0);
// }
//}
|
/// Every token kind the lexer can produce. Variants carrying `&'a str`
/// borrow slices of the original source text.
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum TokenType<'a> {
    // Delimiters and punctuation.
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    LeftBracket,
    RightBracket,
    Comma,
    Dot,
    Colon,
    Semicolon,
    Slash,
    Backslash,
    Star,
    Mod,
    Hashtag,
    // Compound assignment operators.
    PlusEquals,
    MinusEquals,
    StarEquals,
    SlashEquals,
    BitXorEquals,
    BitOrEquals,
    BitAndEquals,
    ModEquals,
    // Data-structure accessor tokens.
    ListIndexer,
    MapIndexer,
    GridIndexer,
    ArrayIndexer,
    // Arithmetic and unary operators.
    Minus,
    Plus,
    Incrementer,
    Decrementer,
    Bang,
    Hook,
    Tilde,
    LessThanGreaterThan,
    // Logical and bitwise operators.
    LogicalAnd,
    LogicalOr,
    LogicalXor,
    BitAnd,
    BitOr,
    BitXor,
    BitLeft,
    BitRight,
    // Comparison operators.
    BangEqual,
    Equal,
    EqualEqual,
    Greater,
    GreaterEqual,
    Less,
    LessEqual,
    // Preprocessor-like constructs (payload borrows the source).
    Macro(&'a str),
    RegionBegin(&'a str),
    RegionEnd(&'a str),
    // Keywords.
    Define,
    Var,
    GlobalVar,
    If,
    Else,
    Return,
    For,
    Repeat,
    With,
    While,
    Do,
    Until,
    Switch,
    Case,
    DefaultCase,
    Break,
    Exit,
    Enum,
    // Word-operator aliases (e.g. `and`, `or`).
    AndAlias,
    OrAlias,
    XorAlias,
    NotAlias,
    ModAlias,
    Div,
    Then,
    // Layout / literals / catch-all.
    Newline(usize),
    Identifier(&'a str),
    String(&'a str),
    Number(&'a str),
    NumberStartDot(&'a str),
    NumberEndDot(&'a str),
    Comment(&'a str),
    MultilineComment(&'a str),
    UnidentifiedInput(&'a str),
}
/// A single lexed token: its kind plus the source position it was scanned at.
#[derive(Debug, PartialEq, Copy, Clone)]
pub struct Token<'a> {
    // The token's kind; may borrow slices of the source text.
    pub token_type: TokenType<'a>,
    // Source line the token was found on.
    pub line_number: u32,
    // Source column the token starts at.
    pub column_number: u32,
}
impl<'a> Token<'a> {
    /// Builds a token from its kind and the position it was scanned at.
    pub fn new(token_type: TokenType, line_number: u32, column_number: u32) -> Token {
        Token { token_type, line_number, column_number }
    }
}
use std::fmt;
impl<'a> fmt::Display for Token<'a> {
    /// Formats the token as `Token <kind> on <line>:<column>.`, using the
    /// pretty (`{:#?}`) Debug rendering for the token kind.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Token {:#?} on {}:{}.",
            self.token_type, self.line_number, self.column_number
        )
    }
}
|
use crate::features::syntax::StatementFeature;
use crate::parse::visitor::tests::assert_no_stmt_feature;
use crate::parse::visitor::tests::assert_stmt_feature;
mod rest_args;
// A plain named function declaration is classified as FunctionDeclaration.
#[test]
fn func_decl() {
    assert_stmt_feature(
        "function foo() {
}",
        StatementFeature::FunctionDeclaration,
    )
}
// `export` does not change the classification of a named function declaration.
#[test]
fn export_func_decl() {
    assert_stmt_feature(
        "export function foo() {
}",
        StatementFeature::FunctionDeclaration,
    )
}
// A default-exported function without a name is an anonymous declaration.
#[test]
fn anon_func_decl() {
    assert_stmt_feature(
        "export default function() {
} 123",
        StatementFeature::AnonymousFunctionDeclaration,
    )
}
// A default-exported function WITH a name counts as a named declaration.
#[test]
fn export_default_named_func_decl() {
    assert_stmt_feature(
        "export default function foo() {}",
        StatementFeature::FunctionDeclaration,
    )
}
// `function*` is classified as a generator function declaration.
#[test]
fn generator_func_decl() {
    assert_stmt_feature(
        "function* gen(arg) {
yield* arg;
}",
        StatementFeature::GeneratorFunctionDeclaration,
    )
}
// A default-exported, unnamed `function*` is an anonymous generator declaration.
#[test]
fn anon_generator_func_decl() {
    assert_stmt_feature(
        "export default function*(arg) {
yield* arg;
}",
        StatementFeature::AnonymousGeneratorFunctionDeclaration,
    )
}
// `export` plus a named `function*` remains a generator declaration.
#[test]
fn export_generator_func_decl() {
    assert_stmt_feature(
        "export function* a(arg) {
}",
        StatementFeature::GeneratorFunctionDeclaration,
    )
}
// `async function` is classified as an async function declaration.
#[test]
fn async_func_decl() {
    assert_stmt_feature(
        "async function gen(arg) {
await arg;
}",
        StatementFeature::AsyncFunctionDeclaration,
    )
}
// A default-exported, unnamed async function is an anonymous async declaration.
#[test]
fn anon_async_func_decl() {
    assert_stmt_feature(
        "export default async function(arg) {
await arg;
}",
        StatementFeature::AnonymousAsyncFunctionDeclaration,
    )
}
// `export` plus a named async function remains an async declaration.
#[test]
fn export_async_func_decl() {
    assert_stmt_feature(
        "export async function a(arg) {
}",
        StatementFeature::AsyncFunctionDeclaration,
    )
}
// `async function*` combines both modifiers into an async generator declaration.
#[test]
fn async_generator_func_decl() {
    assert_stmt_feature(
        "async function* gen(arg) {
yield* (await arg);
}",
        StatementFeature::AsyncGeneratorFunctionDeclaration,
    )
}
// A default-exported, unnamed `async function*` is the anonymous async-generator case.
#[test]
fn anon_async_generator_func_decl() {
    assert_stmt_feature(
        "export default async function*(arg) {
yield* (await arg);
}",
        StatementFeature::AnonymousAsyncGeneratorFunctionDeclaration,
    )
}
// `export` plus a named `async function*` remains an async generator declaration.
#[test]
fn export_async_generator_func_decl() {
    assert_stmt_feature(
        "export async function* a(arg) {
}",
        StatementFeature::AsyncGeneratorFunctionDeclaration,
    )
}
|
use crate::apps::{data::TransferOwnership, service::AdminService, Members};
use actix_web::{web, HttpResponse};
use drogue_cloud_service_api::auth::user::UserInformation;
use std::ops::Deref;
/// Shared actix application state wrapping the admin service implementation.
pub struct WebData<S: AdminService> {
    // The concrete service handlers delegate to (see `Deref` below).
    pub service: S,
}
// Deref to the inner service so handlers can call service methods directly
// on `web::Data<WebData<S>>` (e.g. `service.transfer(...)`).
impl<S: AdminService> Deref for WebData<S> {
    type Target = S;
    fn deref(&self) -> &Self::Target {
        &self.service
    }
}
/// Initiate an ownership transfer.
///
/// Delegates to `AdminService::transfer`; replies `202 Accepted` with the
/// service's payload as JSON on success, converting service errors into
/// actix errors.
pub async fn transfer<S>(
    user: UserInformation,
    service: web::Data<WebData<S>>,
    app_id: web::Path<String>,
    payload: web::Json<TransferOwnership>,
) -> Result<HttpResponse, actix_web::Error>
where
    S: AdminService + 'static,
{
    // The former `let result = …; result` binding was redundant
    // (clippy::let_and_return); return the match expression directly.
    match service
        .transfer(&user, app_id.into_inner(), payload.0)
        .await
    {
        Ok(key) => Ok(HttpResponse::Accepted().json(key)),
        Err(e) => Err(e.into()),
    }
}
/// Cancel an ownership transfer.
///
/// Delegates to `AdminService::cancel`; replies `204 No Content` on success,
/// converting service errors into actix errors.
pub async fn cancel<S>(
    user: UserInformation,
    service: web::Data<WebData<S>>,
    app_id: web::Path<String>,
) -> Result<HttpResponse, actix_web::Error>
where
    S: AdminService + 'static,
{
    // The former `let result = …; result` binding was redundant
    // (clippy::let_and_return); return the match expression directly.
    match service.cancel(&user, app_id.into_inner()).await {
        Ok(key) => Ok(HttpResponse::NoContent().json(key)),
        Err(e) => Err(e.into()),
    }
}
/// Accept an ownership transfer.
///
/// Delegates to `AdminService::accept`; replies `204 No Content` on success,
/// converting service errors into actix errors.
pub async fn accept<S>(
    user: UserInformation,
    service: web::Data<WebData<S>>,
    app_id: web::Path<String>,
) -> Result<HttpResponse, actix_web::Error>
where
    S: AdminService + 'static,
{
    // The former `let result = …; result` binding was redundant
    // (clippy::let_and_return); return the match expression directly.
    match service.accept(&user, app_id.into_inner()).await {
        Ok(key) => Ok(HttpResponse::NoContent().json(key)),
        Err(e) => Err(e.into()),
    }
}
/// Get member list.
///
/// Delegates to `AdminService::get_members`; replies `200 OK` with the
/// members as JSON on success, converting service errors into actix errors.
pub async fn get_members<S>(
    user: UserInformation,
    service: web::Data<WebData<S>>,
    app_id: web::Path<String>,
) -> Result<HttpResponse, actix_web::Error>
where
    S: AdminService + 'static,
{
    // The former `let result = …; result` binding was redundant
    // (clippy::let_and_return); return the match expression directly.
    match service.get_members(&user, app_id.into_inner()).await {
        Ok(members) => Ok(HttpResponse::Ok().json(members)),
        Err(e) => Err(e.into()),
    }
}
/// Set member list.
///
/// Delegates to `AdminService::set_members`; replies an empty
/// `204 No Content` on success, converting service errors into actix errors.
pub async fn set_members<S>(
    user: UserInformation,
    service: web::Data<WebData<S>>,
    app_id: web::Path<String>,
    payload: web::Json<Members>,
) -> Result<HttpResponse, actix_web::Error>
where
    S: AdminService + 'static,
{
    // The former `let result = …; result` binding was redundant
    // (clippy::let_and_return); return the match expression directly.
    match service
        .set_members(&user, app_id.into_inner(), payload.0)
        .await
    {
        Ok(_) => Ok(HttpResponse::NoContent().finish()),
        Err(e) => Err(e.into()),
    }
}
|
/*!
```rudra-poc
[target]
crate = "arr"
version = "0.6.0"
[[target.peer]]
crate = "crossbeam-utils"
version = "0.7.2"
[test]
cargo_flags = ["--release"]
cargo_toolchain = "nightly"
[report]
issue_url = "https://github.com/sjep/array/issues/1"
issue_date = 2020-08-25
rustsec_url = "https://github.com/RustSec/advisory-db/pull/364"
rustsec_id = "RUSTSEC-2020-0034"
[[bugs]]
analyzer = "Manual"
guide = "UnsafeDestructor"
bug_class = "Other"
bug_count = 3
rudra_report_locations = []
[[bugs]]
analyzer = "SendSyncVariance"
bug_class = "SendSyncVariance"
bug_count = 2
rudra_report_locations = ["src/lib.rs:47:1: 47:35", "src/lib.rs:46:1: 46:35"]
```
!*/
#![forbid(unsafe_code)]
use arr::Array;
use crossbeam_utils::thread;
use std::rc::Rc;
use std::sync::atomic::{AtomicUsize, Ordering};
// Global drop counter for the PoC; incremented by `DropDetector::drop`.
// (The lowercase name triggers a non_upper_case_globals warning but is kept
// verbatim to match the published proof of concept.)
static drop_cnt: AtomicUsize = AtomicUsize::new(0);
/// Payload type that logs and counts every drop so the PoC can observe
/// spurious drops and leaks caused by the buggy `arr` crate.
#[derive(Clone)]
struct DropDetector(u32);
impl Drop for DropDetector {
    /// Counts and logs each drop; a drop that was never paired with a valid
    /// construction indicates the uninitialized-memory bug in `arr`.
    fn drop(&mut self) {
        drop_cnt.fetch_add(1, Ordering::Relaxed);
        println!("Dropping {}", self.0);
    }
}
/// Proof-of-concept driver: each scoped block below deliberately triggers one
/// of the three reported soundness bugs in `arr` 0.6.0. Do NOT "fix" this
/// code — the misbehavior is the point.
fn main() {
    {
        // https://github.com/sjep/array/blob/efa214159eaad2abda7b072f278d678f8788c307/src/lib.rs#L46-L47
        // 1. Incorrect Sync/Send bounds for `Array` allows to smuggle non-Sync/Send types across the thread boundary
        let rc = Rc::new(0usize);
        let arr = Array::new_from_template(1, &rc);
        let arr_handle = &arr;
        let rc_identity1 = Rc::as_ptr(&rc) as usize;
        let rc_identity2 = thread::scope(|s| {
            s.spawn(|_| {
                // shouldn't be allowed!
                println!("1. Cloning Rc in a different thread");
                let another_rc: Rc<usize> = arr_handle[0].clone();
                Rc::as_ptr(&another_rc) as usize
            })
            .join()
            .unwrap()
        })
        .unwrap();
        // Both clones point at the same (non-atomic!) refcount — data race.
        assert_eq!(rc_identity1, rc_identity2);
    }
    {
        // https://github.com/sjep/array/blob/efa214159eaad2abda7b072f278d678f8788c307/src/lib.rs#L129-L148
        // 2. `Index` and `IndexMut` does not check the bound
        let arr = Array::<usize>::zero(1);
        // Index 10 of a length-1 array: out-of-bounds read, no panic.
        println!("2. OOB read: {}", arr[10]);
    }
    {
        // https://github.com/sjep/array/blob/efa214159eaad2abda7b072f278d678f8788c307/src/lib.rs#L111-L127
        // https://github.com/sjep/array/blob/efa214159eaad2abda7b072f278d678f8788c307/src/lib.rs#L165-L174
        // 3. `Array::new_from_template()` drops uninitialized memory because of `*ptr = value` pattern.
        // It also leaks memory since it doesn't call `drop_in_place()` in `drop()`.
        println!("3. Uninitialized drop / memory leak in `new_from_template()`");
        let _ = Array::new_from_template(1, &DropDetector(12345));
    }
}
|
pub use itertools::Itertools as _;
use unzip_n::unzip_n;
// Generate a public `unzip_n`-style helper for iterators of 4-tuples.
unzip_n!(pub 4);
|
use std::io::File;
use std::io::BufferedReader;
fn main() {
let mut source = BufferedReader::new(
File::open(&Path::new("file.rs"))
);
let mut c:int = 0;
for line in source.lines() {
c+=1;
print!("{}: {}", c, line.unwrap());
}
}
|
use crate::yield_now::yield_now;
use std::sync::atomic::{AtomicUsize, Ordering};
/// RAII guard returned by `DelayDrop::delay_drop`; its `Drop` impl clears the
/// "started" flag again (see below).
pub struct DropGuard<'a>(&'a DelayDrop);
/// Synchronization helper that delays dropping its owner until another
/// execution context (the "kernel") has signalled completion via the guard.
pub struct DelayDrop {
    // can_drop & 0x1 is the flag that when kernel is done
    // can_drop & 0x2 is the flag that when kernel is started
    can_drop: AtomicUsize,
}
impl DelayDrop {
    /// Creates a fresh instance with both flags clear.
    pub fn new() -> Self {
        DelayDrop {
            can_drop: AtomicUsize::new(0),
        }
    }
    /// Marks the kernel as started (stores 0x2) and hands out the guard whose
    /// drop clears that bit again.
    pub fn delay_drop(&self) -> DropGuard {
        self.can_drop.store(2, Ordering::Release);
        DropGuard(self)
    }
    #[allow(dead_code)]
    #[inline]
    pub fn reset(&self) {
        // wait the kernel finished
        // (spins while the value is exactly 0x2: started but not yet done)
        while self.can_drop.load(Ordering::Acquire) == 2 {
            yield_now();
        }
        self.can_drop.store(0, Ordering::Release);
    }
}
impl<'a> Drop for DropGuard<'a> {
    fn drop(&mut self) {
        // kernel would set it to true
        // `fetch_and(1)` clears the "started" bit (0x2) while preserving the
        // "done" bit (0x1), releasing anything spinning on `== 2`.
        self.0.can_drop.fetch_and(1, Ordering::Release);
    }
}
impl Drop for DelayDrop {
    fn drop(&mut self) {
        // wait for drop
        // Spin until the guard clears the "started" bit, so the kernel is no
        // longer referencing this value when it is freed.
        while self.can_drop.load(Ordering::Acquire) == 2 {
            yield_now();
        }
    }
}
|
use crate::name_resolution::TopLevelContext;
use crate::rustspec::*;
use crate::rustspec_to_coq_base::*;
use crate::rustspec_to_coq_ssprove_pure;
use crate::rustspec_to_coq_ssprove_state;
use crate::rustspec_to_coq_ssprove_state::translate_base_typ;
use core::slice::Iter;
use itertools::Itertools;
use pretty::RcDoc;
use rustc_session::Session;
use rustc_span::DUMMY_SP;
use std::fs::File;
use std::io::Write;
use std::path;
/// Renders an enum constructor (its top-level identifier) as a document.
fn translate_constructor<'a>(enum_name: TopLevelIdent) -> RcDoc<'a> {
    let name = enum_name.string;
    RcDoc::as_string(name)
}
/// Renders the Coq name of an enum case, e.g. `Case (Enum) ty1 ty2`.
///
/// When `explicit` is set and the enum has type arguments, the constructor is
/// prefixed with `@` and the instantiated type arguments are appended.
/// `Option`/`Result` get special treatment: their enum name is omitted.
/// Panics if `enum_name` is not `BaseTyp::Named`.
fn translate_enum_case_name<'a>(
    enum_name: BaseTyp,
    case_name: TopLevelIdent,
    explicit: bool,
) -> RcDoc<'a> {
    match enum_name {
        BaseTyp::Named(name, opts) => match opts {
            // No type arguments: just the constructor name.
            None => translate_constructor(case_name),
            Some(tyvec) => if explicit && tyvec.len() != 0 {
                // `@` makes Coq's implicit arguments explicit.
                RcDoc::as_string("@")
            } else {
                RcDoc::nil()
            }
            .append(translate_constructor(case_name))
            .append(
                // Option/Result constructors stand alone; any other enum gets
                // its (parenthesized) type name appended.
                if (name.0).string == "Option" || (name.0).string == "Result" {
                    RcDoc::nil()
                } else {
                    make_paren(translate_toplevel_ident(name.0))
                },
            )
            .append(if explicit && tyvec.len() != 0 {
                // Append the translated type arguments, space-separated.
                RcDoc::space().append(RcDoc::intersperse(
                    tyvec
                        .into_iter()
                        .map(|(x, _)| rustspec_to_coq_ssprove_state::translate_base_typ(x)),
                    RcDoc::space(),
                ))
            } else {
                RcDoc::nil()
            }),
        },
        _ => panic!("should not happen"),
    }
}
/// Like `translate_pattern`, but prefixes tuple patterns with a tick (`'`)
/// before expanding them; all other patterns translate unchanged.
fn translate_pattern_tick<'a>(p: Pattern) -> RcDoc<'a, ()> {
    if let Pattern::Tuple(_) = p {
        // If the pattern is a tuple, expand it with a leading tick.
        RcDoc::as_string("'").append(translate_pattern(p))
    } else {
        translate_pattern(p)
    }
}
/// Recursively renders a hacspec pattern as a Coq pattern document.
fn translate_pattern<'a>(p: Pattern) -> RcDoc<'a, ()> {
    match p {
        // Bare enum case, e.g. `None`.
        Pattern::EnumCase(ty_name, name, None) => {
            translate_enum_case_name(ty_name, name.0.clone(), false)
        }
        // Enum case with a payload pattern, e.g. `Some (x)`.
        Pattern::EnumCase(ty_name, name, Some(inner_pat)) => {
            translate_enum_case_name(ty_name, name.0.clone(), false)
                .append(RcDoc::space())
                .append(make_paren(translate_pattern(inner_pat.0)))
        }
        // Identifier binding (mutability flag is handled by the caller).
        Pattern::IdentPat(x, _) => translate_ident(x.clone()),
        Pattern::LiteralPat(x) => translate_literal(x.clone()),
        Pattern::WildCard => RcDoc::as_string("_"),
        // Tuples render their components comma-separated in parentheses.
        Pattern::Tuple(pats) => make_tuple(pats.into_iter().map(|(pat, _)| translate_pattern(pat))),
    }
}
/// Renders a hacspec literal as its Coq (ChoiceEquality) representation.
///
/// Integer literals use `@repr` with the machine-word size; note that signed
/// and unsigned variants of the same width share the same `U<n>` carrier.
fn translate_literal<'a>(lit: Literal) -> RcDoc<'a, ()> {
    match lit {
        Literal::Unit => RcDoc::as_string("(tt : unit_ChoiceEquality)"),
        Literal::Bool(true) => RcDoc::as_string("(true : bool_ChoiceEquality)"),
        Literal::Bool(false) => RcDoc::as_string("(false : bool_ChoiceEquality)"),
        Literal::Int128(x) => RcDoc::as_string(format!("@repr U128 {}", x)),
        Literal::UInt128(x) => RcDoc::as_string(format!("@repr U128 {}", x)),
        Literal::Int64(x) => RcDoc::as_string(format!("@repr U64 {}", x)),
        Literal::UInt64(x) => RcDoc::as_string(format!("@repr U64 {}", x)),
        Literal::Int32(x) => RcDoc::as_string(format!("@repr U32 {}", x)),
        Literal::UInt32(x) => RcDoc::as_string(format!("@repr U32 {}", x)),
        Literal::Int16(x) => RcDoc::as_string(format!("@repr U16 {}", x)),
        Literal::UInt16(x) => RcDoc::as_string(format!("@repr U16 {}", x)),
        Literal::Int8(x) => RcDoc::as_string(format!("@repr U8 {}", x)),
        Literal::UInt8(x) => RcDoc::as_string(format!("@repr U8 {}", x)),
        Literal::Isize(x) => RcDoc::as_string(format!("isize {}", x)),
        Literal::Usize(x) => RcDoc::as_string(format!("usize {}", x)),
        Literal::UnspecifiedInt(_) => panic!("Got a `UnspecifiedInt` literal: those should have been resolved into concrete types during the typechecking phase"),
        Literal::Str(msg) => RcDoc::as_string(format!("\"{}\"", msg)),
    }
}
/// Emits a Coq `letb … := expr in` binding for `pat`.
///
/// The keyword is specialized by flags: `nd` is appended for monadic binds
/// (with the matching bind-both instance in parens) and `m` for mutable
/// bindings; non-monadic mutable bindings additionally declare a
/// `loc( <pat>_loc )` location. `typ`, when present, annotates the pattern.
fn make_let_binding<'a>(
    pat: Pattern,
    typ: Option<RcDoc<'a, ()>>,
    expr: RcDoc<'a, ()>,
    monad_bind: bool,
    early_return_typ: Option<CarrierTyp>,
) -> RcDoc<'a, ()> {
    // Only an identifier pattern flagged `true` counts as mutable.
    let mutable = if let Pattern::IdentPat(_, true) = pat.clone() {
        true
    } else {
        false
    };
    RcDoc::as_string("letb")
        .append(if monad_bind {
            // Monadic bind: `letbnd` / `letbndm`, plus the bind instance.
            RcDoc::as_string("nd")
                .append(if mutable {
                    RcDoc::as_string("m")
                } else {
                    RcDoc::nil()
                })
                .append(make_paren(match early_return_typ.clone() {
                    Some(CarrierTyp::Result(_, (c, _))) => {
                        RcDoc::as_string("ChoiceEqualityMonad.result_bind_both ")
                            .append(rustspec_to_coq_ssprove_state::translate_base_typ(c))
                    }
                    Some(CarrierTyp::Option(_)) => {
                        RcDoc::as_string("ChoiceEqualityMonad.option_bind_both")
                    }
                    None => RcDoc::as_string("_"),
                }))
        } else if mutable {
            RcDoc::as_string("m")
        } else {
            RcDoc::nil()
        })
        .append(RcDoc::space())
        .append(
            // Pattern, optionally annotated with `: tau`.
            match typ.clone() {
                None => translate_pattern_tick(pat.clone()),
                Some(tau) => translate_pattern_tick(pat.clone())
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(":"))
                    .append(RcDoc::space())
                    .append(tau),
            }
            .group(),
        )
        .append(RcDoc::space())
        .append(if mutable && !monad_bind {
            // Non-monadic mutable binding declares its state location.
            RcDoc::as_string("loc(")
                .append(RcDoc::space())
                .append(translate_pattern(pat.clone()))
                .append(RcDoc::as_string("_loc"))
                .append(RcDoc::space())
                .append(RcDoc::as_string(")"))
                .append(RcDoc::space())
        } else {
            RcDoc::nil()
        })
        .append(RcDoc::as_string(":="))
        .group()
        .append(RcDoc::line().append(expr.group()))
        .nest(2)
        .append(RcDoc::space())
        .append(RcDoc::as_string("in"))
}
/// Translates a hacspec expression into its pretty-printed Coq (SSProve)
/// document form.
///
/// Fix: the `Int128` cast used to print `"pub_i28"` — a typo for
/// `"pub_i128"` (cf. the sibling `pub_u128`/`pub_i64` arms). Unused
/// pattern bindings in unimplemented arms are now underscored.
fn translate_expression<'a>(e: Expression, top_ctx: &'a TopLevelContext) -> RcDoc<'a, ()> {
    match e {
        Expression::MonadicLet(..) => panic!("TODO: Coq support for Expression::MonadicLet"),
        Expression::QuestionMark(..) => {
            // TODO: eliminate this `panic!` with nicer types (See issue #303)
            panic!("[Expression::QuestionMark] nodes should have been eliminated before printing.")
        }
        // Binary operators print as `(e1) .op (e2)`.
        Expression::Binary((op, _), e1, e2, op_typ) => {
            make_paren(translate_expression((*e1).0, top_ctx))
                .append(RcDoc::space())
                .append(translate_binop(
                    RcDoc::as_string("."),
                    op,
                    op_typ.as_ref().unwrap(),
                    top_ctx,
                ))
                .append(RcDoc::space())
                .append(make_paren(translate_expression((*e2).0, top_ctx)))
        }
        // Not implemented yet; payloads are deliberately ignored.
        Expression::MatchWith(_arg, _arms) => RcDoc::as_string("TODO match"),
        Expression::FieldAccessor(_e1, _field) => {
            unimplemented!()
        }
        Expression::EnumInject(enum_name, case_name, payload) => {
            let trans = match payload {
                None => RcDoc::nil(),
                Some(payload) => RcDoc::space().append(make_paren(translate_expression(
                    *payload.0.clone(),
                    top_ctx,
                ))),
            };
            translate_enum_case_name(enum_name.clone(), case_name.0.clone(), true).append(trans)
        }
        // `if cond then e_t else e_f`, with the condition forced pure.
        Expression::InlineConditional(cond, e_t, e_f) => RcDoc::as_string("if")
            .append(RcDoc::space())
            .append(RcDoc::as_string("is_pure (I := [interface])"))
            .append(RcDoc::space())
            .append(make_paren(translate_expression((*cond).0, top_ctx)))
            .append(RcDoc::line())
            .append(RcDoc::as_string("then"))
            .append(RcDoc::space())
            .append(translate_expression((*e_t).0, top_ctx))
            .append(RcDoc::line())
            .append(RcDoc::as_string("else"))
            .append(RcDoc::space())
            .append(translate_expression((*e_f).0, top_ctx)),
        Expression::Unary(op, e1, op_typ) => translate_unop(op, op_typ.as_ref().unwrap().clone())
            .append(RcDoc::space())
            .append(make_paren(translate_expression((*e1).0, top_ctx)))
            .group(),
        Expression::Lit(lit) => RcDoc::as_string("lift_to_both0")
            .append(RcDoc::space())
            .append(make_paren(translate_literal(lit.clone()))),
        // Tuples: `tt` for arity 0, the bare element for arity 1, `prod_b(…)`
        // otherwise.
        Expression::Tuple(es) => {
            let iter = es
                .into_iter()
                .map(|(e, _)| translate_expression(e, top_ctx));
            match &iter.size_hint().1 {
                Some(0) => RcDoc::as_string("tt"),
                Some(1) => RcDoc::intersperse(iter, RcDoc::nil()),
                _ => RcDoc::as_string("prod_b(")
                    .append(
                        RcDoc::line_()
                            .append(RcDoc::intersperse(
                                iter,
                                RcDoc::as_string(",").append(RcDoc::line()),
                            ))
                            .group()
                            .nest(2),
                    )
                    .append(RcDoc::line_())
                    .append(RcDoc::as_string(")"))
                    .group(),
            }
        }
        Expression::Named(p) => RcDoc::as_string("lift_to_both0")
            .append(RcDoc::space())
            .append(translate_ident(p.clone())),
        Expression::FuncCall(prefix, name, args, arg_types) => {
            let (func_name, additional_args, _func_ret_ty, extra_info) =
                rustspec_to_coq_ssprove_pure::translate_func_name(
                    prefix.clone(),
                    Ident::TopLevel(name.0.clone()),
                    top_ctx,
                    arg_types.unwrap(),
                );
            let total_args = args.len() + additional_args.len();
            func_name
                // We append implicit arguments first
                .append(RcDoc::concat(
                    additional_args
                        .into_iter()
                        .map(|arg| RcDoc::space().append(make_paren(arg))),
                ))
                // Then the explicit arguments
                .append(RcDoc::concat(args.into_iter().enumerate().map(
                    |(i, ((arg, _), _))| {
                        RcDoc::space().append(make_paren(if i < extra_info.len() {
                            let (pre_arg, post_arg) = extra_info[i].clone();
                            pre_arg
                                .clone()
                                .append(translate_expression(arg, top_ctx))
                                .append(post_arg.clone())
                        } else {
                            translate_expression(arg, top_ctx)
                        }))
                    },
                )))
                .append(if total_args == 0 {
                    RcDoc::space()
                } else {
                    RcDoc::nil()
                })
        }
        Expression::MethodCall(sel_arg, sel_typ, (f, _), args, arg_types) => {
            // `.clone()` is a no-op in the Coq model, so only print the receiver.
            if f.string == "clone" {
                // Then the self argument
                make_paren(translate_expression((sel_arg.0).0, top_ctx))
                    // And finally the rest of the arguments
                    .append(RcDoc::concat(args.into_iter().map(|((arg, _), _)| {
                        RcDoc::space().append(make_paren(translate_expression(arg, top_ctx)))
                    })))
            } else {
                let (func_name, additional_args, _func_ret_ty, extra_info) =
                    rustspec_to_coq_ssprove_pure::translate_func_name(
                        sel_typ.clone().map(|x| x.1),
                        Ident::TopLevel(f.clone()),
                        top_ctx,
                        arg_types.unwrap(),
                    );
                func_name // We append implicit arguments first
                    .append(RcDoc::concat(
                        additional_args
                            .into_iter()
                            .map(|arg| RcDoc::space().append(make_paren(arg))),
                    ))
                    .append(RcDoc::space())
                    // Then the self argument
                    .append(make_paren(translate_expression((sel_arg.0).0, top_ctx)))
                    // And finally the rest of the arguments
                    .append(RcDoc::concat(args.into_iter().enumerate().map(
                        |(i, ((arg, _), _))| {
                            RcDoc::space().append(make_paren(if i < extra_info.len() {
                                let (pre_arg, post_arg) = extra_info[i].clone();
                                pre_arg
                                    .clone()
                                    .append(translate_expression(arg, top_ctx))
                                    .append(post_arg.clone())
                            } else {
                                translate_expression(arg, top_ctx)
                            }))
                        },
                    )))
            }
        }
        // `x[e2]` prints as `<array|seq>_index (x) (e2)`.
        Expression::ArrayIndex(x, e2, typ) => {
            let array_or_seq = array_or_seq(typ.unwrap(), top_ctx);
            let trans_e2 = translate_expression((*e2).0, top_ctx);
            array_or_seq
                .append(RcDoc::as_string("_index"))
                .append(RcDoc::space())
                .append(make_paren(translate_ident(x.0.clone())))
                .append(RcDoc::space())
                .append(make_paren(trans_e2))
        }
        Expression::NewArray(_array_name, inner_ty, args) => {
            let inner_ty = inner_ty.unwrap();
            // inner_ty is the type of the cell elements
            // TODO: do the case when _array_name is None (the Seq case)
            match _array_name {
                // Seq case
                None => make_list(
                    args.into_iter()
                        .map(|(e, _)| translate_expression(e.clone(), top_ctx)),
                ),
                Some(_) =>
                // Array case
                {
                    RcDoc::as_string(format!("@{}_from_list", ARRAY_MODULE))
                        .append(RcDoc::space())
                        .append(rustspec_to_coq_ssprove_state::translate_base_typ(
                            inner_ty.clone(),
                        ))
                        .append(RcDoc::space())
                        .append(make_paren(make_list(args.into_iter().map(|(e, _)| {
                            make_paren(translate_expression(e, top_ctx))
                                .append(RcDoc::as_string(" : "))
                                .append(rustspec_to_coq_ssprove_state::translate_base_typ(
                                    inner_ty.clone(),
                                ))
                        }))))
                }
            }
        }
        Expression::IntegerCasting(x, new_t, old_t) => {
            {
                let old_t = old_t.unwrap();
                match old_t {
                    // Casting from usize/isize: use the public constructors.
                    BaseTyp::Usize | BaseTyp::Isize => {
                        let new_t_doc = match &new_t.0 {
                            BaseTyp::UInt8 => RcDoc::as_string("pub_u8"),
                            BaseTyp::UInt16 => RcDoc::as_string("pub_u16"),
                            BaseTyp::UInt32 => RcDoc::as_string("pub_u32"),
                            BaseTyp::UInt64 => RcDoc::as_string("pub_u64"),
                            BaseTyp::UInt128 => RcDoc::as_string("pub_u128"),
                            BaseTyp::Usize => RcDoc::as_string("usize"),
                            BaseTyp::Int8 => RcDoc::as_string("pub_i8"),
                            BaseTyp::Int16 => RcDoc::as_string("pub_i16"),
                            BaseTyp::Int32 => RcDoc::as_string("pub_i32"),
                            BaseTyp::Int64 => RcDoc::as_string("pub_i64"),
                            BaseTyp::Int128 => RcDoc::as_string("pub_i128"), // was the typo "pub_i28"
                            BaseTyp::Isize => RcDoc::as_string("isize"),
                            _ => panic!(), // should not happen
                        };
                        let trans_x = translate_expression(x.0.clone(), top_ctx);
                        new_t_doc.append(RcDoc::space()).append(make_paren(
                            RcDoc::as_string("is_pure")
                                .append(RcDoc::space())
                                .append(make_paren(trans_x)),
                        ))
                    }
                    // Any other source type: re-represent via repr/unsigned.
                    _ => {
                        let new_t_doc = match &new_t.0 {
                            BaseTyp::UInt8 => String::from("uint8"),
                            BaseTyp::UInt16 => String::from("uint16"),
                            BaseTyp::UInt32 => String::from("uint32"),
                            BaseTyp::UInt64 => String::from("uint64"),
                            BaseTyp::UInt128 => String::from("uint128"),
                            BaseTyp::Usize => String::from("uint32"),
                            BaseTyp::Int8 => String::from("int8"),
                            BaseTyp::Int16 => String::from("int16"),
                            BaseTyp::Int32 => String::from("int32"),
                            BaseTyp::Int64 => String::from("int64"),
                            BaseTyp::Int128 => String::from("int128"),
                            BaseTyp::Isize => String::from("int32"),
                            BaseTyp::Named((TopLevelIdent { string: s, .. }, _), None) => s.clone(),
                            _ => panic!(), // should not happen
                        };
                        let _secret = match &new_t.0 {
                            BaseTyp::Named(_, _) => true,
                            _ => false,
                        };
                        let trans_x = translate_expression(x.as_ref().0.clone(), top_ctx);
                        RcDoc::as_string("(fun x => lift_to_both0 (repr (unsigned x)))")
                            .append(make_paren(trans_x))
                            .group()
                    }
                }
            }
        }
    }
}
/// Recursively renders a sequence of statements as nested Coq `letb` bindings
/// and control-flow documents, consuming the iterator one statement at a time
/// and appending the translation of the remaining statements after each one.
fn translate_statements<'a>(
    mut statements: Iter<Spanned<Statement>>,
    top_ctx: &'a TopLevelContext,
    smv: ScopeMutableVars,
    function_dependencies: FunctionDependencies,
) -> RcDoc<'a, ()> {
    // Base case: nothing left to translate.
    let s = match statements.next() {
        None => return RcDoc::nil(),
        Some(s) => s.clone(),
    };
    match s.0 {
        // `let pat [: typ] = expr` — a plain (possibly monadic) binding.
        Statement::LetBinding((pat, _), typ, (expr, _), carrier, question_mark) => {
            make_let_binding(
                pat.clone(),
                typ.map(|(typ, _)| rustspec_to_coq_ssprove_state::translate_typ(typ)),
                translate_expression(expr.clone(), top_ctx),
                question_mark.is_some(),
                carrier,
            )
        }
        // `x = e1` — rebinding of a mutable variable.
        Statement::Reassignment((x, _), _x_typ, (e1, _), carrier, question_mark) => {
            make_let_binding(
                Pattern::IdentPat(x.clone(), true),
                None,
                translate_expression(e1.clone(), top_ctx),
                question_mark.is_some(),
                carrier,
            )
        }
        // `x[e1] = e2` — rebind x to `<array|seq>_upd x e1 e2`.
        Statement::ArrayUpdate((x, _), (e1, _), (e2, _), carrier, question_mark, typ) => {
            let array_or_seq = array_or_seq(typ.clone().unwrap(), top_ctx);
            let trans_e1 = translate_expression(e1.clone(), top_ctx);
            let trans_e2 = translate_expression(e2.clone(), top_ctx);
            let expr = {
                let array_upd_payload = array_or_seq
                    .append(RcDoc::as_string("_upd"))
                    .append(RcDoc::space())
                    .append(translate_ident(x.clone()))
                    .append(RcDoc::space())
                    .append(make_paren(trans_e1))
                    .append(RcDoc::space())
                    .append(make_paren(
                        RcDoc::as_string("is_pure ").append(make_paren(trans_e2)),
                    ));
                make_let_binding(
                    Pattern::IdentPat(x.clone(), false),
                    typ.clone()
                        .map(|(_, (x, _))| rustspec_to_coq_ssprove_state::translate_base_typ(x)),
                    array_upd_payload,
                    question_mark.is_some(),
                    carrier,
                )
            };
            expr
        }
        // Final expression of a block, lifted into the enclosing scope.
        Statement::ReturnExp(e1, _typ) => RcDoc::as_string("lift_scope")
            .append(RcDoc::space())
            .append(RcDoc::as_string("(H_loc_incl := _) (H_opsig_incl := _)"))
            .append(RcDoc::space())
            .append(make_paren(translate_expression(e1.clone(), top_ctx))),
        // `if cond { b1 } [else { b2 }]`: binds the tuple of mutated
        // variables to the (scope-lifted) result of the chosen branch.
        Statement::Conditional((cond, _), (mut b1, _), b2, mutated) => {
            let mutated_info = mutated.unwrap();
            // Tuple pattern of every variable either branch mutates.
            let pat = Pattern::Tuple(
                mutated_info
                    .vars
                    .0
                    .iter()
                    .sorted()
                    .map(|i| {
                        (
                            Pattern::IdentPat(Ident::Local(i.clone()), i.mutable),
                            DUMMY_SP.into(),
                        )
                    })
                    .collect(),
            );
            let b1_question_mark = *b1.contains_question_mark.as_ref().unwrap();
            let b2_question_mark = match &b2 {
                None => false,
                Some(b2) => *b2.0.contains_question_mark.as_ref().unwrap(),
            };
            let either_blocks_contains_question_mark = b1_question_mark || b2_question_mark;
            // Both branches must end by returning the mutated variables
            // (wrapped in Ok when a `?` occurs in either branch).
            b1.stmts.push(add_ok_if_result(
                mutated_info.stmt.clone(),
                if either_blocks_contains_question_mark {
                    mutated_info.early_return_type.clone()
                } else {
                    None
                },
            ));
            let trans_cond = translate_expression(cond.clone(), top_ctx);
            let block_1 = translate_block(b1.clone(), true, top_ctx);
            let else_expr = match b2.clone() {
                // No else branch: synthesize one that just returns the
                // (unchanged) mutated variables.
                None => translate_statements(
                    vec![add_ok_if_result(
                        mutated_info.stmt.clone(),
                        if either_blocks_contains_question_mark {
                            mutated_info.early_return_type.clone()
                        } else {
                            None
                        },
                    )]
                    .iter(),
                    top_ctx,
                    smv.clone(),
                    function_dependencies.clone(),
                ),
                Some((mut b2, _)) => {
                    b2.stmts.push(add_ok_if_result(
                        mutated_info.stmt.clone(),
                        if either_blocks_contains_question_mark {
                            mutated_info.early_return_type.clone()
                        } else {
                            None
                        },
                    ));
                    let block2_expr = translate_block(b2.clone(), true, top_ctx);
                    // Lift the else block from its own location scope (L1)
                    // into the surrounding one (L2).
                    RcDoc::space()
                        .append(RcDoc::as_string("lift_scope"))
                        .append(RcDoc::space())
                        .append(make_paren(RcDoc::as_string("L1 := ").append(
                            rustspec_to_coq_ssprove_state::fset_from_scope(b2.mutable_vars.clone()),
                        )))
                        .append(RcDoc::space())
                        .append(make_paren(RcDoc::as_string("L2 := ").append(
                            rustspec_to_coq_ssprove_state::fset_from_scope(smv.clone()),
                        )))
                        .append(RcDoc::space())
                        .append(make_paren(
                            RcDoc::as_string("I1 := ").append(RcDoc::as_string("[interface]")),
                        ))
                        .append(RcDoc::space())
                        .append(make_paren(
                            RcDoc::as_string("I2 := ").append(RcDoc::as_string("[interface]")),
                        ))
                        .append(RcDoc::space())
                        .append(RcDoc::as_string("(H_loc_incl := _) (H_opsig_incl := _)"))
                        .append(RcDoc::space())
                        .append(make_paren(block2_expr))
                }
            };
            // The then-branch is lifted the same way as the else-branch.
            let expr = RcDoc::as_string("if")
                .append(RcDoc::space())
                .append(trans_cond.clone())
                .append(RcDoc::space())
                .append(RcDoc::as_string(":bool_ChoiceEquality"))
                .append(RcDoc::line())
                .append(RcDoc::as_string("then"))
                .append(RcDoc::space())
                .append(RcDoc::as_string("lift_scope"))
                .append(RcDoc::space())
                .append(make_paren(RcDoc::as_string("L1 := ").append(
                    rustspec_to_coq_ssprove_state::fset_from_scope(b1.mutable_vars.clone()),
                )))
                .append(RcDoc::space())
                .append(make_paren(RcDoc::as_string("L2 := ").append(
                    rustspec_to_coq_ssprove_state::fset_from_scope(smv.clone()),
                )))
                .append(RcDoc::space())
                .append(make_paren(
                    RcDoc::as_string("I1 := ").append(RcDoc::as_string("[interface]")),
                ))
                .append(RcDoc::space())
                .append(make_paren(
                    RcDoc::as_string("I2 := ").append(RcDoc::as_string("[interface]")),
                ))
                .append(RcDoc::space())
                .append(RcDoc::as_string("(H_loc_incl := _) (H_opsig_incl := _)"))
                .append(RcDoc::space())
                .append(make_paren(block_1.clone()))
                .append(RcDoc::line())
                .append(RcDoc::as_string("else"))
                .append(RcDoc::space())
                .append(else_expr);
            make_let_binding(
                pat,
                None,
                expr,
                either_blocks_contains_question_mark,
                mutated_info.early_return_type.clone(),
            )
        }
        // `for x in e1..e2 { b }`: folds the mutated variables through the
        // loop body with `foldi_both'` (or its monadic variant).
        Statement::ForLoop(x, (e1, _), (e2, _), (mut b, _)) => {
            let mutated_info = b.mutated.clone().unwrap();
            let b_question_mark = *b.contains_question_mark.as_ref().unwrap();
            b.stmts.push(add_ok_if_result(
                mutated_info.stmt.clone(),
                if b_question_mark {
                    mutated_info.early_return_type.clone()
                } else {
                    None
                },
            ));
            let mut_tuple = {
                // if there is only one element, just print the identifier instead of making a tuple
                if mutated_info.vars.0.len() == 1 {
                    match mutated_info.vars.0.iter().next() {
                        None => Pattern::WildCard,
                        Some(i) => Pattern::IdentPat(Ident::Local(i.clone()), false),
                    }
                }
                // print as tuple otherwise
                else {
                    Pattern::Tuple(
                        mutated_info
                            .vars
                            .0
                            .iter()
                            .sorted()
                            .map(|i| {
                                (
                                    Pattern::IdentPat(Ident::Local(i.clone()), false),
                                    DUMMY_SP.into(),
                                )
                            })
                            .collect(),
                    )
                }
            };
            make_let_binding(
                mut_tuple.clone(),
                None,
                if b_question_mark {
                    RcDoc::as_string("foldi_bind_both'")
                } else {
                    RcDoc::as_string("foldi_both'")
                }
                .append(RcDoc::space())
                .append(make_paren(translate_expression(e1, top_ctx)))
                .append(RcDoc::space())
                .append(make_paren(translate_expression(e2, top_ctx)))
                .append(RcDoc::space())
                .append(match mut_tuple.clone() {
                    Pattern::Tuple(_) => {
                        RcDoc::as_string("prod_ce").append(translate_pattern(mut_tuple.clone()))
                    }
                    _ => translate_pattern(mut_tuple.clone()),
                })
                .append(RcDoc::space())
                .append(make_paren(RcDoc::as_string("L := ").append(make_paren(
                    rustspec_to_coq_ssprove_state::fset_from_scope(smv.clone()),
                ))))
                .append(RcDoc::space())
                .append(make_paren(
                    RcDoc::as_string("I := ").append(RcDoc::as_string("[interface]")),
                ))
                .append(RcDoc::space())
                .append(make_paren(
                    RcDoc::as_string("fun")
                        .append(RcDoc::space())
                        .append(match x.clone() {
                            Some((x, _)) => translate_ident(x.clone()),
                            None => RcDoc::as_string("_"),
                        })
                        .append(RcDoc::space())
                        .append(translate_pattern_tick(mut_tuple.clone()))
                        .append(RcDoc::space())
                        .append(RcDoc::as_string("=>"))
                        .append(RcDoc::line())
                        .append(translate_block(b, true, top_ctx)),
                ))
                .group()
                .nest(2),
                b_question_mark,
                mutated_info.early_return_type.clone(),
            )
        }
    }
    .group()
    // Append the translation of the remaining statements after this one.
    .append(RcDoc::line())
    .append(translate_statements(
        statements,
        top_ctx,
        smv,
        function_dependencies.clone(),
    ))
}
/// Renders a block as a document. When the block's return type is the unit
/// tuple and `omit_extra_unit` is false, an explicit trailing `tt` return
/// expression is appended so the generated Coq block always yields a value.
/// Panics if the block has no return type recorded.
fn translate_block<'a>(
    b: Block,
    omit_extra_unit: bool,
    top_ctx: &'a TopLevelContext,
) -> RcDoc<'a, ()> {
    let mut statements = b.stmts;
    match (&b.return_typ, omit_extra_unit) {
        (None, _) => panic!(), // should not happen,
        (Some(((Borrowing::Consumed, _), (BaseTyp::Tuple(args), _))), false) if args.is_empty() => {
            // Unit-returning block: make the unit return explicit.
            statements.push((
                Statement::ReturnExp(Expression::Lit(Literal::Unit), b.return_typ),
                DUMMY_SP.into(),
            ));
        }
        (Some(_), _) => (),
    }
    let trans_stmt = translate_statements(
        statements.iter(),
        top_ctx,
        b.mutable_vars.clone(),
        b.function_dependencies.clone(),
    );
    trans_stmt.group()
}
/// Pretty-prints one top-level item into Coq/SSProve.
///
/// Function declarations are handled here in full: we emit the input/output
/// `Notation`s (one pair for `pack_type`, one pair for `ChoiceEquality`), a
/// numeric identifier for the function, and finally a `Program Definition`
/// whose body is the translated block wrapped in its `both` type.
/// Enum declarations delegate to the state backend; everything else goes to
/// the pure backend.
fn translate_item<'a>(item: DecoratedItem, top_ctx: &'a TopLevelContext) -> RcDoc<'a, ()> {
    match item.item.clone() {
        Item::FnDecl((f, _), sig, (b, _)) => {
            // Locations/fset for the function's mutable variables; the
            // location definitions are emitted before everything else.
            let (block_vars, block_var_loc_defs) =
                rustspec_to_coq_ssprove_state::fset_and_locations(sig.mutable_vars.clone());
            block_var_loc_defs.append({
                let block_exprs = translate_block(b.clone(), false, top_ctx);
                let interface = RcDoc::as_string("[interface]");
                // `<f>_inp` notation in the custom pack_type grammar:
                // the product of all argument types (or unit for a nullary fn).
                let fun_inp_notation_0 = RcDoc::as_string("Notation")
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("\"'"))
                    .append(
                        translate_ident(Ident::TopLevel(f.clone()))
                            .append(RcDoc::as_string("_inp")),
                    )
                    .append(RcDoc::as_string("'\""))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(":="))
                    .append(make_paren(
                        if sig.args.is_empty() {
                            RcDoc::as_string("unit_ChoiceEquality")
                        } else {
                            RcDoc::intersperse(
                                sig.args.iter().map(|((_x, _), (tau, _))| {
                                    rustspec_to_coq_ssprove_state::translate_typ(tau.clone())
                                }),
                                RcDoc::space()
                                    .append(RcDoc::as_string("'×"))
                                    .append(RcDoc::space()),
                            )
                        }
                        .append(RcDoc::as_string(" : choice_type")),
                    ))
                    .append(RcDoc::as_string(" (in custom pack_type at level 2)."));
                // Same `<f>_inp` notation, but at the ChoiceEquality level.
                let fun_inp_notation_1 = RcDoc::as_string("Notation")
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("\"'"))
                    .append(
                        translate_ident(Ident::TopLevel(f.clone()))
                            .append(RcDoc::as_string("_inp")),
                    )
                    .append(RcDoc::as_string("'\""))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(":="))
                    .append(make_paren(
                        if sig.args.is_empty() {
                            RcDoc::as_string("unit_ChoiceEquality")
                        } else {
                            RcDoc::intersperse(
                                sig.args.iter().map(|((_x, _), (tau, _))| {
                                    rustspec_to_coq_ssprove_state::translate_typ(tau.clone())
                                }),
                                RcDoc::space()
                                    .append(RcDoc::as_string("'×"))
                                    .append(RcDoc::space()),
                            )
                        }
                        .append(RcDoc::as_string(" : ChoiceEquality")),
                    ))
                    .append(RcDoc::as_string(" (at level 2)."));
                // `<f>_out` notations: the translated return type, again once
                // for pack_type and once for ChoiceEquality.
                let fun_out_notation_0 = RcDoc::as_string("Notation")
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("\"'"))
                    .append(
                        translate_ident(Ident::TopLevel(f.clone()))
                            .append(RcDoc::as_string("_out")),
                    )
                    .append(RcDoc::as_string("'\""))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(":="))
                    .append(make_paren(
                        rustspec_to_coq_ssprove_state::translate_base_typ(sig.ret.0.clone())
                            .append(RcDoc::as_string(" : choice_type")),
                    ))
                    .append(RcDoc::as_string(" (in custom pack_type at level 2)."));
                let fun_out_notation_1 = RcDoc::as_string("Notation")
                    .append(RcDoc::space())
                    .append(RcDoc::as_string("\"'"))
                    .append(
                        translate_ident(Ident::TopLevel(f.clone()))
                            .append(RcDoc::as_string("_out")),
                    )
                    .append(RcDoc::as_string("'\""))
                    .append(RcDoc::space())
                    .append(RcDoc::as_string(":="))
                    .append(make_paren(
                        rustspec_to_coq_ssprove_state::translate_base_typ(sig.ret.0.clone())
                            .append(RcDoc::as_string(" : ChoiceEquality")),
                    ))
                    .append(RcDoc::as_string(" (at level 2)."));
                // `Definition <F> : nat := <fresh id>.` — a unique numeric tag
                // for the function (uppercased name).
                let fun_ident_def = rustspec_to_coq_ssprove_state::make_definition(
                    RcDoc::as_string(f.clone().string.to_uppercase()),
                    Some(RcDoc::as_string("nat")),
                    RcDoc::as_string(fresh_codegen_id()),
                );
                // `both (fset of locals) [interface] (return type)`.
                let both_type = RcDoc::as_string("both")
                    .append(RcDoc::space())
                    .append(make_paren(block_vars.clone()))
                    .append(RcDoc::space())
                    .append(interface)
                    .append(RcDoc::space())
                    .append(make_paren(
                        rustspec_to_coq_ssprove_state::translate_base_typ(sig.ret.0.clone()),
                    ));
                // Translated body, ascribed with the `both` type.
                let package_wraped_code_block = make_paren(
                    make_paren(block_exprs.group())
                        .append(RcDoc::space())
                        .append(":")
                        .append(RcDoc::space())
                        .append(both_type.clone()),
                );
                RcDoc::line()
                    .append(fun_inp_notation_0)
                    .append(RcDoc::line())
                    .append(fun_inp_notation_1)
                    .append(RcDoc::line())
                    .append(fun_out_notation_0)
                    .append(RcDoc::line())
                    .append(fun_out_notation_1)
                    .append(RcDoc::line())
                    .append(fun_ident_def)
                    .append(RcDoc::line())
                    .append(RcDoc::as_string("Program Definition "))
                    .append(rustspec_to_coq_ssprove_pure::make_definition_inner(
                        translate_ident(Ident::TopLevel(f.clone()))
                            .append(RcDoc::space())
                            .append(RcDoc::intersperse(
                                sig.args.iter().map(|((x, _), (tau, _))| {
                                    make_paren(
                                        translate_ident(x.clone())
                                            .append(RcDoc::space())
                                            .append(RcDoc::as_string(":"))
                                            .append(RcDoc::space())
                                            .append(
                                                rustspec_to_coq_ssprove_state::translate_base_typ(
                                                    tau.clone().1 .0,
                                                ),
                                            ),
                                    )
                                }),
                                RcDoc::space(),
                            ))
                            .append(RcDoc::line())
                            .append(RcDoc::as_string(":"))
                            .append(RcDoc::space())
                            .append(both_type)
                            .group(),
                        None,
                        package_wraped_code_block.group(),
                    ))
                    // Catch any obligation the tactic could not discharge.
                    .append(RcDoc::hardline().append(RcDoc::as_string("Fail Next Obligation.")))
            })
        }
        Item::EnumDecl(_, _) => rustspec_to_coq_ssprove_state::translate_item(item, top_ctx),
        _ => rustspec_to_coq_ssprove_pure::translate_item(item, top_ctx),
    }
}
/// Pretty-prints every item of the program, separating items with two
/// hard line breaks.
fn translate_program<'a>(p: &'a Program, top_ctx: &'a TopLevelContext) -> RcDoc<'a, ()> {
    let item_docs = p.items.iter().map(|(item, _)| {
        translate_item(item.clone(), top_ctx)
            .append(RcDoc::hardline())
            .append(RcDoc::hardline())
    });
    RcDoc::concat(item_docs)
}
/// Translates `p` to Coq/SSProve and writes it to the path named by `file`.
///
/// Reports a session error (instead of panicking) when the output file cannot
/// be created. The fixed preamble sets up all required Coq imports, scopes,
/// and the obligation tactic used by the generated `Program Definition`s.
pub fn translate_and_write_to_file(
    sess: &Session,
    p: &Program,
    file: &str,
    top_ctx: &TopLevelContext,
) {
    let file = file.trim();
    let path = path::Path::new(file);
    let mut output = match File::create(&path) {
        Ok(f) => f,
        Err(why) => {
            sess.err(format!("Unable to write to output file {}: \"{}\"", file, why).as_str());
            return;
        }
    };
    write!(
        output,
        "(** This file was automatically generated using Hacspec **)\n\
         Set Warnings \"-notation-overridden,-ambiguous-paths\".\n\
         From Crypt Require Import choice_type Package Prelude.\n\
         Import PackageNotation.\n\
         From extructures Require Import ord fset.\n\
         From mathcomp.word Require Import ssrZ word.\n\
         From Jasmin Require Import word.\n\
         \n\
         From Coq Require Import ZArith.\n\
         Import List.ListNotations.\n\
         Open Scope list_scope.\n\
         Open Scope Z_scope.\n\
         Open Scope bool_scope.\n\
         \n\
         Require Import ChoiceEquality.\n\
         Require Import LocationUtility.\n\
         Require Import Hacspec_Lib_Comparable.\n\
         Require Import Hacspec_Lib_Pre.\n\
         Require Import Hacspec_Lib.\n\
         \n\
         Open Scope hacspec_scope.\n\n\
         Obligation Tactic := try timeout 8 solve_ssprove_obligations.\n",
    )
    .unwrap();
    // Render the whole program at 80 columns into a buffer, then append it.
    let width = 80;
    let mut rendered = Vec::new();
    translate_program(p, top_ctx)
        .render(width, &mut rendered)
        .unwrap();
    write!(output, "{}", String::from_utf8(rendered).unwrap()).unwrap()
}
|
extern crate cc;
fn main() {
cc::Build::new()
.file("src/question1.s")
.file("src/question2a.s")
.file("src/question2b.s")
.file("src/question2c.s")
.compile("task1-lib");
} |
// This file is part of rdma-core. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT. No part of rdma-core, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of rdma-core. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/rdma-core/master/COPYRIGHT.
/// Safe wrapper around an ibverbs protection domain (`ibv_pd`).
///
/// Borrows the `Context` it was created from so the domain cannot outlive it.
pub struct ProtectionDomain<'a>
{
    pointer: *mut ibv_pd, // non-null raw handle (checked in `new`)
    context: &'a Context, // owning device context
}
impl<'a> Drop for ProtectionDomain<'a>
{
    /// Releases the underlying `ibv_pd` handle when the wrapper is dropped.
    // NOTE(review): `destroy` appears to be a project extension method on the
    // raw pointer — presumably it calls `ibv_dealloc_pd`; confirm.
    #[inline(always)]
    fn drop(&mut self)
    {
        self.pointer.destroy();
    }
}
impl<'a> ProtectionDomain<'a>
{
    /// Wraps a raw `ibv_pd` pointer. Debug builds assert the pointer is
    /// non-null; the caller is responsible for its validity.
    #[inline(always)]
    fn new(pointer: *mut ibv_pd, context: &'a Context) -> Self
    {
        debug_assert!(!pointer.is_null(), "pointer is null");
        Self
        {
            pointer: pointer,
            context: context,
        }
    }
    /// Registers a memory region of `length` bytes starting at `address` with
    /// this protection domain via `ibv_reg_mr`.
    ///
    /// Debug builds assert that `address` is non-null, `length` is non-zero,
    /// and `length` does not exceed the device's `max_mr_size`.
    /// Panics (via `panic_on_null!`) if registration fails.
    #[inline(always)]
    pub fn registerMemoryRegion(&'a self, address: *mut c_void, length: usize, access: &MemoryRegionAccess) -> MemoryRegion<'a>
    {
        debug_assert!(!address.is_null(), "address can not be null");
        debug_assert!(length != 0, "length can not be zero");
        debug_assert!(length as u64 <= self.context.attributes().max_mr_size, "length '{}' exceeds device maximum '{}'", length, self.context.attributes().max_mr_size);
        let pointer = panic_on_null!(ibv_reg_mr, self.pointer, address, length, access.as_c_int());
        MemoryRegion
        {
            pointer: pointer,
            lifetime: PhantomData,
        }
    }
    // See: https://www.mankier.com/3/ibv_bind_mw
    // Needs a queue pair, a memory region, a WorkRequestIdentifier, memory address & length, access flags, send flags
    /// Allocates a type-1 memory window. Currently a stub: the allocation is
    /// performed (and its result discarded) before `unimplemented!()` fires.
    #[inline(always)]
    pub fn allocateType1MemoryWindow(&'a self) -> MemoryWindow
    {
        self.allocateMemoryWindow(ibv_mw_type::IBV_MW_TYPE_1);
        unimplemented!();
    }
    /// Allocates a memory window of the given type via `rust_ibv_alloc_mw`,
    /// panicking on failure.
    #[inline(always)]
    fn allocateMemoryWindow(&'a self, memoryWindowType: ibv_mw_type) -> MemoryWindow
    {
        // ibv_post_send should be called for type 2
        // ibv_bind_mw must be called for type 1 : pub fn rust_ibv_bind_mw(qp: *mut ibv_qp, mw: *mut ibv_mw, mw_bind: *mut ibv_mw_bind) -> c_int;
        let pointer = panic_on_null!(rust_ibv_alloc_mw, self.pointer, memoryWindowType);
        MemoryWindow
        {
            pointer: pointer,
            protectionDomain: self,
        }
    }
    // TODO: An extended shared recv queue also has a completion queue, and is only for valid for XRC queue pairs I suspect
    // #[inline(always)]
    // pub fn createUnextendedQueuePair<SendC: CompletionQueue, ReceiveC: CompletionQueue, SharedReceive: SharedReceiveQueue>(&'a self, sendCompletionQueue: &'a SendC, receiveCompletionQueue: &'a ReceiveC, sharedReceiveQueue: Option<&'a SharedReceive>, capabilities: ibv_qp_cap, eachWorkRequestSubmittedToTheSendCompletionQueueGeneratesACompletionEntry: bool) -> UnextendedQueuePair<'a, SendC, ReceiveC, SharedReceive>
    // {
    //     let context = self.context;
    //     assert!(sendCompletionQueue.isValidForContext(context), "sendCompletionQueue is not valid for this protection domain's context");
    //     assert!(receiveCompletionQueue.isValidForContext(context), "receiveCompletionQueue is not valid for this protection domain's context");
    //
    //     let mut attributes = ibv_qp_init_attr
    //     {
    //         qp_context: null_mut(),
    //         send_cq: sendCompletionQueue.pointer(),
    //         recv_cq: receiveCompletionQueue.pointer(),
    //         srq: match sharedReceiveQueue
    //         {
    //             None => null_mut(),
    //             Some(sharedReceiveQueue) =>
    //             {
    //                 assert!(sharedReceiveQueue.isValidForProtectionDomain(self), "shared receive queue is not valid for this protection domain");
    //
    //                 sharedReceiveQueue.pointer()
    //             },
    //         },
    //         cap: capabilities,
    //         qp_type: ibv_qp_type::IBV_QPT_RC,
    //         sq_sig_all: if unlikely(eachWorkRequestSubmittedToTheSendCompletionQueueGeneratesACompletionEntry)
    //         {
    //             1
    //         }
    //         else
    //         {
    //             0
    //         },
    //     };
    //
    //     /*
    //         IBV_QPT_RC = 2,
    //         IBV_QPT_UC = 3,
    //         IBV_QPT_UD = 4,
    //         IBV_QPT_RAW_PACKET = 8,
    //         IBV_QPT_XRC_SEND = 9,
    //         IBV_QPT_XRC_RECV = 10,
    //     */
    //
    //     let pointer = panic_on_null!(ibv_create_qp, self.pointer, &mut attributes);
    //     UnextendedQueuePair::new(pointer, attributes.cap, (self, sendCompletionQueue, receiveCompletionQueue, sharedReceiveQueue))
    //
    //     /*
    //         pub fn rust_ibv_create_qp_ex(context: *mut ibv_context, qp_init_attr_ex: *mut ibv_qp_init_attr_ex) -> *mut ibv_qp;
    //         pub fn rust_ibv_open_qp(context: *mut ibv_context, qp_open_attr: *mut ibv_qp_open_attr) -> *mut ibv_qp;
    //
    //
    //     */
    // }
}
|
/// Prints a friendly greeting to standard output.
pub fn greet() {
    let message = "Hello world!";
    println!("{}", message);
}
|
use std::{
iter::FromIterator,
ops::{Index, IndexMut},
};
pub use num_traits::{One, Zero};
pub mod dim;
use dim::{Dim, Fixed};
pub mod view;
pub mod iter;
/// Convenience re-exports: `use safemat::prelude::*` brings the main types,
/// traits, and the `mat!` macro into scope.
pub mod prelude {
    pub use crate::{
        dim,
        dim::Dim,
        mat,
        ops::ViewOps,
        view::{col::ColumnView, row::RowView, View},
        Matrix,
    };
}
/// A dense `m × n` matrix stored row-major in a boxed slice.
#[derive(Clone, Debug)]
pub struct Matrix<T, M, N> {
    m: M, // row dimension (compile-time `Fixed<_>` or runtime value)
    n: N, // column dimension
    items: Box<[T]>, // exactly `m * n` entries, row-major
}
impl<T, M1, M2, N1, N2> PartialEq<Matrix<T, M2, N2>> for Matrix<T, M1, N1>
where
    T: PartialEq,
    M1: Dim + PartialEq<M2>,
    M2: Dim,
    N1: Dim + PartialEq<N2>,
    N2: Dim,
{
    /// Two matrices are equal iff their dimensions match and all entries
    /// compare equal; dimension types may differ as long as they are
    /// mutually comparable.
    fn eq(&self, rhs: &Matrix<T, M2, N2>) -> bool {
        let dims_match = self.m == rhs.m && self.n == rhs.n;
        dims_match && self.items == rhs.items
    }
}
// `Eq` holds because matrix equality is plain dimension + element equality.
impl<T: Eq, M: Dim + Eq, N: Dim + Eq> Eq for Matrix<T, M, N> {}
/// Error returned when an iterator yields fewer than `m * n` items while
/// filling a matrix.
#[derive(thiserror::Error, PartialEq, Eq)]
#[error("not enough items to fill a {m}x{n} matrix; iterator only contained {num} items")]
pub struct FromIterError {
    m: usize, // requested rows
    n: usize, // requested columns
    num: usize, // items the iterator actually produced
}
impl std::fmt::Debug for FromIterError {
    /// Debug output mirrors the human-readable `Display` message so test
    /// failures show the descriptive error text.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self)
    }
}
impl<T, M: Dim, N: Dim> Matrix<T, M, N> {
    /// Creates an `m × n` matrix with every entry set to `T::default()`.
    #[inline]
    pub fn default_with_dim(m: M, n: N) -> Self
    where
        T: Default,
    {
        Self::from_fn_with_dim(m, n, |_, _| T::default())
    }
    /// Creates an `m × n` matrix by calling `f(row, col)` for every entry,
    /// in row-major order.
    pub fn from_fn_with_dim(m: M, n: N, mut f: impl FnMut(usize, usize) -> T) -> Self {
        let rows = m.dim();
        let cols = n.dim();
        let mut items = Vec::with_capacity(rows * cols);
        for i in 0..rows {
            for j in 0..cols {
                items.push(f(i, j));
            }
        }
        debug_assert_eq!(items.len(), rows * cols);
        Self {
            m,
            n,
            items: items.into_boxed_slice(),
        }
    }
    /// Creates a new matrix with specified dimensions, populating its entries with the item of an iterator.
    /// Fails if the iterator does not contain enough items to completely fill it.
    /// ```
    /// # use safemat::*;
    /// // Creating a matrix from an iterator...
    /// let iter = vec![1, 2, 3, 4].into_iter();
    /// assert_eq!(
    ///     Matrix::try_from_iter_with_dim(dim!(1), dim!(4), iter),
    ///     Ok(mat![1, 2, 3, 4])
    /// );
    ///
    /// // It will fail if there's not enough items.
    /// let iter = vec![1, 2].into_iter();
    /// assert!(matches!(
    ///     Matrix::try_from_iter_with_dim(dim!(1), dim!(4), iter),
    ///     Err(_)
    /// ));
    ///
    /// // Any extra items will be ignored.
    /// let iter = vec![1, 2, 3, 4, 5].into_iter();
    /// assert_eq!(
    ///     Matrix::try_from_iter_with_dim(dim!(1), dim!(3), iter),
    ///     Ok(mat![1, 2, 3])
    /// )
    /// ```
    pub fn try_from_iter_with_dim(
        m: M,
        n: N,
        iter: impl IntoIterator<Item = T>,
    ) -> Result<Self, FromIterError> {
        let len = m.dim() * n.dim();
        // Take at most `len` items so surplus input is ignored.
        let items: Vec<T> = iter.into_iter().take(len).collect();
        if items.len() == len {
            Ok(Self {
                m,
                n,
                items: items.into_boxed_slice(),
            })
        } else {
            Err(FromIterError {
                m: m.dim(),
                n: n.dim(),
                num: items.len(),
            })
        }
    }
    /// Checked entry access: `Some(&item)` when `(i, j)` is in bounds.
    #[inline]
    pub fn get(&self, i: usize, j: usize) -> Option<&T> {
        let (rows, cols) = (self.m.dim(), self.n.dim());
        if i >= rows || j >= cols {
            return None;
        }
        self.items.get(i * cols + j)
    }
}
impl<T: Zero + One, N: Dim> Matrix<T, N, N> {
    /// Returns the identity matrix of specified size.
    ///
    /// ```
    /// # use safemat::*;
    /// let i = Matrix::id_with_dim(dim!(3));
    /// assert_eq!(i, Matrix::from_array([
    ///     [1, 0, 0],
    ///     [0, 1, 0],
    ///     [0, 0, 1],
    /// ]));
    /// ```
    pub fn id_with_dim(n: N) -> Self {
        Self::from_fn_with_dim(n, n, |row, col| match row == col {
            true => T::one(),
            false => T::zero(),
        })
    }
}
/// A matrix whose dimensions are both known at compile time.
pub type FixedMat<T, const M: usize, const N: usize> = Matrix<T, Fixed<M>, Fixed<N>>;
impl<T: Default, const M: usize, const N: usize> Default for FixedMat<T, M, N> {
    /// Fills an `M × N` matrix with `T::default()`.
    #[inline]
    fn default() -> Self {
        Self::default_with_dim(Fixed::<M>, Fixed::<N>)
    }
}
impl<T, const M: usize, const N: usize> FixedMat<T, M, N> {
#[inline]
pub fn from_fn(f: impl FnMut(usize, usize) -> T) -> Self {
Self::from_fn_with_dim(Fixed::<M>, Fixed::<N>, f)
}
pub fn from_array(ary: [[T; N]; M]) -> Self {
use std::array::IntoIter;
let mut items = Vec::with_capacity(M * N);
items.extend(IntoIter::new(ary).flat_map(IntoIter::new));
debug_assert_eq!(items.len(), M * N);
Self {
m: Fixed::<M>,
n: Fixed::<N>,
items: items.into_boxed_slice(),
}
}
#[inline]
pub fn try_from_iter(iter: impl IntoIterator<Item = T>) -> Result<Self, FromIterError> {
Self::try_from_iter_with_dim(Fixed, Fixed, iter)
}
}
impl<T, const M: usize, const N: usize> FromIterator<T> for FixedMat<T, M, N> {
    /// Collects exactly `M * N` items.
    ///
    /// Panics (via `unwrap`) when the iterator is too short; use
    /// `try_from_iter` for a fallible version.
    #[inline]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        Self::try_from_iter(iter).unwrap()
    }
}
/// Type alias for a column vector.
pub type Vector<T, M> = Matrix<T, M, dim!(1)>;
/// Type alias for a column vector with fixed length.
pub type FixedVec<T, const M: usize> = Vector<T, Fixed<M>>;
/// Type alias for a row vector.
pub type RowVec<T, N> = Matrix<T, dim!(1), N>;
/// Type alias for a row vector with fixed length.
pub type FixedRowVec<T, const N: usize> = RowVec<T, Fixed<N>>;
/// Arithmetic and view operations for matrices.
pub mod ops;
impl<T, M: Dim, N: Dim> Index<usize> for Matrix<T, M, N> {
    type Output = T;
    /// Flat (row-major) indexing into the underlying storage.
    #[inline]
    fn index(&self, i: usize) -> &T {
        &self.items[i]
    }
}
// Same flat indexing on a matrix reference, so `(&mat)[i]` works in generic
// code that only holds `&Matrix`.
impl<T, M: Dim, N: Dim> Index<usize> for &'_ Matrix<T, M, N> {
    type Output = T;
    #[inline]
    fn index(&self, i: usize) -> &T {
        &self.items[i]
    }
}
impl<T, M: Dim, N: Dim> IndexMut<usize> for Matrix<T, M, N> {
    /// Mutable flat (row-major) indexing into the underlying storage.
    #[inline]
    fn index_mut(&mut self, i: usize) -> &mut T {
        &mut self.items[i]
    }
}
impl<T, M: Dim, N: Dim> Index<[usize; 2]> for Matrix<T, M, N> {
    type Output = T;
    /// `mat[[i, j]]` row/column access; panics when either coordinate is out
    /// of bounds.
    #[inline]
    fn index(&self, [i, j]: [usize; 2]) -> &T {
        assert!(i < self.m.dim());
        assert!(j < self.n.dim());
        let cols = self.n.dim();
        &self.items[i * cols + j]
    }
}
// Delegates `(&mat)[[i, j]]` to the owned-matrix implementation.
impl<T, M: Dim, N: Dim> Index<[usize; 2]> for &'_ Matrix<T, M, N> {
    type Output = T;
    #[inline]
    fn index(&self, i: [usize; 2]) -> &T {
        Matrix::index(self, i)
    }
}
impl<T, M: Dim, N: Dim> IndexMut<[usize; 2]> for Matrix<T, M, N> {
    /// Mutable `mat[[i, j]]` access; panics when either coordinate is out of
    /// bounds.
    fn index_mut(&mut self, [i, j]: [usize; 2]) -> &mut T {
        assert!(i < self.m.dim());
        assert!(j < self.n.dim());
        &mut self.items[i * self.n.dim() + j]
    }
}
/// Provides something similar to a macro literal syntax.
/// Rows are delineated with a semicolon ;
///
/// ```
/// # use safemat::*;
/// let a = mat![1, 2, 3 ; 4, 5, 6];
/// let b = Matrix::from_array([
///     [1, 2, 3],
///     [4, 5, 6],
/// ]);
/// assert_eq!(a, b);
/// ```
#[macro_export]
macro_rules! mat {
    // Semicolon-separated rows; comma-separated items within a row. Expands
    // to a nested array literal passed to `Matrix::from_array`.
    ($($($itm: expr),*);*) => {
        Matrix::from_array([
            $([$($itm),*]),*
        ])
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // `mat!` must expand to the same matrix as the equivalent `from_array`.
    #[test]
    fn mat_macro() {
        let a = mat![1, 2, 3 ; 4, 5, 6];
        assert_eq!(a, Matrix::from_array([[1, 2, 3], [4, 5, 6]]));
    }
    // Row-major layout: entry (i, j) holds ary[i][j].
    #[test]
    fn from_ary() {
        let mat = Matrix::from_array([[1, 2, 3], [4, 5, 6]]);
        assert_eq!(mat[[0, 0]], 1);
        assert_eq!(mat[[0, 1]], 2);
        assert_eq!(mat[[0, 2]], 3);
        assert_eq!(mat[[1, 0]], 4);
        assert_eq!(mat[[1, 1]], 5);
        assert_eq!(mat[[1, 2]], 6);
    }
    // Transposing swaps coordinates: (i, j) -> (j, i).
    #[test]
    fn transpose() {
        let mat = mat![1, 2, 3 ; 4, 5, 6];
        eprintln!("{:?}", mat);
        let mat = mat.transpose();
        eprintln!("{:?}", mat);
        assert_eq!(mat[[0, 0]], 1);
        assert_eq!(mat[[1, 0]], 2);
        assert_eq!(mat[[2, 0]], 3);
        assert_eq!(mat[[0, 1]], 4);
        assert_eq!(mat[[1, 1]], 5);
        assert_eq!(mat[[2, 1]], 6);
    }
    // 1x3 times 3x1 gives the 1x1 dot product: 1*4 + 2*5 + 3*6 = 32.
    #[test]
    fn mul_fixed() {
        let a = mat![1, 2, 3];
        let b = mat![4; 5; 6];
        let c = &a * &b;
        assert_eq!(c, mat![32]);
    }
    // Vertical concatenation stacks a second row below the first.
    #[test]
    fn vcat() {
        let a = mat![1, 2, 3];
        let b = mat![4, 5, 6];
        let c = a.vcat(b);
        assert_eq!(c, mat![1, 2, 3 ; 4, 5, 6]);
    }
}
|
use util::{
bitfields,
bits::Bits,
fixedpoint::{FixedPoint16, FixedPoint32},
mem::read_u16,
primitive_enum,
};
use super::line::LineBuffer;
use crate::{
memory::{
io::{IoRegisters, ObjCharVramMapping},
OAM_SIZE, VRAM_SIZE,
},
video::line::{PixelAttrs, OBJ},
};
/// Renders the OBJ (sprite) layer for one scanline into `buf`.
///
/// Scans OAM for enabled objects, sorts them (descending priority value,
/// then descending OAM index), and draws each object's pixels for `line`
/// until the per-scanline OBJ cycle budget is exhausted.
pub fn render(
    line: u16,
    buf: &mut LineBuffer,
    ioregs: &IoRegisters,
    oam: &[u8; OAM_SIZE as usize],
    vram: &[u8; VRAM_SIZE as usize],
) {
    // Per-line OBJ rendering budget: smaller when "H-Blank interval free"
    // is enabled in DISPCNT.
    let mut cycles = if ioregs.dispcnt.hblank_interval_free() {
        954
    } else {
        1210
    };
    // Gather indices of visible objects at the front of `objects`; objects
    // that are disabled (and not rot/scale) are parked at the tail as 0xFF
    // and excluded by the slice below.
    let mut objects = [0u8; 128];
    let mut visible_objects = 0;
    let mut disabled_idx = 128;
    (0..128).rev().for_each(|obj_idx| {
        let attrs_index = obj_idx as usize * 8;
        let attr0 = ObjAttr0::new(read_u16(oam, attrs_index));
        if attr0.disabled() && !attr0.rotscale() {
            disabled_idx -= 1;
            objects[disabled_idx] = 0xFF;
            return;
        }
        objects[visible_objects] = obj_idx as u8;
        visible_objects += 1;
    });
    let objects = &mut objects[..visible_objects];
    // Sort descending by priority value, ties broken by descending OAM index,
    // so lower-precedence objects are drawn first and get overdrawn.
    objects.sort_unstable_by(|&ol, &or| {
        let attrs_index_lhs = ol as usize * 8;
        let attr2_lhs = ObjAttr2::new(read_u16(oam, attrs_index_lhs + 4));
        let attrs_index_rhs = or as usize * 8;
        let attr2_rhs = ObjAttr2::new(read_u16(oam, attrs_index_rhs + 4));
        attr2_rhs
            .priority()
            .cmp(&attr2_lhs.priority())
            .then(or.cmp(&ol))
    });
    for &mut obj_idx in objects {
        if cycles == 0 {
            break;
        }
        // Each OAM entry is 8 bytes: three 16-bit attributes + padding.
        let attrs_index = obj_idx as usize * 8;
        let attr0 = ObjAttr0::new(read_u16(oam, attrs_index));
        let attr1 = ObjAttr1::new(read_u16(oam, attrs_index + 2));
        let attr2 = ObjAttr2::new(read_u16(oam, attrs_index + 4));
        let (width, height) = attr1.size(attr0.shape());
        // Double-size affine objects occupy a 2x render window.
        let (display_width, display_height) = if attr0.rotscale() && attr0.double_size() {
            (width * 2, height * 2)
        } else {
            (width, height)
        };
        let mut left = attr1.x();
        let top = attr0.y();
        // Y wraps at 256; handle both wrapped and unwrapped vertical spans.
        let bottom = (top + display_height - 1) % 256;
        let in_bounds_vertical = top <= bottom && top <= line && bottom >= line;
        let in_bounds_vertical_wrapped = top > bottom && (top <= line || bottom >= line);
        if !in_bounds_vertical && !in_bounds_vertical_wrapped {
            continue;
        }
        // Charge the cycle cost for this object; stop entirely when the
        // budget is spent (possibly truncating the object's right edge).
        let mut right = if let Some(right) =
            consume_obj_cycles(&mut cycles, attr0.rotscale(), display_width, left)
        {
            right
        } else {
            break;
        };
        let in_bounds_horizontal = left < 240 || right < 240;
        if !in_bounds_horizontal {
            continue;
        }
        // Affine transforms rotate around the center of the display window.
        let origin_x = FixedPoint32::from(display_width / 2);
        let origin_y = FixedPoint32::from(display_height / 2);
        let xdraw_start;
        if left < right {
            xdraw_start = FixedPoint32::from(0u32);
            right = right.min(239);
        } else {
            // we have wrapped here so we need to start drawing farther to the right
            // of the object, but there will always be enough space on screen to draw the
            // object to the end.
            left = 0;
            xdraw_start = FixedPoint32::from(display_width - right - 1);
        }
        // Vertical position inside the object, accounting for Y wrap.
        let ydraw_start = FixedPoint32::from(if line > bottom {
            line - top
        } else {
            display_height - (bottom - line) - 1
        });
        let mut xdraw_start_distance = xdraw_start - origin_x;
        let mut ydraw_start_distance = ydraw_start - origin_y;
        // Per-pixel texture-space steps: (dx, dy) per screen pixel, and
        // (dmx, dmy) per screen line.
        let (dx, dmx, dy, dmy);
        if attr0.rotscale() {
            // Affine parameters live interleaved in OAM, 4 per group of 32
            // bytes at offsets 0x06/0x0E/0x16/0x1E.
            let params_idx = attr1.rotscale_param() as usize;
            dx = FixedPoint32::from(FixedPoint16::raw(
                (read_u16(oam, 0x06 + (params_idx * 32))) as i16,
            ));
            dmx = FixedPoint32::from(FixedPoint16::raw(
                (read_u16(oam, 0x0E + (params_idx * 32))) as i16,
            ));
            dy = FixedPoint32::from(FixedPoint16::raw(
                (read_u16(oam, 0x16 + (params_idx * 32))) as i16,
            ));
            dmy = FixedPoint32::from(FixedPoint16::raw(
                (read_u16(oam, 0x1E + (params_idx * 32))) as i16,
            ));
        } else {
            // Non-affine: unit steps, with flips expressed as sign changes.
            dy = FixedPoint32::from(0u32);
            dmx = FixedPoint32::from(0u32);
            dmy = FixedPoint32::from(1u32);
            if attr1.horizontal_flip() {
                dx = FixedPoint32::from(-1i32);
                // NOTE: add 1 so that we start on the other side of the center line.
                xdraw_start_distance += FixedPoint32::from(1u32);
            } else {
                dx = FixedPoint32::from(1u32);
            }
            if attr1.vertical_flip() {
                ydraw_start_distance = -ydraw_start_distance;
            }
        }
        // Down here we use the real width and height for the origin instead of the double sized
        // because I randomly wrote it and it works. Maybe one day I'll actually do the math and
        // come up with an exact reason as to why. For now I just had a feeling and I was right.
        let mut x = FixedPoint32::from(width / 2)
            + (ydraw_start_distance * dmx)
            + (xdraw_start_distance * dx);
        let mut y = FixedPoint32::from(height / 2)
            + (ydraw_start_distance * dmy)
            + (xdraw_start_distance * dy);
        // OBJ tiles start at 0x10000 in VRAM.
        let tile_data = &vram[0x10000..];
        // The number of characters (tiles) we have to jump to reach the next
        // line of the object.
        let char_stride: usize =
            if ioregs.dispcnt.obj_char_vram_mapping() == ObjCharVramMapping::OneDimensional {
                width as usize / 8
            } else if attr0.palette256() {
                16
            } else {
                32
            };
        let first_tile_index = attr2.character_name() as usize;
        let mode = attr0.mode();
        // Blending attributes shared by every pixel of this object.
        let mut attrs = PixelAttrs::default();
        if ioregs.bldcnt.is_first_target(OBJ) {
            attrs.set_first_target();
        }
        if ioregs.bldcnt.is_second_target(OBJ) {
            attrs.set_second_target();
        }
        if mode == ObjMode::SemiTransparent {
            attrs.set_semi_transparent();
        }
        // Mosaic sizes of 0 mean "mosaic disabled" below.
        let mut mosaic_x = 0usize;
        let mut mosaic_y = 0usize;
        if attr0.mosaic() {
            mosaic_x = ioregs.mosaic.obj_h() as usize + 1;
            mosaic_y = ioregs.mosaic.obj_v() as usize + 1;
        }
        if attr0.palette256() {
            // 8bpp: one byte per pixel, 64 bytes per 8x8 tile.
            const BYTES_PER_LINE: usize = 8;
            const BYTES_PER_TILE: usize = 64;
            let width = width as usize;
            let height = height as usize;
            attrs.set_8bpp();
            attrs.set_priority(attr2.priority());
            for screen_x in left as usize..=right as usize {
                let mut xi = x.integer() as usize;
                let mut yi = y.integer() as usize;
                if xi < width && yi < height {
                    // Snap the sample position to the mosaic grid.
                    if mosaic_x > 0 {
                        xi -= xi % mosaic_x;
                    }
                    if mosaic_y > 0 {
                        yi -= yi % mosaic_y;
                    }
                    // FIXME Lower bit of the tile should be ignored. From GBATEK:
                    //
                    // When using the 256 Colors/1 Palette mode, only each second tile may be used,
                    // the lower bit of the tile number should be zero (in 2-dimensional mapping mode,
                    // the bit is completely ignored).
                    let tile =
                        (((first_tile_index / 2) as usize) + ((yi / 8) * char_stride) + (xi / 8))
                            & 0x3FF;
                    // When using BG Mode 3-5 (Bitmap Modes), only tile numbers 512-1023 may be used.
                    // That is because lower 16K of OBJ memory are used for BG. Attempts to use tiles 0-511 are ignored (not displayed).
                    if tile < 512 && ioregs.dispcnt.is_bitmap_mode() {
                        continue;
                    }
                    let pixel_offset =
                        (tile * BYTES_PER_TILE) + ((yi % 8) * BYTES_PER_LINE) + (xi % 8);
                    let entry = tile_data[pixel_offset as usize];
                    // OBJ-window pixels select the window instead of drawing.
                    if mode != ObjMode::ObjWindow {
                        buf.put_obj_8bpp(attrs, screen_x, entry);
                    } else if entry != 0 {
                        buf.put_obj_window(screen_x);
                    }
                }
                x += dx;
                y += dy;
            }
        } else {
            // 4bpp: two pixels per byte, 32 bytes per 8x8 tile.
            const BYTES_PER_LINE: usize = 4;
            const BYTES_PER_TILE: usize = 32;
            let width = width as usize;
            let height = height as usize;
            attrs.set_4bpp();
            attrs.set_priority(attr2.priority());
            for screen_x in left as usize..=right as usize {
                let mut xi = x.integer() as usize;
                let mut yi = y.integer() as usize;
                if xi < width && yi < height {
                    if mosaic_x > 0 {
                        xi -= xi % mosaic_x;
                    }
                    if mosaic_y > 0 {
                        yi -= yi % mosaic_y;
                    }
                    let tile = (first_tile_index + ((yi / 8) * char_stride) + (xi / 8)) & 0x3FF;
                    let pixel_offset =
                        (tile * BYTES_PER_TILE) + ((yi % 8) * BYTES_PER_LINE) + (xi % 8) / 2;
                    // Even pixels use the low nibble, odd pixels the high one.
                    let entry = (tile_data[pixel_offset] >> ((xi % 2) << 2)) & 0xF;
                    if mode != ObjMode::ObjWindow {
                        buf.put_obj_4bpp(attrs, screen_x, attr2.palette() as _, entry);
                    } else if entry != 0 {
                        buf.put_obj_window(screen_x);
                    }
                }
                x += dx;
                y += dy;
            }
        }
    }
}
/// Charges `cycles` for rendering one object line and returns the rightmost
/// drawable pixel's x position (mod 512), or `None` when no cycles remain to
/// render the object at all.
///
/// Affine (`rs`) objects pay a 10-cycle setup cost plus 2 cycles per pixel;
/// regular objects pay 1 cycle per pixel. When the budget runs out mid-object
/// the returned right edge is truncated accordingly.
fn consume_obj_cycles(cycles: &mut u16, rs: bool, width: u16, left: u16) -> Option<u16> {
    if rs {
        // 10-cycle start-up for affine sprites; bail if nothing is left.
        *cycles = cycles.saturating_sub(10);
        if *cycles == 0 {
            return None;
        }
        let cost = width * 2;
        Some(if cost > *cycles {
            // Budget exhausted mid-object: only cycles/2 pixels fit.
            let truncated = (left + (*cycles / 2)) % 512;
            *cycles = 0;
            truncated
        } else {
            *cycles -= cost;
            (left + width) % 512
        })
    } else if width > *cycles {
        // Budget exhausted mid-object: one pixel per remaining cycle.
        let truncated = (left + *cycles - 1) % 512;
        *cycles = 0;
        Some(truncated)
    } else {
        *cycles -= width;
        Some((left + width - 1) % 512)
    }
}
// OAM attribute 0: Y coordinate plus rotation/scaling, double-size/disable,
// mode, mosaic, palette-depth, and shape fields.
bitfields! {
    struct ObjAttr0: u16 {
        [0,7] y, set_y: u16,
        [8] rotscale, set_rotscale: bool,
        // When rotscale flag is set:
        [9] double_size, set_double_size: bool,
        // When rotscale flag is clear:
        [9] disabled, set_disabled: bool,
        [10,11] mode, set_mode: ObjMode,
        [12] mosaic, set_mosaic: bool,
        [13] palette256, set_palette256: bool,
        [14,15] shape, set_shape: ObjShape,
    }
}
// OAM attribute 1: X coordinate plus either the rot/scale parameter index or
// the flip flags, depending on attr0's rotscale bit. (Bits 14-15 hold the
// size, read directly in `ObjAttr1::size`.)
bitfields! {
    struct ObjAttr1: u16 {
        [0,8] x, set_x: u16,
        // When rotscale flag is set in attr0:
        [9,13] rotscale_param, set_rotscale_param: u16,
        // When rotscale flag is clear in attr0:
        [12] horizontal_flip, set_horizontal: bool,
        [13] vertical_flip, set_vertical_flip: bool,
    }
}
impl ObjAttr1 {
    /// Returns the object's (width, height) in pixels, combining the 2-bit
    /// size field (attr1 bits 14-15) with the shape from attr0.
    pub fn size(&self, shape: ObjShape) -> (u16, u16) {
        // Size   Square    Horizontal   Vertical
        // 0      8x8       16x8         8x16
        // 1      16x16     32x8         8x32
        // 2      32x32     32x16        16x32
        // 3      64x64     64x32        32x64
        let size = self.value.bits(14, 15) as usize;
        let table: [(u16, u16); 4] = match shape {
            ObjShape::Square => [(8, 8), (16, 16), (32, 32), (64, 64)],
            ObjShape::Horizontal => [(16, 8), (32, 8), (32, 16), (64, 32)],
            ObjShape::Vertical => [(8, 16), (8, 32), (16, 32), (32, 64)],
            // Prohibited shape falls back to the smallest size, matching the
            // original catch-all arm.
            ObjShape::Prohibited => return (8, 8),
        };
        // `size` is a 2-bit extraction, so the index is always in range.
        table[size]
    }
}
// OAM attribute 2: base tile number, draw priority, and 4bpp palette index.
bitfields! {
    struct ObjAttr2: u16 {
        [0,9] character_name, set_character_name: u16,
        [10,11] priority, set_priority: u16,
        [12,15] palette, set_palette: u16,
    }
}
// OBJ rendering mode (attr0 bits 10-11): normal draw, alpha-blend source,
// OBJ-window mask, or the invalid encoding.
primitive_enum! {
    pub enum ObjMode: u16 {
        Normal,
        SemiTransparent,
        ObjWindow,
        Invalid,
    }
}
// OBJ shape (attr0 bits 14-15); combined with the attr1 size field to pick
// the final dimensions (see `ObjAttr1::size`).
primitive_enum! {
    pub enum ObjShape: u16 {
        Square,
        Horizontal,
        Vertical,
        Prohibited,
    }
}
|
use std::{
collections::{HashMap, VecDeque},
sync::Arc,
};
use anyhow::Result;
use crossbeam::atomic::AtomicCell;
use handlegraph::{
handle::{Handle, NodeId},
pathhandlegraph::*,
};
use handlegraph::packedgraph::paths::StepPtr;
use bstr::ByteSlice;
use parking_lot::Mutex;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::quad_tree::QuadTree;
use crate::{app::SharedState, gui::text::LabelPos};
use crate::{geometry::*, universe::Node, view::*};
use nalgebra_glm as glm;
pub mod bed;
pub mod gff;
pub use bed::*;
pub use gff::*;
/// A piece of display text tied to a caller-supplied id that is passed back
/// to hover/click handlers.
#[derive(Debug, Default, Clone)]
pub struct Label {
    id: usize, // identifier handed to interaction callbacks
    text: String, // text rendered on screen
}
// #[derive(Debug, Default, Clone)]
/// A collection of labels stored as two parallel vectors: index `i` of
/// `positions` and `labels` together describe one label.
#[derive(Default, Clone)]
pub struct LabelSet {
    positions: Vec<LabelPos>,
    labels: Vec<Label>,
}
impl LabelSet {
    /// Records one label at `pos`. Every `add_*` method funnels through here,
    /// which guarantees `positions` and `labels` always stay the same length.
    fn push(&mut self, pos: LabelPos, label_id: usize, text: &str) {
        self.positions.push(pos);
        self.labels.push(Label {
            id: label_id,
            text: text.to_string(),
        });
    }
    /// Adds a label anchored at a fixed world-space point, with an optional
    /// offset.
    pub fn add_at_world_point(
        &mut self,
        point: Point,
        label_id: usize,
        text: &str,
        offset: Option<Point>,
    ) {
        self.push(LabelPos::World { point, offset }, label_id, text);
    }
    /// Adds a label that follows the given graph handle (no offset).
    pub fn add_at_handle(
        &mut self,
        handle: Handle,
        label_id: usize,
        text: &str,
    ) {
        let pos = LabelPos::Handle {
            handle,
            offset: None,
        };
        self.push(pos, label_id, text);
    }
    /// Adds a label on the forward-orientation handle of `node`.
    pub fn add_at_node(&mut self, node: NodeId, label_id: usize, text: &str) {
        let handle = Handle::pack(node, false);
        self.add_at_handle(handle, label_id, text);
    }
    /// Adds several labels that all share one position.
    pub fn add_many_at<'a, 'b>(
        &'a mut self,
        pos: LabelPos,
        labels: impl Iterator<Item = (usize, &'b str)>,
    ) {
        for (label_id, text) in labels {
            self.push(pos, label_id, text);
        }
    }
    /// Number of labels in the set.
    pub fn len(&self) -> usize {
        self.positions.len()
    }
    /// True when the set holds no labels.
    pub fn is_empty(&self) -> bool {
        self.positions.is_empty()
    }
}
/// Handler ids for a label: indices into `Labels::hover_handlers` and
/// `Labels::click_handlers`, `None` when no callback is registered.
#[derive(Debug, Default, Clone, Copy)]
pub struct LabelHandlers {
    hover_handler: Option<usize>,
    click_handler: Option<usize>,
}
/// A group of labels rendered together at one screen anchor.
#[derive(Debug, Default, Clone)]
struct Cluster {
    offset: Option<Point>, // shared display offset for the whole cluster
    labels: Vec<(Label, LabelHandlers)>,
    // labels: Vec<(Label, Option<usize>)>,
}
// #[derive(Clone)]
/// Spatial index of label clusters, built by merging nearby labels.
pub struct ClusterTree {
    clusters: QuadTree<Cluster>,
}
impl ClusterTree {
pub fn from_boundary(boundary: Rect) -> Self {
Self {
clusters: QuadTree::new(boundary),
}
}
pub fn from_label_tree(
tree: &QuadTree<(Option<Point>, Label, LabelHandlers)>,
label_radius: f32,
scale: f32,
) -> Self {
let mut result = Self::from_boundary(tree.boundary());
result.insert_label_tree(tree, label_radius, scale);
result
}
pub fn insert_label_tree(
&mut self,
tree: &QuadTree<(Option<Point>, Label, LabelHandlers)>,
label_radius: f32,
scale: f32,
) {
let radius = label_radius * scale;
let clusters = &mut self.clusters;
for leaf in tree.leaves() {
for (point, (offset, label, handlers)) in leaf.elems() {
// use the closest cluster if it exists and is within the radius
if let Some(mut cluster) = clusters
.nearest_mut(point)
.filter(|c| c.point().dist(point) <= radius)
{
let cmut = cluster.data_mut();
cmut.labels.push((label.to_owned(), *handlers))
} else {
let new_cluster = Cluster {
offset: *offset,
labels: vec![(label.to_owned(), *handlers)],
};
let _result = clusters.insert(point, new_cluster);
}
}
}
}
/// Draws every clustered label, dispatching hover/click handlers from
/// `label_sets` for the label under the mouse.
///
/// Each cluster draws at most 11 labels, then an "and N more" summary
/// line for the remainder.
pub fn draw_labels(
    &self,
    label_sets: &Labels,
    ctx: &egui::CtxRef,
    shared_state: &SharedState,
) {
    let view = shared_state.view();
    let mouse_pos = shared_state.mouse_pos();

    // the label under the mouse this frame, if any
    let mut interacted: Option<(usize, LabelHandlers)> = None;
    let mut label_rect: Option<Rect> = None;
    let mut hovered = false;
    let mut clicked = false;

    for leaf in self.clusters.leaves() {
        for (origin, cluster) in leaf.elems() {
            let mut y_offset = 0.0;
            let mut count = 0;

            // labels are pushed out from the anchor along the cluster's
            // offset direction
            // (removed: unused local `anchor_dir`)
            let offset = cluster.offset.unwrap_or_default();
            let offset = offset * 20.0;

            let labels = &cluster.labels;

            for (label, handlers) in cluster.labels.iter() {
                // for some reason, the returned rectangle is too tall
                let rect =
                    crate::gui::text::draw_text_at_world_point_offset(
                        ctx,
                        view,
                        origin,
                        offset + Point::new(0.0, y_offset),
                        &label.text,
                    )
                    .map(|r| r.resize(1.01));

                if let Some(rect) = rect {
                    let rect = rect.resize(0.98);
                    if rect.contains(mouse_pos) {
                        label_rect = Some(rect);
                        interacted = Some((label.id, *handlers));
                        hovered = true;
                        // this still needs to be fixed to only
                        // use left clicks
                        if ctx.input().pointer.any_click() {
                            clicked = true;
                        }
                    }
                }

                y_offset += 15.0;
                count += 1;

                // cap the per-cluster label count, then summarize the rest
                if count > 10 {
                    let count = count.min(labels.len());
                    let rem = labels.len() - count;
                    if rem > 0 {
                        let more_label = format!("and {} more", rem);
                        crate::gui::text::draw_text_at_world_point_offset(
                            ctx,
                            view,
                            origin,
                            offset + Point::new(0.0, y_offset),
                            &more_label,
                        );
                    }
                    break;
                }
            }
        }
    }

    if hovered || clicked {
        if let Some((label_id, handlers)) = interacted {
            if clicked {
                if let Some(on_click) = handlers
                    .click_handler
                    .and_then(|id| label_sets.click_handlers.get(&id))
                {
                    on_click(label_id);
                }
            }

            if hovered {
                // highlight the hovered label's rectangle
                if let Some(rect) = label_rect {
                    crate::gui::text::draw_rect(ctx, rect);
                }

                if let Some(on_hover) = handlers
                    .hover_handler
                    .and_then(|id| label_sets.hover_handlers.get(&id))
                {
                    on_hover(label_id);
                }
            }
        }
    }
}
/// Draws one circle of radius `label_radius` at each cluster's anchor
/// point (debug visualization of the clustering).
pub fn draw_clusters(
    &self,
    ctx: &egui::CtxRef,
    view: View,
    label_radius: f32,
) {
    self.clusters.leaves().into_iter().for_each(|leaf| {
        leaf.elems().into_iter().for_each(|(point, _)| {
            crate::gui::text::draw_circle_world(
                ctx,
                view,
                point,
                label_radius,
                None,
            );
        });
    });
}
}
/// All labels in the app, grouped into named sets, plus the handler
/// registries that label interactions dispatch into.
#[derive(Default)]
pub struct Labels {
    // one quadtree of (anchor offset, label, handlers) per named set
    label_trees:
        HashMap<String, QuadTree<(Option<Point>, Label, LabelHandlers)>>,
    // per-set visibility toggles, keyed by set name
    visible: HashMap<String, AtomicCell<bool>>,
    // hover/click callbacks, keyed by the ids stored in `LabelHandlers`
    hover_handlers:
        FxHashMap<usize, Arc<dyn Fn(usize) + Send + Sync + 'static>>,
    click_handlers:
        FxHashMap<usize, Arc<dyn Fn(usize) + Send + Sync + 'static>>,
    // next ids to hand out when registering new handlers
    next_hover_id: usize,
    next_click_id: usize,
}
impl Labels {
    /// All label sets, keyed by set name.
    pub fn label_sets(
        &self,
    ) -> &HashMap<String, QuadTree<(Option<Point>, Label, LabelHandlers)>> {
        &self.label_trees
    }

    /// Visibility flag of the label set `name`, if it exists.
    pub fn visible(&self, name: &str) -> Option<&AtomicCell<bool>> {
        self.visible.get(name)
    }

    /// Builds a quadtree of `labels` positioned relative to `nodes`,
    /// stores it under `name`, and registers `on_label_click` (if any)
    /// as the click handler for every label in the set.
    ///
    /// Newly added sets start out visible.
    pub fn add_label_set(
        &mut self,
        boundary: Rect,
        nodes: &[Node],
        name: &str,
        labels: &LabelSet,
        on_label_click: Option<Arc<dyn Fn(usize) + Send + Sync + 'static>>,
    ) {
        let name = name.to_string();

        let mut label_tree: QuadTree<(Option<Point>, Label, LabelHandlers)> =
            QuadTree::new(boundary);

        // register the click handler under a fresh id
        let click_handler = on_label_click.map(|on_click| {
            let id = self.next_click_id;
            self.next_click_id += 1;
            self.click_handlers.insert(id, on_click);
            id
        });

        let handlers = LabelHandlers {
            hover_handler: None,
            click_handler,
        };

        for (&label_pos, label) in
            labels.positions.iter().zip(labels.labels.iter())
        {
            let world = label_pos.world(nodes);
            let offset = label_pos.offset(nodes);

            if label_tree
                .insert(world, (offset, label.to_owned(), handlers))
                .is_err()
            {
                log::warn!("label could not be inserted into quadtree");
            }
        }

        self.label_trees.insert(name.clone(), label_tree);
        self.visible.insert(name, true.into());
    }

    /// Clusters the labels of every currently visible set into a new
    /// `ClusterTree` over `boundary`, using `label_radius` scaled by the
    /// view's zoom.
    pub fn cluster(
        &self,
        boundary: Rect,
        label_radius: f32,
        view: View,
    ) -> ClusterTree {
        let mut clusters = ClusterTree::from_boundary(boundary);

        for (name, tree) in self.label_trees.iter() {
            if self.visible(name).map(|v| v.load()).unwrap_or_default() {
                // `insert_label_tree` returns (); the old
                // `let _result = ... (&tree)` bound a unit value and
                // took a needless extra reference
                clusters.insert_label_tree(tree, label_radius, view.scale);
            }
        }

        clusters
    }
}
/// Labels derived from one column of an annotation file, attached to the
/// nodes of a single path.
#[derive(Debug, Clone)]
pub struct AnnotationLabelSet {
    pub annotation_name: String,
    pub label_set_name: String,
    pub column_str: String,
    pub column: AnnotationColumn,
    pub path_id: PathId,
    pub path_name: String,
    // shared visibility flag; see `is_visible` / `set_visibility`
    show: Arc<AtomicCell<bool>>,
    // the label texts; `labels` stores indices into this vec
    label_strings: Vec<String>,
    // per-node indices into `label_strings`
    labels: FxHashMap<NodeId, Vec<usize>>,
}
impl AnnotationLabelSet {
    /// Builds a flat `LabelSet` from the per-node label indices,
    /// assigning sequential label ids.
    ///
    /// NOTE(review): hash map iteration order is unspecified, so label
    /// ids are not stable across calls — confirm callers don't depend on
    /// a particular ordering.
    pub fn label_set(&self) -> LabelSet {
        let mut labels = LabelSet::default();

        let mut label_id = 0;

        for (node, label_indices) in self.labels.iter() {
            for &ix in label_indices.iter() {
                let text = &self.label_strings[ix];
                labels.add_at_node(*node, label_id, text);
                label_id += 1;
            }
        }

        labels
    }

    /// Creates the label set for `column` of the annotation collection
    /// `annotations`, attached to the path `path_id`/`path_name`.
    pub fn new<C, R, K>(
        annotations: &C,
        path_id: PathId,
        path_name: &[u8],
        column: &K,
        label_set_name: &str,
        label_strings: Vec<String>,
        labels: FxHashMap<NodeId, Vec<usize>>,
    ) -> Self
    where
        C: AnnotationCollection<ColumnKey = K, Record = R>,
        R: AnnotationRecord<ColumnKey = K>,
        K: ColumnKey,
    {
        let annotation_name = annotations.file_name().to_string();
        let column_str = column.to_string();
        // lossy conversion instead of `.to_str().unwrap()`: a non-UTF-8
        // path name should not panic here
        let path_name = String::from_utf8_lossy(path_name).into_owned();
        let show = Arc::new(true.into());

        let column = C::wrap_column(column.to_owned());

        let label_set_name = label_set_name.to_owned();

        Self {
            annotation_name,
            label_set_name,
            column_str,
            column,
            path_name,
            show,
            path_id,
            label_strings,
            labels,
        }
    }

    /// Name of this label set.
    pub fn name(&self) -> &str {
        &self.label_set_name
    }

    /// The label texts; `labels()` indexes into this slice.
    pub fn label_strings(&self) -> &[String] {
        &self.label_strings
    }

    /// Per-node indices into `label_strings()`.
    pub fn labels(&self) -> &FxHashMap<NodeId, Vec<usize>> {
        &self.labels
    }

    /// Whether the set is currently shown.
    pub fn is_visible(&self) -> bool {
        self.show.load()
    }

    /// Sets the (shared) visibility flag.
    pub fn set_visibility(&self, to: bool) {
        self.show.store(to);
    }
}
/// The supported annotation file formats.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnnotationFileType {
    Gff3,
    Bed,
}
/// A column key from either supported annotation format, erased into one
/// type so it can be stored without generics.
#[derive(Debug, Clone, PartialEq, PartialOrd)]
pub enum AnnotationColumn {
    Gff3(Gff3Column),
    Bed(BedColumn),
}
/// Registry of loaded annotation files (GFF3 and BED) and the label sets
/// derived from them.
#[derive(Default, Clone)]
pub struct Annotations {
    // (name, format) pairs for every loaded file, in load order
    annot_names: Vec<(String, AnnotationFileType)>,
    gff3_annotations: HashMap<String, Arc<Gff3Records>>,
    bed_annotations: HashMap<String, Arc<BedRecords>>,
    label_sets: HashMap<String, Arc<AnnotationLabelSet>>,
    // optional default reference path per annotation file name
    annotation_default_ref_path: HashMap<String, PathId>,
}
impl Annotations {
    /// Names and formats of all loaded annotation files, in load order.
    pub fn annot_names(&self) -> &[(String, AnnotationFileType)] {
        &self.annot_names
    }

    /// The default reference path of the annotation file `annot`, if set.
    pub fn get_default_ref_path(&self, annot: &str) -> Option<PathId> {
        self.annotation_default_ref_path.get(annot).copied()
    }

    /// Registers an already-shared GFF3 record set under `name`.
    pub fn insert_gff3_arc(&mut self, name: &str, records: Arc<Gff3Records>) {
        self.gff3_annotations.insert(name.to_string(), records);
        self.annot_names
            .push((name.to_string(), AnnotationFileType::Gff3));
    }

    /// Registers a GFF3 record set under `name`, taking ownership.
    pub fn insert_gff3(&mut self, name: &str, records: Gff3Records) {
        // delegate so the bookkeeping lives in one place
        self.insert_gff3_arc(name, Arc::new(records));
    }

    /// Removes the GFF3 record set `name` and its `annot_names` entry.
    pub fn remove_gff3(&mut self, name: &str) {
        self.gff3_annotations.remove(name);
        self.annot_names.retain(|(n, _)| n != name);
    }

    pub fn get_gff3(&self, name: &str) -> Option<&Arc<Gff3Records>> {
        self.gff3_annotations.get(name)
    }

    /// Registers an already-shared BED record set under `name`.
    pub fn insert_bed_arc(&mut self, name: &str, records: Arc<BedRecords>) {
        self.bed_annotations.insert(name.to_string(), records);
        self.annot_names
            .push((name.to_string(), AnnotationFileType::Bed));
    }

    /// Registers a BED record set under `name`, taking ownership.
    pub fn insert_bed(&mut self, name: &str, records: BedRecords) {
        self.insert_bed_arc(name, Arc::new(records));
    }

    /// Removes the BED record set `name` and its `annot_names` entry.
    pub fn remove_bed(&mut self, name: &str) {
        self.bed_annotations.remove(name);
        self.annot_names.retain(|(n, _)| n != name);
    }

    pub fn get_bed(&self, name: &str) -> Option<&Arc<BedRecords>> {
        self.bed_annotations.get(name)
    }

    /// Stores `label_set` under `name`, wrapping it in an `Arc`.
    pub fn insert_label_set(
        &mut self,
        name: &str,
        label_set: AnnotationLabelSet,
    ) {
        self.label_sets
            .insert(name.to_string(), Arc::new(label_set));
    }

    /// Lookup is read-only, so this takes `&self` (previously `&mut self`;
    /// callers holding a mutable reference still work).
    pub fn get_label_set(
        &self,
        name: &str,
    ) -> Option<&Arc<AnnotationLabelSet>> {
        self.label_sets.get(name)
    }

    /// Iterator over the label sets whose visibility flag is set.
    pub fn visible_label_sets(
        &self,
    ) -> impl Iterator<Item = &'_ Arc<AnnotationLabelSet>> + '_ {
        self.label_sets.values().filter(|ls| ls.is_visible())
    }

    pub fn label_sets(&self) -> &HashMap<String, Arc<AnnotationLabelSet>> {
        &self.label_sets
    }
}
/// Key type identifying a column of an annotation format.
pub trait ColumnKey:
    Clone + Eq + Ord + std::hash::Hash + std::fmt::Display + Send + Sync
{
    /// Whether `key` is an optional (non-mandatory) column.
    fn is_column_optional(key: &Self) -> bool;
    /// The column holding the sequence id.
    fn seq_id() -> Self;
    /// The column holding the range start.
    fn start() -> Self;
    /// The column holding the range end.
    fn end() -> Self;
}
/// A single annotation record (one row of an annotation file).
pub trait AnnotationRecord {
    type ColumnKey: ColumnKey;

    /// The columns this record has values for.
    fn columns(&self) -> Vec<Self::ColumnKey>;

    /// The sequence id the record is placed on.
    fn seq_id(&self) -> &[u8];

    fn start(&self) -> usize;

    fn end(&self) -> usize;

    /// The (start, end) range as a pair.
    fn range(&self) -> (usize, usize) {
        (self.start(), self.end())
    }

    /// The record's score, if it has one.
    fn score(&self) -> Option<f64>;

    /// Get the value of one of the columns, other than those
    /// corresponding to the range or the score
    ///
    /// If the column has multiple entries, return the first
    fn get_first(&self, key: &Self::ColumnKey) -> Option<&[u8]>;

    /// All values stored under `key`.
    fn get_all(&self, key: &Self::ColumnKey) -> Vec<&[u8]>;
}
/// A parsed annotation file: a list of records sharing one column key type.
pub trait AnnotationCollection {
    type ColumnKey: ColumnKey;
    type Record: AnnotationRecord<ColumnKey = Self::ColumnKey>;

    /// Name of the source file.
    fn file_name(&self) -> &str;

    /// Number of records.
    fn len(&self) -> usize;

    /// Whether the collection holds no records (companion to `len`;
    /// default provided so existing implementors are unaffected).
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    fn all_columns(&self) -> Vec<Self::ColumnKey>;
    fn mandatory_columns(&self) -> Vec<Self::ColumnKey>;
    fn optional_columns(&self) -> Vec<Self::ColumnKey>;

    fn records(&self) -> &[Self::Record];

    /// Wraps a concrete column key in the format-erased `AnnotationColumn`.
    fn wrap_column(column: Self::ColumnKey) -> AnnotationColumn;
}
/// Strand of an annotation record: forward ("+"), reverse ("-"), or
/// unspecified (".").
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash)]
pub enum Strand {
    Pos,
    Neg,
    None,
}

impl std::str::FromStr for Strand {
    type Err = ();

    /// Parses a strand field; anything other than "+", "-", or "." is an
    /// error.
    fn from_str(s: &str) -> Result<Self, ()> {
        match s {
            "+" => Ok(Strand::Pos),
            "-" => Ok(Strand::Neg),
            "." => Ok(Strand::None),
            _ => Err(()),
        }
    }
}
// NB: this assumes that the path name is of the form
// "path_name#seq_id:start-end", where seq_id is a string, and start
// and end are unsigned integers
/// Parses `"path_name#seq_id:start-end"`, returning the seq id bytes and
/// the numeric range; `None` on any malformed input.
pub fn path_name_chr_range(path_name: &[u8]) -> Option<(&[u8], usize, usize)> {
    // position of the '#' separating the path name from the position string
    let pos_start_ix = path_name.iter().position(|&b| b == b'#')?;

    let pos_str = path_name.get(pos_start_ix + 1..)?;
    if pos_str.is_empty() {
        return None;
    }

    let seq_id_end = pos_str.iter().position(|&b| b == b':')?;
    let chr = &pos_str[..seq_id_end];

    // search for '-' only within the range part; previously the whole
    // position string was searched, so a '-' inside the seq id (e.g.
    // "chr-1") produced an inverted slice and panicked
    let range_str = pos_str.get(seq_id_end + 1..)?;
    let range_mid = range_str.iter().position(|&b| b == b'-')?;

    let start: usize = std::str::from_utf8(&range_str[..range_mid])
        .ok()?
        .parse()
        .ok()?;
    let end: usize = std::str::from_utf8(range_str.get(range_mid + 1..)?)
        .ok()?
        .parse()
        .ok()?;

    Some((chr, start, end))
}
/// Parses `"name:start-end"`, returning the name bytes and the numeric
/// range; `None` on any malformed input.
///
/// Uses std slice/str primitives instead of the bstr extension methods —
/// behavior is unchanged (the needle was a single byte either way).
pub fn path_name_range(path_name: &[u8]) -> Option<(&[u8], usize, usize)> {
    let mut parts = path_name.split(|&b| b == b':');
    let name = parts.next()?;
    let range = parts.next()?;

    let mut bounds = range.split(|&b| b == b'-');
    let start: usize = std::str::from_utf8(bounds.next()?)
        .ok()?
        .parse()
        .ok()?;
    let end: usize = std::str::from_utf8(bounds.next()?)
        .ok()?
        .parse()
        .ok()?;

    Some((name, start, end))
}
/// Returns the start coordinate embedded in a `"name:start-end"` path
/// name, if present.
///
/// (Removed a commented-out copy of `path_name_range`'s body that this
/// call replaced.)
pub fn path_name_offset(path_name: &[u8]) -> Option<usize> {
    path_name_range(path_name).map(|(_, start, _)| start)
}
/// Returns the sub-slice of `steps` whose positions (third tuple field)
/// fall in `[start, end)`, after shifting the query by `offset`.
///
/// Returns `None` for an inverted range (previously `end - start`
/// could panic) or inverted search results (previously an out-of-order
/// slice panicked).
pub fn path_step_range(
    steps: &[(Handle, StepPtr, usize)],
    offset: Option<usize>,
    start: usize,
    end: usize,
) -> Option<&[(Handle, StepPtr, usize)]> {
    let offset = offset.unwrap_or(0);

    let len = end.checked_sub(start)?;

    // shift into path-local coordinates, clamping the start at zero
    let start = start.saturating_sub(offset);
    let end = end.checked_sub(offset).unwrap_or(start + len);

    // binary search by position; Ok and Err both yield a usable index
    let start_ix = match steps.binary_search_by_key(&start, |(_, _, p)| *p) {
        Ok(ix) | Err(ix) => ix,
    };
    let end_ix = match steps.binary_search_by_key(&end, |(_, _, p)| *p) {
        Ok(ix) | Err(ix) => ix,
    };
    let end_ix = end_ix.min(steps.len());

    steps.get(start_ix..end_ix)
}
pub fn path_step_radius(
steps: &[(Handle, StepPtr, usize)],
nodes: &[Node],
step_ix: usize,
radius: f32,
) -> FxHashSet<NodeId> {
let (handle, _, _) = steps[step_ix];
let node = handle.id();
let node_ix = (node.0 as usize) - 1;
let origin = nodes[node_ix].center();
let rad_sqr = radius * radius;
steps
.iter()
.filter_map(|(handle, _, _)| {
let ix = (handle.id().0 - 1) as usize;
let pos = nodes.get(ix)?.center();
if pos.dist_sqr(origin) <= rad_sqr {
let id = NodeId::from((ix + 1) as u64);
Some(id)
} else {
None
}
})
.collect()
}
/// One label cluster: which labels it contains and which entry of
/// `ClusterCache::cluster_offsets` holds its display offset.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct ClusterIndices {
    /// Indices into the label set's label strings.
    pub label_indices: Vec<usize>,
    /// Index into the cluster offset vec.
    pub offset_ix: usize,
}
/// Cached label clusters for one annotation label set, built at a given
/// view scale and clustering radius.
pub struct ClusterCache {
    /// The label set the clusters were built from.
    pub label_set: Arc<AnnotationLabelSet>,
    /// One offset per cluster, indexed by `ClusterIndices::offset_ix`.
    pub cluster_offsets: Vec<Point>,
    /// Clusters keyed by the node id of their middle step.
    pub node_labels: FxHashMap<NodeId, ClusterIndices>,
    /// View scale the cache was built at (used to detect staleness).
    pub view_scale: f32,
    /// Clustering radius the cache was built with.
    pub radius: f32,
}
impl ClusterCache {
    /// Walks `steps` and groups label-bearing nodes into clusters: a step
    /// joins the current cluster when its screen-space position is within
    /// `radius` of the cluster's first node, otherwise it starts a new
    /// cluster. Returns label indices keyed by the (start, end) step
    /// range of each cluster.
    ///
    /// Shared by `new_cluster` and `rebuild_cluster`, which previously
    /// duplicated this logic.
    fn collect_label_clusters(
        steps: &[(Handle, StepPtr, usize)],
        nodes: &[Node],
        labels: &FxHashMap<NodeId, Vec<usize>>,
        view: View,
        radius: f32,
    ) -> FxHashMap<(usize, usize), Vec<usize>> {
        let mut clusters: FxHashMap<(usize, usize), Vec<usize>> =
            FxHashMap::default();

        let mut range_ix: Option<(usize, usize)> = None;
        let mut start_pos: Option<Point> = None;
        let mut current: Vec<usize> = Vec::new();

        let view_matrix = view.to_scaled_matrix();

        // world -> screen, so `radius` is applied in screen space
        let to_screen = |p: Point| {
            let v = glm::vec4(p.x, p.y, 0.0, 1.0);
            let v_ = view_matrix * v;
            Point::new(v_[0], v_[1])
        };

        for (ix, (handle, _, _)) in steps.iter().enumerate() {
            let node = handle.id();
            if let Some(label_indices) = labels.get(&node) {
                let node_ix = (node.0 - 1) as usize;
                let node_pos = to_screen(nodes[node_ix].center());

                match start_pos {
                    Some(sp) if node_pos.dist(sp) <= radius => {
                        // still within the current cluster; extend it
                        if let Some((_, end)) = range_ix.as_mut() {
                            *end = ix;
                        }
                        current.extend_from_slice(label_indices);
                    }
                    Some(_) => {
                        // flush the finished cluster, start a new one here
                        if let Some(range) = range_ix {
                            clusters
                                .insert(range, std::mem::take(&mut current));
                        }
                        start_pos = Some(node_pos);
                        range_ix = Some((ix, ix));
                        current.extend_from_slice(label_indices);
                    }
                    None => {
                        // first label-bearing step opens the first cluster
                        start_pos = Some(node_pos);
                        range_ix = Some((ix, ix));
                        current.extend_from_slice(label_indices);
                    }
                }
            }
        }

        // flush the trailing cluster; previously it was dropped, silently
        // losing the labels of the last cluster on the path
        if let Some(range) = range_ix {
            if !current.is_empty() {
                clusters.insert(range, current);
            }
        }

        clusters
    }

    /// For the cluster spanning `steps[start..=end]`, returns the node id
    /// of the middle step (the cluster's anchor) and a unit vector
    /// perpendicular to the start->end direction, used to push labels off
    /// the path.
    fn cluster_anchor(
        steps: &[(Handle, StepPtr, usize)],
        nodes: &[Node],
        start: usize,
        end: usize,
    ) -> (NodeId, Point) {
        let slice = &steps[start..=end];
        let (mid_handle, _, _) = slice[slice.len() / 2];

        let (start_h, _, _) = steps[start];
        let (end_h, _, _) = steps[end];

        let s_ix = (start_h.id().0 - 1) as usize;
        let e_ix = (end_h.id().0 - 1) as usize;

        let start_p = nodes[s_ix].p0;
        let end_p = nodes[e_ix].p1;

        let del =
            glm::vec2(end_p.x, end_p.y) - glm::vec2(start_p.x, start_p.y);
        let normal =
            glm::rotate_vec2(&del, std::f32::consts::PI / 2.0).normalize();

        (mid_handle.id(), Point::new(normal[0], normal[1]))
    }

    /// Builds the cluster cache for `label_set` along the path described
    /// by `steps`.
    pub fn new_cluster(
        steps: &[(Handle, StepPtr, usize)],
        nodes: &[Node],
        label_set: &Arc<AnnotationLabelSet>,
        view: View,
        radius: f32,
    ) -> Self {
        let clusters = Self::collect_label_clusters(
            steps,
            nodes,
            &label_set.labels,
            view,
            radius,
        );

        let mut cluster_offsets: Vec<Point> =
            Vec::with_capacity(clusters.len());
        let mut node_labels: FxHashMap<NodeId, ClusterIndices> =
            FxHashMap::default();

        for ((start, end), label_indices) in clusters {
            let (node, offset) = Self::cluster_anchor(steps, nodes, start, end);
            node_labels.insert(
                node,
                ClusterIndices {
                    label_indices,
                    offset_ix: cluster_offsets.len(),
                },
            );
            cluster_offsets.push(offset);
        }

        Self {
            label_set: label_set.clone(),
            cluster_offsets,
            node_labels,
            view_scale: view.scale,
            radius,
        }
    }

    /// Recomputes the clusters when the view scale or radius changed;
    /// returns whether a rebuild actually happened.
    pub fn rebuild_cluster(
        &mut self,
        steps: &[(Handle, StepPtr, usize)],
        nodes: &[Node],
        view: View,
        radius: f32,
    ) -> bool {
        // nothing to do when zoom and radius are effectively unchanged
        if (view.scale - self.view_scale).abs() < 0.0001
            && radius == self.radius
        {
            return false;
        }

        self.view_scale = view.scale;
        self.radius = radius;

        self.cluster_offsets.clear();
        self.node_labels.clear();

        let clusters = Self::collect_label_clusters(
            steps,
            nodes,
            &self.label_set.labels,
            view,
            radius,
        );

        for ((start, end), label_indices) in clusters {
            let (node, offset) = Self::cluster_anchor(steps, nodes, start, end);
            self.node_labels.insert(
                node,
                ClusterIndices {
                    label_indices,
                    offset_ix: self.cluster_offsets.len(),
                },
            );
            self.cluster_offsets.push(offset);
        }

        true
    }
}
/// Clusters `node_labels` along the path given by `steps`: consecutive
/// label-bearing nodes within `radius` (screen space) of the cluster's
/// first node are merged. Returns, per cluster, the middle step's node id
/// with a unit offset vector (perpendicular to the cluster's direction)
/// and the merged labels.
pub fn cluster_annotations(
    steps: &[(Handle, StepPtr, usize)],
    nodes: &[Node],
    view: View,
    node_labels: &FxHashMap<NodeId, Vec<String>>,
    radius: f32,
) -> FxHashMap<NodeId, (Point, Vec<String>)> {
    let mut cluster_range_ix: Option<(usize, usize)> = None;
    let mut cluster_start_pos: Option<Point> = None;
    let mut current_cluster: Vec<String> = Vec::new();

    let mut clusters: FxHashMap<(usize, usize), Vec<String>> =
        FxHashMap::default();

    let view_matrix = view.to_scaled_matrix();

    // world -> screen, so `radius` is applied in screen space
    let to_screen = |p: Point| {
        let v = glm::vec4(p.x, p.y, 0.0, 1.0);
        let v_ = view_matrix * v;
        Point::new(v_[0], v_[1])
    };

    for (ix, (handle, _, _)) in steps.iter().enumerate() {
        let node = handle.id();
        if let Some(labels) = node_labels.get(&node) {
            let node_ix = (node.0 - 1) as usize;
            let node_pos = to_screen(nodes[node_ix].center());

            match cluster_start_pos {
                Some(sp) if node_pos.dist(sp) <= radius => {
                    // still within the current cluster; extend it
                    if let Some((_, end)) = cluster_range_ix.as_mut() {
                        *end = ix;
                    }
                    current_cluster.extend_from_slice(labels);
                }
                Some(_) => {
                    // flush the finished cluster and start a new one
                    if let Some(range) = cluster_range_ix {
                        clusters
                            .insert(range, std::mem::take(&mut current_cluster));
                    }
                    cluster_start_pos = Some(node_pos);
                    cluster_range_ix = Some((ix, ix));
                    current_cluster.extend_from_slice(labels);
                }
                None => {
                    cluster_start_pos = Some(node_pos);
                    cluster_range_ix = Some((ix, ix));
                    current_cluster.extend_from_slice(labels);
                }
            }
        }
    }

    // flush the trailing cluster; previously it was dropped, silently
    // losing the labels of the last cluster on the path
    if let Some(range) = cluster_range_ix {
        if !current_cluster.is_empty() {
            clusters.insert(range, current_cluster);
        }
    }

    clusters
        .into_iter()
        .map(|((start, end), labels)| {
            let slice = &steps[start..=end];
            let (mid_handle, _, _) = slice[slice.len() / 2];

            let (start_h, _, _) = steps[start];
            let (end_h, _, _) = steps[end];

            let s_ix = (start_h.id().0 - 1) as usize;
            let e_ix = (end_h.id().0 - 1) as usize;

            let start_p = nodes[s_ix].p0;
            let end_p = nodes[e_ix].p1;

            // unit normal to the cluster's start->end direction
            let del =
                glm::vec2(end_p.x, end_p.y) - glm::vec2(start_p.x, start_p.y);
            let rot_del_norm =
                glm::rotate_vec2(&del, std::f32::consts::PI / 2.0).normalize();
            let offset = Point::new(rot_del_norm[0], rot_del_norm[1]);

            (mid_handle.id(), (offset, labels))
        })
        .collect()
}
/// A color for `record`'s value in `column`, derived from a hash of that
/// value; alpha is always 1.0.
pub fn record_column_hash_color<R, K>(
    record: &R,
    column: &K,
) -> Option<rgb::RGBA<f32>>
where
    R: AnnotationRecord<ColumnKey = K>,
    K: ColumnKey,
{
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    let mut hasher = DefaultHasher::default();

    // the range columns hash their numeric value; every other column
    // hashes all of its entries
    match column {
        c if *c == K::start() => record.start().hash(&mut hasher),
        c if *c == K::end() => record.end().hash(&mut hasher),
        _ => record.get_all(column).hash(&mut hasher),
    }

    let (red, green, blue) = crate::overlays::hash_node_color(hasher.finish());

    Some(rgb::RGBA::new(red, green, blue, 1.0))
}
|
#![cfg(test)]
use std::path::PathBuf;
/// Fetches src/main.rs through a local SOCKS5 proxy and checks it matches
/// the on-disk copy. Ignored by default since it needs the proxy and the
/// "myhttpd" server running.
#[test]
#[ignore]
fn get_main() {
    use std::io::prelude::*;

    use log::*;
    use regex::Regex;
    use socks::*;

    // crate root, so the test works regardless of the working directory
    let root = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
    println!("root: {}", root.display());
    let exp = std::fs::read_to_string(root.join("src/main.rs")).unwrap();
    let act = {
        // connect to socks proxy
        let mut conn = Socks5Stream::connect(
            "localhost:1080",
            TargetAddr::Domain("myhttpd".to_owned(), 80),
        )
        .unwrap();
        // request main.rs
        write!(conn, "GET /src/main.rs HTTP/1.1\r\n").unwrap();
        write!(conn, "Host: myhttpd\r\n\r\n").unwrap();
        conn.flush().unwrap();
        let mut conn = std::io::BufReader::new(conn);
        // skip http headers, remembering the Content-Length value
        let mut line = String::new();
        let mut content_length = None;
        let re = Regex::new(r"Content-Length: (\d+)\r\n").unwrap();
        while let Ok(n) = conn.read_line(&mut line) {
            // bail out at EOF: previously `Ok(0)` reads spun forever when
            // the server closed the connection before the blank line
            if n == 0 {
                break;
            }
            debug!("line: {:?}", line);
            if line == "\r\n" {
                break;
            }
            if let Some(m) = re.captures(&line) {
                content_length = m.get(1).unwrap().as_str().parse().ok();
            }
            line.clear();
        }
        // read exactly the advertised body length
        let mut buff = vec![0; content_length.unwrap()];
        conn.read_exact(&mut buff[..]).unwrap();
        String::from_utf8_lossy(&buff).to_string()
    };
    assert_eq!(act, exp)
}
|
use crate::errors::PcapError;
use byteorder::ByteOrder;
use std::borrow::Cow;
use derive_into_owned::IntoOwned;
/// The systemd Journal Export Block is a lightweight container for systemd Journal Export Format entry data.
#[derive(Clone, Debug, IntoOwned)]
pub struct SystemdJournalExportBlock<'a> {
    /// A journal entry as described in the Journal Export Format documentation.
    pub journal_entry: Cow<'a, [u8]>,
}
impl<'a> SystemdJournalExportBlock<'a> {
    /// Parses the block from `slice`, borrowing the entire slice as the
    /// journal entry. Always succeeds and returns an empty remainder.
    ///
    /// NOTE(review): the `B: ByteOrder` parameter is unused here —
    /// presumably kept for signature parity with the other block parsers;
    /// confirm before removing.
    pub fn from_slice<B: ByteOrder>(slice: &'a [u8]) -> Result<(&'a[u8], Self), PcapError> {
        let packet = SystemdJournalExportBlock {
            journal_entry: Cow::Borrowed(slice),
        };
        Ok((&[], packet))
    }
}
|
/// Cycle detection entry point.
///
/// NOTE(review): `rs_not_supported!` is a macro defined elsewhere in the
/// crate; this operation is not implemented for this backend.
pub fn has_cycle() {
    rs_not_supported!()
}
//! FBX node attribute.
use std::io;
use fbxcel::pull_parser::{self as fbxbin, Result};
/// FBX node attribute.
#[derive(Debug, Clone)]
pub enum Attribute {
    /// `bool`.
    SingleBool(bool),
    /// `i16`.
    SingleI16(i16),
    /// `i32`.
    SingleI32(i32),
    /// `i64`.
    SingleI64(i64),
    /// `f32`.
    SingleF32(f32),
    /// `f64`.
    SingleF64(f64),
    /// `[bool]`.
    ArrayBool(Vec<bool>),
    /// `[i32]`.
    ArrayI32(Vec<i32>),
    /// `[i64]`.
    ArrayI64(Vec<i64>),
    /// `[f32]`.
    ArrayF32(Vec<f32>),
    /// `[f64]`.
    ArrayF64(Vec<f64>),
    /// `String`.
    String(String),
    /// `[u8]`.
    Binary(Vec<u8>),
}

impl Attribute {
    /// Joins formatted elements, following every element (including the
    /// last) with ", ", except every 16th which is followed by ",\n".
    ///
    /// Extracted from the five near-identical per-variant loops that
    /// `value_string` previously contained.
    fn format_wrapped<T>(
        arr: &[T],
        mut fmt_val: impl FnMut(&T) -> String,
    ) -> String {
        arr.iter()
            .enumerate()
            .map(|(i, val)| {
                let sep = if i & 0x0f == 0x0f { ",\n" } else { ", " };
                let mut elem = fmt_val(val);
                elem.push_str(sep);
                elem
            })
            .collect()
    }

    /// Returns type name.
    pub fn type_string(&self) -> &str {
        match *self {
            Attribute::SingleBool(_) => "bool",
            Attribute::SingleI16(_) => "i16",
            Attribute::SingleI32(_) => "i32",
            Attribute::SingleI64(_) => "i64",
            Attribute::SingleF32(_) => "f32",
            Attribute::SingleF64(_) => "f64",
            Attribute::ArrayBool(_) => "[bool]",
            Attribute::ArrayI32(_) => "[i32]",
            Attribute::ArrayI64(_) => "[i64]",
            Attribute::ArrayF32(_) => "[f32]",
            Attribute::ArrayF64(_) => "[f64]",
            Attribute::String(_) => "String",
            Attribute::Binary(_) => "[u8]",
        }
    }

    /// Returns string representation.
    pub fn value_string(&self) -> String {
        match *self {
            Attribute::SingleBool(val) => val.to_string(),
            Attribute::SingleI16(val) => val.to_string(),
            Attribute::SingleI32(val) => val.to_string(),
            Attribute::SingleI64(val) => val.to_string(),
            Attribute::SingleF32(val) => val.to_string(),
            Attribute::SingleF64(val) => val.to_string(),
            // booleans print as 0/1, not "false"/"true"
            Attribute::ArrayBool(ref arr) => Self::format_wrapped(arr, |&b| {
                if b { "1" } else { "0" }.to_string()
            }),
            Attribute::ArrayI32(ref arr) => {
                Self::format_wrapped(arr, |v| v.to_string())
            }
            Attribute::ArrayI64(ref arr) => {
                Self::format_wrapped(arr, |v| v.to_string())
            }
            Attribute::ArrayF32(ref arr) => {
                Self::format_wrapped(arr, |v| v.to_string())
            }
            Attribute::ArrayF64(ref arr) => {
                Self::format_wrapped(arr, |v| v.to_string())
            }
            // escape CR and control characters so the dump stays printable;
            // newline and tab pass through unchanged
            Attribute::String(ref val) => {
                val.chars()
                    .fold(String::with_capacity(val.len()), |mut s, c| {
                        match c {
                            '\n' | '\t' => s.push(c),
                            '\r' => s.push_str("\\r"),
                            _ if (c <= '\x1f') || (c == '\x7f') => {
                                s.push_str(&format!("\\x{:02x}", c as u32))
                            }
                            c => s.push(c),
                        }
                        s
                    })
            }
            Attribute::Binary(ref arr) => {
                Self::format_wrapped(arr, |v| format!("{:02x}", v))
            }
        }
    }
}
/// FBX 7.4 attribute loader.
///
/// Accepts every attribute type and wraps the value in the matching
/// [`Attribute`] variant.
#[derive(Debug, Clone)]
pub struct AttributeLoader;

impl fbxbin::v7400::LoadAttribute for AttributeLoader {
    type Output = Attribute;

    /// Description used in the parser's error messages.
    fn expecting(&self) -> String {
        "any attributes".to_owned()
    }

    fn load_bool(self, v: bool) -> Result<Self::Output> {
        Ok(Attribute::SingleBool(v))
    }

    fn load_i16(self, v: i16) -> Result<Self::Output> {
        Ok(Attribute::SingleI16(v))
    }

    fn load_i32(self, v: i32) -> Result<Self::Output> {
        Ok(Attribute::SingleI32(v))
    }

    fn load_i64(self, v: i64) -> Result<Self::Output> {
        Ok(Attribute::SingleI64(v))
    }

    fn load_f32(self, v: f32) -> Result<Self::Output> {
        Ok(Attribute::SingleF32(v))
    }

    fn load_f64(self, v: f64) -> Result<Self::Output> {
        Ok(Attribute::SingleF64(v))
    }

    // the array loaders collect into Result<Vec<_>>, short-circuiting on
    // the first parse error

    fn load_seq_bool(
        self,
        iter: impl Iterator<Item = Result<bool>>,
        _: usize,
    ) -> Result<Self::Output> {
        iter.collect::<Result<_>>().map(Attribute::ArrayBool)
    }

    fn load_seq_i32(
        self,
        iter: impl Iterator<Item = Result<i32>>,
        _: usize,
    ) -> Result<Self::Output> {
        iter.collect::<Result<_>>().map(Attribute::ArrayI32)
    }

    fn load_seq_i64(
        self,
        iter: impl Iterator<Item = Result<i64>>,
        _: usize,
    ) -> Result<Self::Output> {
        iter.collect::<Result<_>>().map(Attribute::ArrayI64)
    }

    fn load_seq_f32(
        self,
        iter: impl Iterator<Item = Result<f32>>,
        _: usize,
    ) -> Result<Self::Output> {
        iter.collect::<Result<_>>().map(Attribute::ArrayF32)
    }

    fn load_seq_f64(
        self,
        iter: impl Iterator<Item = Result<f64>>,
        _: usize,
    ) -> Result<Self::Output> {
        iter.collect::<Result<_>>().map(Attribute::ArrayF64)
    }

    /// Reads the whole binary payload; `len` is used as a capacity hint
    /// only — `read_to_end` reads whatever the reader yields.
    fn load_binary(self, mut reader: impl io::Read, len: u64) -> Result<Self::Output> {
        let mut buf = Vec::with_capacity(len as usize);
        reader.read_to_end(&mut buf)?;
        Ok(Attribute::Binary(buf))
    }

    /// Reads the whole string payload; fails on invalid UTF-8 via
    /// `read_to_string`.
    fn load_string(self, mut reader: impl io::Read, len: u64) -> Result<Self::Output> {
        let mut buf = String::with_capacity(len as usize);
        reader.read_to_string(&mut buf)?;
        Ok(Attribute::String(buf))
    }
}
|
#[macro_use]
extern crate dotenv_codegen;
mod common;
mod test{
use actix_http_test::TestServer;
use actix_web::http::header;
use actix_web::http;
use chrono::Duration;
use actix_http::httpmessage::HttpMessage;
use http::header::HeaderValue;
use actix_http::cookie::Cookie;
use serde_json::{json, Value};
use std::time::Duration as std_duration;
use std::cell::RefMut;
use crate::common::db_connection::establish_connection;
use crate::common::{server_test, send_request};
use ::mystore_lib::models::product::{FormProduct};
use ::mystore_lib::models::user::{NewUser, User};
use ::mystore_lib::models::price::{
PriceProductToUpdate,
FormPriceProduct,
FormPrice,
FormPriceProductsToUpdate};
// Full happy-path integration flow against the GraphQL product API:
// login -> create two prices -> create three products -> show -> update
// -> destroy -> search. Requires the test server and database fixtures.
#[actix_rt::test]
async fn test() {
// fixture user the login below authenticates as
create_user();
let srv = server_test();
let (csrf_token, request_cookie) = login(srv.borrow_mut()).await;
let shoe = FormProduct {
id: None,
name: Some("Shoe".to_string()),
stock: Some(10.4),
cost: Some(1892),
description: Some("not just your regular shoes, this one will make you jump".to_string()),
user_id: None
};
let hat = FormProduct {
id: None,
name: Some("Hat".to_string()),
stock: Some(15.0),
cost: Some(2045),
description: Some("Just a regular hat".to_string()),
user_id: None
};
let pants = FormProduct {
id: None,
name: Some("Pants".to_string()),
stock: Some(25.0),
cost: Some(3025),
description: Some("beautiful black pants that will make you look thin".to_string()),
user_id: None
};
// two price tiers shared by every product below
let new_price_discount = FormPrice { id: None, name: Some("Discount".to_string()), user_id: None };
let new_price_normal = FormPrice { id: None, name: Some("Normal".to_string()), user_id: None };
let price_discount = create_a_price(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&new_price_discount).await;
let price_normal = create_a_price(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&new_price_normal).await;
// pull the generated ids out of the createPrice responses
let price_discount_db = price_discount.get("data").unwrap().get("createPrice").unwrap();
let price_discount_id: i32 = serde_json::from_value(price_discount_db.get("id").unwrap().clone()).unwrap();
let price_normal_db = price_normal.get("data").unwrap().get("createPrice").unwrap();
let price_normal_id: i32 = serde_json::from_value(price_normal_db.get("id").unwrap().clone()).unwrap();
let all_prices = FormPriceProductsToUpdate {
data: vec![
PriceProductToUpdate {
to_delete: false,
price_product: FormPriceProduct {
id: None,
product_id: None,
user_id: None,
price_id: price_discount_id,
amount: Some(10)
}
},
PriceProductToUpdate {
to_delete: false,
price_product: FormPriceProduct {
id: None,
product_id: None,
user_id: None,
price_id: price_normal_id,
amount: Some(15)
}
}
]
};
let response_shoe_db = create_a_product(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&shoe,
all_prices.clone()).await;
let shoe_db = response_shoe_db.get("data").unwrap().get("createProduct").unwrap();
let shoe_id: i32 = serde_json::from_value(shoe_db.get("product").unwrap().get("id").unwrap().clone()).unwrap();
let response_hat_db = create_a_product(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&hat,
all_prices.clone()).await;
let hat_db = response_hat_db.get("data").unwrap().get("createProduct").unwrap();
let hat_id: i32 = serde_json::from_value(hat_db.get("product").unwrap().get("id").unwrap().clone()).unwrap();
let response_pants_db = create_a_product(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&pants,
all_prices.clone()).await;
let pants_db = response_pants_db.get("data").unwrap().get("createProduct").unwrap();
let pants_id: i32 = serde_json::from_value(pants_db.get("product").unwrap().get("id").unwrap().clone()).unwrap();
// the showProduct response must round-trip the created product
show_a_product(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
shoe_id,
&shoe_db).await;
let updated_hat = FormProduct {
id: None,
name: Some("Hat".to_string()),
stock: Some(30.0),
cost: Some(3025),
description: Some("A hat with particular color, a dark black shining and beautiful".to_string()),
user_id: None
};
update_a_product(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&updated_hat,
all_prices.clone()).await;
let response_product_destroyed =
destroy_a_product(srv.borrow_mut(),
csrf_token.clone(),
request_cookie.clone(),
&pants_id).await;
let destroyed: bool =
serde_json::from_value(
response_product_destroyed
.get("data")
.unwrap()
.get("destroyProduct")
.unwrap()
.clone()
).unwrap();
assert!(destroyed);
// expected listProduct payload after the destroy above; only the hat
// matches the search
let data_for_searching = json!({
"data": {
"listProduct": {
"data": [{
"priceProducts": [
{
"price": {
"name": "Discount"
},
"priceProduct": {
"amount": 10
}
},
{
"price": {
"name": "Normal"
},
"priceProduct": {
"amount": 15
}
}
],
"product": {
"cost": 2045,
"name": "Hat",
"description": "Just a regular hat",
"id": hat_id,
"stock": 15.0
}
}]
}
}
});
search_products(srv.borrow_mut(),
csrf_token,
request_cookie,
data_for_searching).await;
}
// Logs in as the fixture user and returns the CSRF token header plus a
// cookie carrying the session JWT, both required by later requests.
async fn login(srv: RefMut<'_, TestServer>) -> (HeaderValue, Cookie<'_>) {
let request = srv
.post("/login")
.header(header::CONTENT_TYPE, "application/json")
.timeout(std_duration::from_secs(600));
let response =
request
.send_body(r#"{"email":"jhon@doe.com","password":"12345678"}"#)
.await
.unwrap();
let csrf_token = response.headers().get("x-csrf-token").unwrap();
let cookies = response.cookies().unwrap();
// NOTE(review): assumes the login response sets exactly one cookie and
// that it holds the JWT — confirm against the server implementation
let cookie = cookies[0].clone().into_owned().value().to_string();
let request_cookie = Cookie::build("mystorejwt", cookie)
.domain("localhost")
.path("/")
.max_age(Duration::days(1).num_seconds())
.secure(false)
.http_only(false)
.finish();
(csrf_token.clone(), request_cookie.clone())
}
// Resets the users table and inserts the fixture user that `login` later
// authenticates as; returns the inserted row.
fn create_user() -> User {
use diesel::RunQueryDsl;
use ::mystore_lib::schema::users;
use chrono::Local;
let connection = establish_connection();
let pg_pool = connection.get().unwrap();
// start from a clean slate so the insert below cannot conflict
diesel::delete(users::table).execute(&pg_pool).unwrap();
diesel::insert_into(users::table)
.values(NewUser {
email: "jhon@doe.com".to_string(),
company: "My own personal enterprise".to_string(),
password: User::hash_password("12345678".to_string()).unwrap(),
created_at: Local::now().naive_local()
})
.get_result::<User>(&pg_pool).unwrap()
}
/// Sends the `createProduct` GraphQL mutation built from `product` and its
/// associated `prices`, returning the server's JSON response.
///
/// Note: the query string is JSON-with-embedded-GraphQL; newlines are removed
/// before sending, so the literal's internal spaces keep tokens separated.
async fn create_a_product(srv: RefMut<'_, TestServer>,
                          csrf_token: HeaderValue,
                          request_cookie: Cookie<'_>,
                          product: &FormProduct,
                          prices: FormPriceProductsToUpdate) -> Value {
    // Serialize every price row into a JSON fragment for the variables block.
    let prices_to_s: Vec<String> = prices.data.iter().map(|price| {
        format!(
            r#"
            {{
                "toDelete": {},
                "priceProduct": {{
                    "priceId": {},
                    "amount": {}
                }}
            }}"#,
            false,
            price.price_product.price_id,
            price.price_product.amount.unwrap()
        )
    }).collect();
    let query =
        format!(
            r#"
            {{
                "query": "
                    mutation CreateProduct($form: FormProduct!, $formPriceProducts: FormPriceProductsToUpdate!) {{
                        createProduct(form: $form, formPriceProducts: $formPriceProducts) {{
                            product {{
                                id
                                name
                                stock
                                cost
                                description
                                userId
                            }}
                            priceProducts {{
                                priceProduct {{
                                    id
                                    priceId
                                    userId
                                    amount
                                }}
                                price {{
                                    id
                                    name
                                    userId
                                }}
                            }}
                        }}
                    }}
                ",
                "variables": {{
                    "form": {{
                        "name": "{}",
                        "stock": {},
                        "cost": {},
                        "description": "{}"
                    }},
                    "formPriceProducts": {{ "data": [{}] }}
                }}
            }}"#,
            // Clone only the fields we need instead of cloning the whole
            // FormProduct once per field (the previous code did
            // `product.clone().name.unwrap()` four times).
            product.name.clone().unwrap(),
            product.stock.clone().unwrap(),
            product.cost.clone().unwrap(),
            product.description.clone().unwrap(),
            prices_to_s.join(","))
        .replace("\n", "");
    send_request(srv, csrf_token, request_cookie, query).await
}
/// Fetches one product via the `showProduct` GraphQL query and asserts the
/// response payload matches `expected_product`.
async fn show_a_product(srv: RefMut<'_, TestServer>,
                        csrf_token: HeaderValue,
                        request_cookie: Cookie<'_>,
                        id: i32,
                        expected_product: &Value) {
    let query = format!(r#"
    {{
        "query": "
            query ShowProduct($productId: Int!) {{
                showProduct(productId: $productId) {{
                    product {{
                        id
                        name
                        stock
                        cost
                        description
                        userId
                    }}
                    priceProducts {{
                        priceProduct {{
                            id
                            priceId
                            userId
                            amount
                        }}
                        price {{
                            id
                            name
                            userId
                        }}
                    }}
                }}
            }}
        ",
        "variables": {{
            "productId": {}
        }}
    }}
    "#, id).replace("\n", "");
    let response_product: Value = send_request(srv, csrf_token, request_cookie, query).await;
    // Only the "showProduct" subtree is compared, not the whole envelope.
    let product = response_product.get("data").unwrap().get("showProduct").unwrap();
    assert_eq!(product, expected_product);
}
/// Sends the `updateProduct` GraphQL mutation with the given product changes
/// and price rows, returning the server's JSON response.
async fn update_a_product(srv: RefMut<'_, TestServer>,
                          csrf_token: HeaderValue,
                          request_cookie: Cookie<'_>,
                          changes_to_product: &FormProduct,
                          prices: FormPriceProductsToUpdate) -> Value {
    // Serialize every price row into a JSON fragment for the variables block.
    let prices_to_s: Vec<String> = prices.data.iter().map(|price| {
        format!(
            r#"
            {{
                "toDelete": {},
                "priceProduct": {{
                    "priceId": {},
                    "amount": {}
                }}
            }}"#,
            false,
            price.price_product.price_id,
            price.price_product.amount.unwrap()
        )
    }).collect();
    let query =
        format!(
            r#"
            {{
                "query": "
                    mutation UpdateProduct($paramFormProduct: FormProduct!, $paramFormPriceProducts: FormPriceProductsToUpdate!) {{
                        updateProduct(paramFormProduct: $paramFormProduct, paramFormPriceProducts: $paramFormPriceProducts) {{
                            product {{
                                id
                                name
                                stock
                                cost
                                description
                                userId
                            }}
                            priceProducts {{
                                priceProduct {{
                                    id
                                    priceId
                                    userId
                                    amount
                                }}
                                price {{
                                    id
                                    name
                                    userId
                                }}
                            }}
                        }}
                    }}
                ",
                "variables": {{
                    "paramFormProduct": {{
                        "name": "{}",
                        "stock": {},
                        "cost": {},
                        "description": "{}"
                    }},
                    "paramFormPriceProducts": {{ "data": [{}] }}
                }}
            }}"#,
            // Clone only the fields we need instead of cloning the whole
            // FormProduct once per field (the previous code did
            // `changes_to_product.clone().name.unwrap()` four times).
            changes_to_product.name.clone().unwrap(),
            changes_to_product.stock.clone().unwrap(),
            changes_to_product.cost.clone().unwrap(),
            changes_to_product.description.clone().unwrap(),
            prices_to_s.join(","))
        .replace("\n", "");
    send_request(srv, csrf_token, request_cookie, query).await
}
/// Sends the `destroyProduct` GraphQL mutation for the product with `id`
/// and returns the raw JSON response (callers inspect the boolean payload).
async fn destroy_a_product(srv: RefMut<'_, TestServer>,
                           csrf_token: HeaderValue,
                           request_cookie: Cookie<'_>,
                           id: &i32) -> Value {
    let query = format!(r#"
    {{
        "query": "
            mutation DestroyAProduct($productId: Int!) {{
                destroyProduct(productId: $productId)
            }}
        ",
        "variables": {{
            "productId": {}
        }}
    }}
    "#, id).replace("\n", "");
    send_request(srv, csrf_token, request_cookie, query).await
}
/// Runs the `listProduct` full-text search query (fixed search term "hat",
/// limit 10, rank 1.0) and asserts the full response equals
/// `data_for_searching`.
async fn search_products(srv: RefMut<'_, TestServer>,
                         csrf_token: HeaderValue,
                         request_cookie: Cookie<'_>,
                         data_for_searching: Value) {
    let query = format!(r#"
    {{
        "query": "
            query ListProduct($search: String!, $limit: Int!, $rank: Float!) {{
                listProduct(search: $search, limit: $limit, rank: $rank) {{
                    data {{
                        product {{
                            id
                            name
                            stock
                            cost
                            description
                        }}
                        priceProducts {{
                            priceProduct {{
                                amount
                            }}
                            price {{
                                name
                            }}
                        }}
                    }}
                }}
            }}
        ",
        "variables": {{
            "search": "hat",
            "limit": 10,
            "rank": 1.0
        }}
    }}
    "#).replace("\n", "");
    let response_sales: Value = send_request(srv, csrf_token, request_cookie, query).await;
    // Whole-envelope comparison: expected value includes the "data" wrapper.
    assert_eq!(data_for_searching, response_sales);
}
/// Sends the `createPrice` GraphQL mutation built from `price` and returns
/// the server's JSON response.
async fn create_a_price(srv: RefMut<'_, TestServer>,
                        csrf_token: HeaderValue,
                        request_cookie: Cookie<'_>,
                        price: &FormPrice) -> Value {
    let query =
        format!(
            r#"
            {{
                "query": "
                    mutation createPrice($form: FormPrice!) {{
                        createPrice(form: $form) {{
                            id
                            name
                            userId
                        }}
                    }}
                ",
                "variables": {{
                    "form": {{
                        "name": "{}"
                    }}
                }}
            }}"#,
            price.clone().name.unwrap())
        .replace("\n", "");
    send_request(srv, csrf_token, request_cookie, query).await
}
} |
use tokio::process::Command;
use anyhow::{Result, Context};
use async_trait::async_trait;
use crate::{
services::model::{Nameable, Ensurable, is_binary_present},
helpers::ExitStatusIntoUnit
};
// Binary name used both for the presence check and for display.
static NAME: &str = "curl";
/// Ensurable dependency that installs the `curl` binary via apt-get.
#[derive(Default)]
pub struct Curl {}
impl Nameable for Curl {
    /// Name of the binary this ensurable manages.
    fn name(&self) -> &'static str {
        NAME
    }
}
#[async_trait]
impl Ensurable for Curl {
    /// Checks whether the `curl` binary is already available.
    async fn is_present(&self) -> Result<bool> {
        is_binary_present(self).await
    }
    /// Installs curl with `apt-get` and then verifies the binary resolves.
    ///
    /// Each step attaches context so failures point at the exact stage.
    async fn make_present(&self) -> Result<()> {
        // Refresh the package index first so the install sees current packages.
        Command::new("apt-get")
            .arg("update")
            .status().await
            .status_to_unit()
            .context("Unable to update apt-get.")?;
        Command::new("apt-get")
            .arg("-y")
            .arg("install")
            .arg("curl")
            .status().await
            .status_to_unit()
            // Fixed copy-paste slip: the old message told the user to install
            // python3/pip3 even though this ensurable installs curl.
            .context("Unable to install curl via apt-get. You can install curl manually, and try `kfpl init` again.")?;
        // Sanity-check that the binary is now resolvable on the PATH.
        Command::new("which")
            .arg("curl")
            .status().await
            .status_to_unit()
            .context("Unable to verify curl installation.")?;
        Ok(())
    }
}
|
// Copyright (C) 2020 Sebastian Dröge <sebastian@centricular.com>
//
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use super::*;
/// `Session` header ([RFC 7826 section 18.49](https://tools.ietf.org/html/rfc7826#section-18.49)).
///
/// Derefs to the session identifier string; the second field carries the
/// optional timeout advertised by the server.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Session(
    /// Session identifier.
    pub String,
    /// Optional session timeout in seconds.
    pub Option<u64>,
);
impl Session {
    /// Creates a `Session` from an identifier and a timeout in seconds.
    pub fn with_timeout(id: String, timeout: u64) -> Self {
        Self(id, Some(timeout))
    }
}
impl std::ops::Deref for Session {
    type Target = str;
    /// Dereferences to the session identifier.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl AsRef<str> for Session {
    /// Borrows the session identifier.
    fn as_ref(&self) -> &str {
        &self.0
    }
}
impl<'a> From<&'a str> for Session {
    /// Builds a session with the given identifier and no timeout.
    fn from(v: &'a str) -> Session {
        Session(String::from(v), None)
    }
}
impl From<String> for Session {
    /// Builds a session with the given identifier and no timeout.
    fn from(v: String) -> Session {
        Session(v, None)
    }
}
impl super::TypedHeader for Session {
    /// Parses `Session: <id>[;timeout=<seconds>]`.
    ///
    /// Returns `Ok(None)` when the header is absent and an error when the
    /// timeout parameter is malformed.
    fn from_headers(headers: impl AsRef<Headers>) -> Result<Option<Self>, HeaderParseError> {
        let headers = headers.as_ref();
        let header = match headers.get(&SESSION) {
            None => return Ok(None),
            Some(header) => header,
        };
        let mut iter = header.as_str().split(';');
        let session_id = iter.next().ok_or(HeaderParseError)?;
        // RFC 7826 defines a single parameter, `timeout=<delta-seconds>`.
        // The previous code fed the raw "timeout=N" segment straight into
        // `parse::<u64>()`, which always failed (including on headers this
        // type itself produces via `insert_into`). Strip the parameter name
        // (and any surrounding whitespace) before parsing the number.
        let timeout = iter
            .next()
            .map(|s| {
                s.trim()
                    .strip_prefix("timeout=")
                    .ok_or(HeaderParseError)?
                    .parse::<u64>()
                    .map_err(|_| HeaderParseError)
            })
            .transpose()?;
        Ok(Some(Session(session_id.into(), timeout)))
    }
    /// Serializes the session into the headers, appending `;timeout=<n>`
    /// when a timeout is present.
    fn insert_into(&self, mut headers: impl AsMut<Headers>) {
        let headers = headers.as_mut();
        if let Some(timeout) = self.1 {
            headers.insert(SESSION, format!("{};timeout={}", self.0, timeout));
        } else {
            headers.insert(SESSION, self.0.to_string());
        }
    }
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! This file defines ledger store APIs that are related to the main ledger accumulator, from the
//! root(LedgerInfo) to leaf(TransactionInfo).
use crate::error::SgStorageError;
use crate::schema::channel_transaction_info::*;
use crate::schema::{channel_transaction_accumulator::*, ledger_info_schema::*};
use crate::schema_db::SchemaDB;
use accumulator::{HashReader, MerkleAccumulator};
use anyhow::{format_err, Result};
use libra_crypto::{hash::CryptoHash, HashValue};
use libra_types::proof::position::Position;
use libra_types::proof::AccumulatorConsistencyProof;
use libra_types::transaction::Version;
use schemadb::{ReadOptions, SchemaBatch};
use sgtypes::ledger_info::LedgerInfo;
use sgtypes::{
channel_transaction_info::ChannelTransactionInfo, hash::ChannelTransactionAccumulatorHasher,
proof::ChannelTransactionAccumulatorProof,
};
use std::fmt::Formatter;
use std::sync::Arc;
use std::sync::RwLock;
#[derive(Clone)]
pub struct LedgerStore<S> {
    // Underlying schema database holding transaction infos, accumulator
    // nodes and ledger infos.
    db: S,
    // In-memory cache of the most recent LedgerInfo; populated by
    // `bootstrap` and updated by `set_latest_ledger_info`.
    latest_ledger_info: Arc<RwLock<Option<LedgerInfo>>>,
}
impl<S> core::fmt::Debug for LedgerStore<S>
where
    S: core::fmt::Debug,
{
    // Manual impl so the struct is Debug whenever the DB itself is.
    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
        write!(
            f,
            "db: {:?}, latest_ledger_info: {:?}",
            self.db, self.latest_ledger_info
        )
    }
}
impl<S> LedgerStore<S> {
pub fn new(db: S) -> Self {
Self {
db,
latest_ledger_info: Arc::new(RwLock::new(None)),
}
}
}
impl<S> LedgerStore<S>
where
    S: SchemaDB,
{
    // Upon restart, read the latest ledger info and signatures and cache them in memory.
    pub fn bootstrap(&self) {
        let ledger_info = {
            // Seek to the last entry of the ledger-info column family; the
            // key is the epoch, so the last entry is the newest.
            let mut iter = self
                .db
                .iter::<LedgerInfoSchema>(ReadOptions::default())
                .expect("Constructing iterator should work.");
            iter.seek_to_last();
            iter.next()
                .transpose()
                .expect("Reading latest ledger info from DB should work.")
                .map(|kv| kv.1)
        };
        if let Some(ledger_info) = ledger_info {
            self.set_latest_ledger_info(ledger_info);
        }
    }
    /// Return the ledger infos starting from `start_epoch` to
    /// the most recent one.
    /// Note: ledger infos and signatures are only available at the last version of each earlier
    /// epoch and at the latest version of current epoch.
    pub fn get_latest_ledger_infos_per_epoch(&self, start_epoch: u64) -> Result<Vec<LedgerInfo>> {
        let mut iter = self.db.iter::<LedgerInfoSchema>(ReadOptions::default())?;
        iter.seek(&start_epoch)?;
        Ok(iter.map(|kv| Ok(kv?.1)).collect::<Result<Vec<_>>>()?)
    }
    /// Returns a clone of the cached latest ledger info, if any.
    pub fn get_latest_ledger_info_option(&self) -> Option<LedgerInfo> {
        let ledger_info_ptr = self.latest_ledger_info.read().unwrap();
        (*ledger_info_ptr).clone()
    }
    /// Like `get_latest_ledger_info_option`, but errors when nothing has
    /// been cached yet (i.e. before genesis / before `bootstrap`).
    pub fn get_latest_ledger_info(&self) -> Result<LedgerInfo> {
        self.get_latest_ledger_info_option()
            .ok_or_else(|| SgStorageError::NotFound(String::from("Genesis LedgerInfo")).into())
    }
    /// Replaces the in-memory cache of the latest ledger info.
    pub fn set_latest_ledger_info(&self, ledger_info_with_sigs: LedgerInfo) {
        *self.latest_ledger_info.write().unwrap() = Some(ledger_info_with_sigs);
    }
    /// Get transaction info given `version`
    pub fn get_transaction_info(&self, version: Version) -> Result<ChannelTransactionInfo> {
        self.db
            .get::<ChannelTransactionInfoSchema>(&version)?
            .ok_or_else(|| format_err!("No TransactionInfo at version {}", version))
    }
    /// Returns the newest `(version, info)` pair, or `None` when the column
    /// family is empty.
    pub fn get_latest_transaction_info_option(
        &self,
    ) -> Result<Option<(Version, ChannelTransactionInfo)>> {
        let mut iter = self
            .db
            .iter::<ChannelTransactionInfoSchema>(ReadOptions::default())?;
        iter.seek_to_last();
        iter.next().transpose()
    }
    /// Get latest transaction info together with its version. Note that during node syncing, this
    /// version can be greater than what's in the latest LedgerInfo.
    pub fn get_latest_transaction_info(&self) -> Result<(Version, ChannelTransactionInfo)> {
        self.get_latest_transaction_info_option()?.ok_or_else(|| {
            SgStorageError::NotFound(String::from("Genesis TransactionInfo.")).into()
        })
    }
    /// Get transaction info at `version` with proof towards root of ledger at `ledger_version`.
    pub fn get_transaction_info_with_proof(
        &self,
        version: Version,
        ledger_version: Version,
    ) -> Result<(ChannelTransactionInfo, ChannelTransactionAccumulatorProof)> {
        Ok((
            self.get_transaction_info(version)?,
            self.get_transaction_proof(version, ledger_version)?,
        ))
    }
    /// Get proof for transaction at `version` towards root of ledger at `ledger_version`.
    pub fn get_transaction_proof(
        &self,
        version: Version,
        ledger_version: Version,
    ) -> Result<ChannelTransactionAccumulatorProof> {
        Accumulator::get_proof(self, ledger_version + 1 /* num_leaves */, version)
    }
    /// Gets proof that shows the ledger at `ledger_version` is consistent with the ledger at
    /// `client_known_version`.
    pub fn get_consistency_proof(
        &self,
        client_known_version: Version,
        ledger_version: Version,
    ) -> Result<AccumulatorConsistencyProof> {
        // Versions are 0-based; `+ 1` converts each to a leaf count.
        Accumulator::get_consistency_proof(self, ledger_version + 1, client_known_version + 1)
    }
    /// From left to right, get frozen subtree root hashes of the transaction accumulator.
    pub fn get_ledger_frozen_subtree_hashes(&self, version: Version) -> Result<Vec<HashValue>> {
        Accumulator::get_frozen_subtree_hashes(self, version + 1)
    }
    /// Write `txn_info` to `batch`. Assigns `version` as the version number of the
    /// transaction. Returns the new accumulator root hash after the append.
    pub fn put_tx_info(
        &self,
        version: Version,
        tx_info: ChannelTransactionInfo,
        batch: &mut SchemaBatch,
    ) -> Result<HashValue> {
        let tx_info_hash = tx_info.hash();
        // Appending produces the new root plus the accumulator node writes,
        // which are staged into the same batch as the transaction info.
        let (root_hash, writes) = Accumulator::append(self, version, &[tx_info_hash])?;
        batch.put::<ChannelTransactionInfoSchema>(&version, &tx_info)?;
        for (pos, hash) in writes.iter() {
            batch.put::<ChannelTransactionAccumulatorSchema>(pos, hash)?;
        }
        Ok(root_hash)
    }
    /// Write `ledger_info` to `cs`.
    pub fn put_ledger_info(&self, ledger_info: &LedgerInfo, cs: &mut SchemaBatch) -> Result<()> {
        cs.put::<LedgerInfoSchema>(&ledger_info.epoch(), ledger_info)
    }
}
// Merkle accumulator over channel transaction infos, reading node hashes
// through the `HashReader` impl below.
type Accumulator<T> = MerkleAccumulator<LedgerStore<T>, ChannelTransactionAccumulatorHasher>;
impl<S> HashReader for LedgerStore<S>
where
    S: SchemaDB,
{
    /// Reads the accumulator node hash at `position`, erroring when the
    /// node has not been written.
    fn get(&self, position: Position) -> Result<HashValue> {
        self.db
            .get::<ChannelTransactionAccumulatorSchema>(&position)?
            .ok_or_else(|| format_err!("{} does not exist.", position))
    }
}
|
use super::ty::{IntoIri, IriRanges};
use locate::InputLocate;
use locate::WithPos;
use nom::{
branch::alt,
bytes::complete::tag,
character::complete::{alpha1, char, digit0, digit1, hex_digit1},
combinator::{map, opt},
error::{convert_error, ErrorKind, ParseError, VerboseError},
multi::{count, many0, many1, many_m_n},
sequence::{preceded, terminated, tuple},
AsChar, Compare, Err, IResult, InputIter, InputLength, InputTake, InputTakeAtPosition, Slice,
};
use std::ops::{Range, RangeFrom};
pub mod error;
mod locate;
/// A parsed value plus the byte offsets (`start..end`) it covered in the
/// original input.
struct Spanned<T> {
    start: usize,
    end: usize,
    value: T,
}
impl<T> Spanned<T> {
    /// Length of the span in input units.
    #[inline]
    fn len(&self) -> usize {
        self.end - self.start
    }
    /// Keeps the span boundaries but swaps in a different value.
    #[inline]
    fn with_value<U>(&self, value: U) -> Spanned<U> {
        Spanned {
            start: self.start,
            end: self.end,
            value,
        }
    }
    /// Replaces the value with the first `len()` items taken from `input`.
    #[inline]
    fn input<U: InputTake>(&self, input: &U) -> Spanned<U> {
        Spanned {
            start: self.start,
            end: self.end,
            value: input.take(self.len()),
        }
    }
    /// Converts the span into a plain `start..end` range.
    fn into_range(self) -> Range<usize> {
        self.start..self.end
    }
}
/// Extension trait for attaching a context label to a parse error result.
trait IResultExt<I, T, E: ParseError<I>>: Sized {
    fn add_err_context(self, input: I, context: &'static str) -> Self;
}
impl<I, T, E: ParseError<I>> IResultExt<I, T, E> for IResult<I, T, E> {
    #[inline]
    fn add_err_context(self, input: I, context: &'static str) -> Self {
        // Ok and Incomplete pass through untouched; both error variants get
        // an extra context frame attached at `input`.
        match self {
            Ok(o) => Ok(o),
            Err(Err::Incomplete(i)) => Err(Err::Incomplete(i)),
            Err(Err::Error(e)) => Err(Err::Error(E::add_context(input, context, e))),
            Err(Err::Failure(e)) => Err(Err::Failure(E::add_context(input, context, e))),
        }
    }
}
// fn position<T, E: ParseError<T>>(input: T) -> IResult<T, usize, E>
// where
// T: InputLocate,
// {
// let offset = input.offset();
// Ok((input, offset))
// }
/// Wraps `parser` so that its output is paired with the input offsets it
/// consumed (before/after positions from `InputLocate`).
fn spanned<I, T, E: ParseError<I>>(
    parser: impl Fn(I) -> IResult<I, T, E>,
) -> impl Fn(I) -> IResult<I, Spanned<T>, E>
where
    I: InputLocate,
{
    move |input| {
        let start = input.offset();
        parser(input).map(|(rest, value)| {
            let end = rest.offset();
            (rest, Spanned { start, end, value })
        })
    }
}
/// Wraps `parser`, discarding its output value and yielding `()` instead.
fn ignore<I, T, E: ParseError<I>>(
    parser: impl Fn(I) -> IResult<I, T, E>,
) -> impl Fn(I) -> IResult<I, (), E> {
    move |input| parser(input).map(|(rest, _)| (rest, ()))
}
fn exact<I: InputLength + Clone, T, E: ParseError<I>>(
parser: impl Fn(I) -> IResult<I, T, E>,
) -> impl Fn(I) -> IResult<I, T, E> {
move |input| {
let cloned = input.clone();
match parser(input) {
Ok((i, v)) => {
if i.input_len() == 0 {
Ok((i, v))
} else {
Err(Err::Error(E::from_error_kind(i, ErrorKind::NonEmpty)))
}
}
Err(e) => Err(e).add_err_context(cloned, "exact"),
}
}
}
/// Expands to a boolean expression that is `true` when `$chr` falls inside
/// any of the given inclusive `$low => $hi` ranges.
macro_rules! between {
    ($chr:expr, $low:expr => $hi:expr,) => {
        // Base case: a single inclusive range check. The previous expansion
        // was `if cond { true } else { false }`; the bare condition is the
        // idiomatic equivalent.
        $chr >= $low && $chr <= $hi
    };
    ($chr:expr, $low:expr => $hi:expr, $($restlow:expr => $resthi:expr,)+) => {
        // Recursive case: short-circuit across the remaining ranges.
        ($chr >= $low && $chr <= $hi) || between!($chr, $($restlow => $resthi,)+)
    };
}
/// Generates a module of parsers for a character class defined by a boolean
/// predicate: `is` / `is_not` tests, plus `single`, `many0` and `many1`
/// parsers over that class.
macro_rules! char_parsers {
    (mod $name:ident($item:ident: impl AsChar) -> bool $body:block) => {
        #[allow(dead_code)]
        mod $name {
            use super::*;
            // The user-supplied membership predicate.
            pub(super) fn is($item: impl AsChar) -> bool $body
            #[inline]
            pub(super) fn is_not(item: impl AsChar) -> bool {
                !is(item)
            }
            // Consumes exactly one character of the class.
            pub(super) fn single<T, E: ParseError<T>>(input: T) -> IResult<T, char, E>
            where
                T: Slice<RangeFrom<usize>> + InputIter + Clone,
                <T as InputIter>::Item: AsChar,
            {
                match input.iter_elements().next().map(|t| {
                    let c = t.as_char();
                    let b = is(c);
                    (c, b)
                }) {
                    // Slice past the character's UTF-8 length, not just 1 byte.
                    Some((c, true)) => Ok((input.slice(c.len()..), c)),
                    _ => Err(
                        Err::Error(
                            E::add_context(
                                input.clone(),
                                stringify!($name::single),
                                E::from_error_kind(input, ErrorKind::NoneOf))
                        )
                    )
                }
            }
            // Consumes zero or more characters of the class (never fails).
            pub(super) fn many0<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
            where
                T: InputTakeAtPosition + InputLength,
                <T as InputTakeAtPosition>::Item: AsChar,
            {
                input.split_at_position_complete(is_not)
                    .add_err_context(input, stringify!($name::many0))
            }
            // Consumes one or more characters of the class.
            pub(super) fn many1<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
            where
                T: InputTakeAtPosition + InputLength,
                <T as InputTakeAtPosition>::Item: AsChar,
            {
                input.split_at_position1_complete(is_not, ErrorKind::NoneOf)
                    .add_err_context(input, stringify!($name::many1))
            }
        }
    };
}
/// Shorthand: defines a `char_parsers!` module whose class is a union of
/// inclusive character ranges (via `between!`).
macro_rules! parse_between {
    ($name:ident, $($restlow:expr => $resthi:expr,)+) => {
        char_parsers! {
            mod $name(item: impl AsChar) -> bool {
                let chr = item.as_char();
                between!(chr, $($restlow => $resthi,)+)
            }
        }
    };
}
// Private-use Unicode code points; referenced only by `iquery` below.
parse_between!(iprivate,
    '\u{E000}' => '\u{F8FF}',
    '\u{F0000}' => '\u{FFFFD}',
    '\u{100000}' => '\u{10FFFD}',
);
// Non-ASCII Unicode ranges accepted by `ucschar` (used by `iunreserved`).
// Note the last range intentionally starts at U+E1000.
parse_between!(ucschar,
    '\u{A0}' => '\u{D7FF}',
    '\u{F900}' => '\u{FDCF}',
    '\u{FDF0}' => '\u{FFEF}',
    '\u{10000}' => '\u{1FFFD}',
    '\u{20000}' => '\u{2FFFD}',
    '\u{30000}' => '\u{3FFFD}',
    '\u{40000}' => '\u{4FFFD}',
    '\u{50000}' => '\u{5FFFD}',
    '\u{60000}' => '\u{6FFFD}',
    '\u{70000}' => '\u{7FFFD}',
    '\u{80000}' => '\u{8FFFD}',
    '\u{90000}' => '\u{9FFFD}',
    '\u{A0000}' => '\u{AFFFD}',
    '\u{B0000}' => '\u{BFFFD}',
    '\u{C0000}' => '\u{CFFFD}',
    '\u{D0000}' => '\u{DFFFD}',
    '\u{E1000}' => '\u{EFFFD}',
);
char_parsers! {
    // iunreserved: ASCII alphanumerics, "-" "." "_" "~", or any `ucschar`.
    mod iunreserved(item: impl AsChar) -> bool {
        let c = item.as_char();
        if c.is_alphanum() {
            true
        } else {
            match c {
                '-' | '.' | '_' | '~' => true,
                c => ucschar::is(c)
            }
        }
    }
}
/// Parses a percent-encoded octet (`%` followed by two hex digits) and
/// returns the decoded byte value.
fn pct_encoded<T, E: ParseError<T>>(input: T) -> IResult<T, u8, E>
where
    T: Slice<RangeFrom<usize>> + InputIter + Clone,
    <T as InputIter>::Item: AsChar,
{
    #[inline]
    fn hex_digit(c: char) -> u8 {
        // Caller guarantees `c` is a hex digit, so unwrap cannot fail here.
        c.to_digit(16).unwrap() as u8
    }
    let mut iter = input
        .iter_indices()
        .map(|(pos, c)| (pos, AsChar::as_char(c)));
    // Manually walk '%', hexdig, hexdig; any deviation yields None.
    let result = match iter.next() {
        Some((_, '%')) => match iter.next() {
            Some((_, c1)) if c1.is_hex_digit() => match iter.next() {
                Some((pos, c2)) if c2.is_hex_digit() => {
                    // Advance past the second digit's full UTF-8 length.
                    let pos = pos + c2.len();
                    let n1 = hex_digit(c1);
                    let n2 = hex_digit(c2);
                    Some((pos, (n1 << 4) + n2))
                }
                _ => None,
            },
            _ => None,
        },
        _ => None,
    };
    match result {
        Some((pos, num)) => Ok((input.slice(pos..), num)),
        None => Err(Err::Error(E::from_error_kind(
            input.clone(),
            ErrorKind::Tag,
        )))
        .add_err_context(input, "pct_encoded"),
    }
}
char_parsers! {
    // sub-delims: "!" "$" "&" "'" "(" ")" "*" "+" "," ";" "="
    mod sub_delims(item: impl AsChar) -> bool {
        let c = item.as_char();
        match c {
            '!' | '$' | '&' | '\'' | '(' | ')' | '*' | '+' | ',' | ';' | '=' => true,
            _ => false,
        }
    }
}
char_parsers! {
    // gen-delims: ":" "/" "?" "#" "[" "]" "@"
    mod gen_delims(item: impl AsChar) -> bool {
        let c = item.as_char();
        match c {
            ':' | '/' | '?' | '#' | '[' | ']' | '@' => true,
            _ => false,
        }
    }
}
char_parsers! {
    // sub_delims | gen_delims
    mod reserved(item: impl AsChar) -> bool {
        let c = item.as_char();
        match c {
            '!' | '$' | '&' | '\'' | '(' | ')' | '*' | '+' | ',' | ';'
            | '=' | ':' | '/' | '?' | '#' | '[' | ']' | '@' => true,
            _ => false,
        }
    }
}
char_parsers! {
    // unreserved: ASCII alphanumerics plus "-" "." "_" "~" (ASCII-only
    // counterpart of `iunreserved`).
    mod unreserved(item: impl AsChar) -> bool {
        let c = item.as_char();
        if c.is_alphanum() {
            true
        } else {
            match c {
                '-' | '.' | '_' | '~' => true,
                _ => false,
            }
        }
    }
}
/// Parses a decimal octet (0-255) greedily: digits are consumed while the
/// running value stays below 256, and a leading '0' matches exactly one
/// digit (no leading zeros).
fn dec_octet<T, E: ParseError<T>>(input: T) -> IResult<T, u8, E>
where
    T: Slice<RangeFrom<usize>> + InputIter + Clone,
    <T as InputIter>::Item: AsChar,
{
    let mut consumed = 0;
    let mut value = 0u32;
    for (idx, chr) in input.iter_indices().map(|(i, c)| (i, c.as_char())) {
        if !chr.is_dec_digit() {
            break;
        }
        let candidate = value * 10 + chr.to_digit(10).unwrap();
        if candidate > 255 {
            // Accepting this digit would overflow an octet; stop before it.
            break;
        }
        value = candidate;
        consumed = idx + chr.len();
        if candidate == 0 {
            // "0" stands alone: never extend past a leading zero.
            break;
        }
    }
    if consumed > 0 {
        Ok((input.slice(consumed..), value as u8))
    } else {
        Err(Err::Error(E::from_error_kind(
            input.clone(),
            ErrorKind::Digit,
        )))
        .add_err_context(input, "dec_octet")
    }
}
/// Parses a dotted-quad IPv4 address (four `dec_octet`s separated by '.')
/// and returns the matched input slice.
fn ip4_address<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>> + InputTake + InputIter + InputLocate + Clone,
    <T as InputIter>::Item: AsChar,
{
    let cloned = input.clone();
    let dot = |i| char('.')(i);
    match spanned(tuple((
        dec_octet, dot, dec_octet, dot, dec_octet, dot, dec_octet,
    )))(input)
    {
        // Re-slice the original input so the result covers the whole match.
        Ok((rest, spanned)) => Ok((rest, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ip4_address"),
    }
}
/// Parses one to four hex digits (one 16-bit IPv6 group) and returns the
/// matched slice.
fn h16<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: InputTake + InputIter + Clone,
    <T as InputIter>::Item: AsChar,
{
    let cloned = input.clone();
    match input
        .iter_indices()
        .map(|(i, c)| (i, c.as_char()))
        .take_while(|(_, c)| c.is_hex_digit())
        // Cap at four digits; `last()` gives the final accepted one.
        .take(4)
        .last()
    {
        Some((i, c)) => {
            let pos = i + c.len();
            Ok(cloned.take_split(pos))
        }
        // No hex digit at all.
        None => Err(Err::Error(E::from_error_kind(
            input.clone(),
            ErrorKind::HexDigit,
        )))
        .add_err_context(input, "h16"),
    }
}
/// Parses the least-significant 32 bits of an IPv6 address: either
/// `h16 ":" h16` or an IPv4 dotted quad.
fn ls32<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>> + InputIter + InputLocate + InputTake + Clone,
    <T as InputIter>::Item: AsChar,
{
    // Local helper for the `h16 ":" h16` alternative.
    fn h32<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
    where
        T: Slice<RangeFrom<usize>> + InputIter + InputLocate + InputTake + Clone,
        <T as InputIter>::Item: AsChar,
    {
        let cloned = input.clone();
        match spanned(tuple((h16, char(':'), h16)))(input) {
            Ok((rest, spanned)) => Ok((rest, cloned.take(spanned.len()))),
            Err(e) => Err(e).add_err_context(cloned, "ls32::h32"),
        }
    }
    let cloned = input.clone();
    alt((h32, ip4_address))(input).add_err_context(cloned, "ls32")
}
// IPv6address = 6( h16 ":" ) ls32
// / "::" 5( h16 ":" ) ls32
// / [ h16 ] "::" 4( h16 ":" ) ls32
// / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
// / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
// / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
// / [ *4( h16 ":" ) h16 ] "::" ls32
// / [ *5( h16 ":" ) h16 ] "::" h16
// / [ *6( h16 ":" ) h16 ] "::"
/// Parses an IPv6 address, trying each of the nine grammar lines from the
/// comment above in order, and returns the matched slice.
fn ip6_address<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputIter
        + InputLocate
        + InputTake
        + Clone
        + PartialEq
        + Compare<&'static str>,
    <T as InputIter>::Item: AsChar,
{
    // helpers
    let h16colon = |input| ignore(preceded(h16, char(':')))(input);
    let dbl_col = |input| tag("::")(input);
    // `max(f, n)` = at most n repetitions of f.
    let max = |f, n| many_m_n(0, n, f);
    #[rustfmt::skip]
    let addr = {
        // lines in spec
        let l1 = ignore(tuple((count(h16colon, 6), ls32)));
        let l2 = ignore(tuple((dbl_col, count(h16colon, 5), ls32)));
        let l3 = ignore(tuple((opt(h16), dbl_col, count(h16colon, 4), ls32)));
        let l4 = ignore(tuple((opt(tuple((max(h16colon, 1), h16))), dbl_col, count(h16colon, 3), ls32)));
        let l5 = ignore(tuple((opt(tuple((max(h16colon, 2), h16))), dbl_col, count(h16colon, 2), ls32)));
        let l6 = ignore(tuple((opt(tuple((max(h16colon, 3), h16))), dbl_col, h16colon, ls32)));
        let l7 = ignore(tuple((opt(tuple((max(h16colon, 4), h16))), dbl_col, ls32)));
        let l8 = ignore(tuple((opt(tuple((max(h16colon, 5), h16))), dbl_col, h16)));
        let l9 = ignore(tuple((opt(tuple((max(h16colon, 6), h16))), dbl_col)));
        // ipv6 address
        alt((l1, l2, l3, l4, l5, l6, l7, l8, l9))
    };
    let cloned = input.clone();
    match spanned(addr)(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ip6_address"),
    }
}
/// Parses an IPvFuture literal: `"v" 1*HEXDIG "." 1*( unreserved /
/// sub-delims / ":" )`, returning the matched slice.
fn ipvfuture_address<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputIter
        + InputLocate
        + InputLength
        + InputTake
        + InputTakeAtPosition
        + Clone
        + PartialEq
        + Compare<&'static str>,
    <T as InputIter>::Item: AsChar,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    let p1 = char('v');
    let p2 = hex_digit1;
    let p3 = char('.');
    let p4_alt = alt((
        ignore(unreserved::many1),
        ignore(sub_delims::many1),
        ignore(char(':')),
    ));
    // One or more of the tail alternatives.
    let p4 = many_m_n(1, std::usize::MAX, p4_alt);
    let full = ignore(tuple((p1, p2, p3, p4)));
    let cloned = input.clone();
    match spanned(full)(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ipvfuture_address"),
    }
}
/// Parses a bracketed IP literal: `"[" ( IPv6address / IPvFuture ) "]"`,
/// returning the matched slice including the brackets.
fn ip_literal<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputIter
        + InputLocate
        + InputLength
        + InputTake
        + InputTakeAtPosition
        + Clone
        + PartialEq
        + Compare<&'static str>,
    <T as InputIter>::Item: AsChar,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    let inner = alt((ip6_address, ipvfuture_address));
    let outer = tuple((char('['), inner, char(']')));
    let cloned = input.clone();
    match spanned(outer)(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ip_literal"),
    }
}
/// Parses a (possibly empty) run of decimal digits as a port, returning it
/// with its span. `digit0` accepts zero digits, so this never fails on an
/// empty port.
fn port<T, E: ParseError<T>>(input: T) -> IResult<T, Spanned<T>, E>
where
    T: InputTakeAtPosition + InputLocate + Clone,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    let cloned = input.clone();
    spanned(digit0)(input).add_err_context(cloned, "port")
}
/// Parses a scheme: one leading alphabetic character followed by any number
/// of alphanumerics or "+", "-", ".". Returns the matched slice with span.
fn scheme<T, E: ParseError<T>>(input: T) -> IResult<T, Spanned<T>, E>
where
    T: Slice<RangeFrom<usize>>
        + InputIter
        + InputLocate
        + InputLength
        + InputTake
        + InputTakeAtPosition
        + Clone
        + PartialEq
        + std::fmt::Debug
        + Compare<&'static str>,
    <T as InputIter>::Item: AsChar,
    <T as InputTakeAtPosition>::Item: AsChar,
{
    // Characters permitted after the first letter.
    let c2 = alt((
        ignore(alpha1),
        ignore(digit1),
        ignore(char('+')),
        ignore(char('-')),
        ignore(char('.')),
    ));
    let mult = ignore(many0(c2));
    let full = tuple((alpha1, mult));
    let cloned = input.clone();
    match spanned(full)(input) {
        Ok((i, spanned)) => Ok((i, spanned.with_value(cloned.take(spanned.len())))),
        Err(e) => Err(e).add_err_context(cloned, "scheme"),
    }
}
/// Parsers for `ipchar` (path characters): `iunreserved / pct-encoded /
/// sub-delims / ":" / "@"`. Percent-encoded octets are handled separately
/// from the single-character class.
#[allow(dead_code)]
mod ipchar {
    use super::*;
    char_parsers! {
        // The non-percent-encoded subset of ipchar.
        mod ipchar_chars(item: impl AsChar) -> bool {
            let c = item.as_char();
            iunreserved::is(c) || sub_delims::is(c) || c == ':' || c == '@'
        }
    }
    // Parses one pct-encoded triplet, returning the matched input slice
    // (not the decoded byte).
    fn pct<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
    where
        T: Slice<RangeFrom<usize>> + InputIter + InputTake + InputLocate + Clone,
        <T as InputIter>::Item: AsChar,
    {
        let cloned = input.clone();
        match spanned(pct_encoded)(input) {
            Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
            Err(e) => Err(e),
        }
    }
    // Parses a single ipchar (one class character or one pct-encoded triplet).
    pub(super) fn single<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
    where
        T: Slice<RangeFrom<usize>> + InputIter + InputTake + InputLocate + Clone,
        <T as InputIter>::Item: AsChar,
    {
        let cloned = input.clone();
        let ip_char = move |input| match ipchar_chars::single(input) {
            Ok((i, _)) => Ok((i, cloned.take(1))),
            Err(e) => Err(e),
        };
        let cloned = input.clone();
        alt((ip_char, pct))(input).add_err_context(cloned, "ipchar::single")
    }
    // Parses zero or more ipchars, returning the whole matched slice.
    pub(super) fn many0<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
    where
        T: Slice<RangeFrom<usize>>
            + InputTakeAtPosition
            + InputIter
            + InputLength
            + InputTake
            + InputLocate
            + Clone
            + PartialEq
            + std::fmt::Debug,
        <T as InputTakeAtPosition>::Item: AsChar,
        <T as InputIter>::Item: AsChar,
    {
        let inner = alt((ipchar_chars::many1, pct));
        let cloned = input.clone();
        match spanned(super::many0(inner))(input) {
            Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
            Err(e) => Err(e).add_err_context(cloned, "ipchar::many0"),
        }
    }
    // Parses one or more ipchars, returning the whole matched slice.
    pub(super) fn many1<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
    where
        T: Slice<RangeFrom<usize>>
            + InputTakeAtPosition
            + InputIter
            + InputLength
            + InputTake
            + InputLocate
            + Clone
            + PartialEq,
        <T as InputTakeAtPosition>::Item: AsChar,
        <T as InputIter>::Item: AsChar,
    {
        let inner = alt((ipchar_chars::many1, pct));
        let cloned = input.clone();
        match spanned(super::many1(inner))(input) {
            Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
            Err(e) => Err(e).add_err_context(cloned, "ipchar::many1"),
        }
    }
}
/// Parses a fragment: zero or more of ( ipchar / "/" / "?" ), returning
/// the matched slice.
fn ifragment<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let inner = alt((ignore(ipchar::many1), ignore(char('/')), ignore(char('?'))));
    let cloned = input.clone();
    match spanned(many0(inner))(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ifragment"),
    }
}
/// Parses a query: zero or more of ( ipchar / iprivate / "/" / "?" ),
/// returning the matched slice. Unlike fragments, queries also admit
/// private-use code points.
fn iquery<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let inner = alt((
        ignore(ipchar::many1),
        ignore(iprivate::many1),
        ignore(char('/')),
        ignore(char('?')),
    ));
    let cloned = input.clone();
    match spanned(many0(inner))(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "iquery"),
    }
}
// // non-zero-length segment without any colon ":"
// fn isegment_nz_nc<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + Clone
// + PartialEq,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// let inner = alt((
// ignore(iunreserved::many1),
// ignore(pct_encoded),
// ignore(sub_delims::many1),
// ignore(char('@')),
// ));
// let cloned = input.clone();
// match spanned(many1(inner))(input) {
// Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
// Err(e) => Err(e),
// }
// }
/// Parses a non-zero-length path segment (one or more ipchars).
fn isegment_nz<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let cloned = input.clone();
    ipchar::many1(input).add_err_context(cloned, "isegment_nz")
}
/// Parses a possibly empty path segment (zero or more ipchars).
fn isegment<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let cloned = input.clone();
    ipchar::many0(input).add_err_context(cloned, "isegment")
}
/// Parses an empty path: always succeeds, consuming nothing and returning
/// a zero-length slice of the input.
fn ipath_empty<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: InputTake,
{
    let result = input.take(0);
    Ok((input, result))
}
/// Parses a rootless path: `isegment-nz *( "/" isegment )` — it begins with
/// a segment rather than a slash.
fn ipath_rootless<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let inner = tuple((char('/'), isegment));
    let full = tuple((isegment_nz, many0(inner)));
    let cloned = input.clone();
    match spanned(full)(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ipath_rootless"),
    }
}
// fn ipath_noscheme<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + Clone
// + PartialEq
// + std::fmt::Debug,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// let inner = tuple((char('/'), isegment));
// let full = tuple((isegment_nz_nc, many0(inner)));
// let cloned = input.clone();
// match spanned(full)(input) {
// Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
// Err(e) => Err(e),
// }
// }
/// Parses `ipath-absolute` (RFC 3987):
/// `"/" [ isegment-nz *( "/" isegment ) ]` — begins with "/" but not "//".
fn ipath_absolute<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    // "/" [ isegment-nz *( "/" isegment ) ]
    let inner = tuple((char('/'), isegment));
    // Shadowing: `inner` now denotes the whole optional tail after the
    // leading slash.
    let inner = tuple((isegment_nz, many0(inner)));
    let full = tuple((char('/'), opt(inner)));
    let cloned = input.clone();
    // Measure the matched length, then slice that prefix off the original.
    match spanned(full)(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ipath_absolute"),
    }
}
/// Parses `ipath-abempty` (RFC 3987): `*( "/" isegment )` — a path that is
/// either empty or begins with "/". Matches zero characters on no slash.
fn ipath_abempty<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    // *( "/" isegment )
    let inner = tuple((char('/'), isegment));
    let cloned = input.clone();
    // Measure the matched length, then slice that prefix off the original.
    match spanned(many0(inner))(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ipath_abempty"),
    }
}
// fn ipath<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + Clone
// + PartialEq
// + std::fmt::Debug,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// alt((
// ipath_abempty, // begins with "/" or is empty
// ipath_absolute, // begins with "/" but not "//"
// ipath_noscheme, // begins with a non-colon segment
// ipath_rootless, // begins with a segment
// ipath_empty, // zero characters
// ))(input)
// }
/// Parses `ireg-name` (RFC 3987):
/// `*( iunreserved / pct-encoded / sub-delims )` — may match zero bytes.
fn ireg_name<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    // Each alternative's output type differs, so `ignore` erases it before
    // the repetition.
    let inner = alt((
        ignore(iunreserved::many1),
        ignore(pct_encoded),
        ignore(sub_delims::many1),
    ));
    let cloned = input.clone();
    // Measure the matched length, then slice that prefix off the original.
    match spanned(many0(inner))(input) {
        Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
        Err(e) => Err(e).add_err_context(cloned, "ireg_name"),
    }
}
/// Parses `ihost` (RFC 3987): `IP-literal / IPv4address / ireg-name`,
/// returning the consumed span. `ireg_name` can match zero bytes, so the
/// alternative list always finds a (possibly empty) host.
fn ihost<T, E: ParseError<T>>(input: T) -> IResult<T, Spanned<T>, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq
        + Compare<&'static str>,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let cloned = input.clone();
    spanned(alt((ip_literal, ip4_address, ireg_name)))(input).add_err_context(cloned, "ihost")
}
/// Parses `iuserinfo` (RFC 3987):
/// `*( iunreserved / pct-encoded / sub-delims / ":" )`. May be empty; the
/// terminating "@" is handled by the caller (`iauthority`).
fn iuserinfo<T, E: ParseError<T>>(input: T) -> IResult<T, Spanned<T>, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Clone
        + PartialEq,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let inner = alt((
        ignore(iunreserved::many1),
        ignore(pct_encoded),
        ignore(sub_delims::many1),
        ignore(char(':')),
    ));
    let cloned = input.clone();
    // Resolve the recorded span back against the original input.
    match spanned(many0(inner))(input) {
        Ok((i, spanned)) => Ok((i, spanned.input(&cloned))),
        Err(e) => Err(e).add_err_context(cloned, "iuserinfo"),
    }
}
/// Components of an `iauthority` production:
/// `[ iuserinfo "@" ] ihost [ ":" port ]`.
struct Authority<T> {
    // Text before the "@", if present.
    userinfo: Option<Spanned<T>>,
    host: Spanned<T>,
    // Text after the ":", if present.
    port: Option<Spanned<T>>,
    // Span of the entire authority component.
    value: Spanned<T>,
}
/// Parses `iauthority` (RFC 3987): `[ iuserinfo "@" ] ihost [ ":" port ]`,
/// returning each sub-component plus the overall span.
fn iauthority<T, E: ParseError<T>>(input: T) -> IResult<T, Authority<T>, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Compare<&'static str>
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    // [ iuserinfo "@" ] ihost [ ":" port ]
    let userinfo = opt(terminated(iuserinfo, char('@')));
    let host = ihost;
    let port = opt(preceded(char(':'), port));
    let full = tuple((userinfo, host, port));
    let cloned = input.clone();
    match spanned(full)(input) {
        Ok((i, spanned)) => {
            // Overall span of the authority, resolved against the input.
            let value = spanned.input(&cloned);
            let (userinfo, host, port) = spanned.value;
            let authority = Authority {
                userinfo,
                host,
                port,
                value,
            };
            Ok((i, authority))
        }
        Err(e) => Err(e).add_err_context(cloned, "iauthority"),
    }
}
// fn irelative_part<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + Compare<&'static str>
// + Clone
// + PartialEq
// + std::fmt::Debug,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// // "//" iauthority ipath-abempty
// // / ipath-absolute
// let alt1 = ignore(tuple((tag("//"), iauthority, ipath_abempty)));
// let alt2 = ignore(ipath_absolute);
// let cloned = input.clone();
// match spanned(alt((alt1, alt2)))(input) {
// Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
// Err(e) => Err(e),
// }
// }
// fn irelative_ref<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + Compare<&'static str>
// + Clone
// + PartialEq
// + std::fmt::Debug,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// // irelative-part [ "?" iquery ] [ "#" ifragment ]
// let query = opt(preceded(char('?'), spanned(iquery)));
// let fragment = opt(preceded(char('#'), spanned(ifragment)));
// let parser = tuple((irelative_part, query, fragment));
// let cloned = input.clone();
// match spanned(parser)(input) {
// Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
// Err(e) => Err(e),
// }
// }
/// Result of parsing `ihier-part`: an optional authority plus a path span.
struct HierPart<T> {
    // Present only for the "//" authority form.
    authority: Option<Authority<T>>,
    path: Spanned<T>,
    // value: Spanned<T>,
}
/// Parses `ihier-part` (RFC 3987). The four alternatives are tried in
/// order; `ipath_empty` always succeeds, so the path itself cannot fail —
/// only a malformed authority can.
fn ihier_part<T, E: ParseError<T>>(input: T) -> IResult<T, HierPart<T>, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Compare<&'static str>
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    // "//" iauthority ipath-abempty
    // / ipath-absolute
    // / ipath-rootless
    // / ipath-empty
    let alt1 = map(
        tuple((tag("//"), iauthority, spanned(ipath_abempty))),
        |(_, auth, path)| (Some(auth), path),
    );
    let alt2 = map(spanned(ipath_absolute), |path| (None, path));
    let alt3 = map(spanned(ipath_rootless), |path| (None, path));
    let alt4 = map(spanned(ipath_empty), |path| (None, path));
    let parser = alt((alt1, alt2, alt3, alt4));
    let cloned = input.clone();
    match spanned(parser)(input) {
        Ok((i, spanned)) => {
            // let value = spanned.input(&cloned);
            let (authority, path) = spanned.value;
            Ok((
                i,
                HierPart {
                    authority,
                    path,
                    // value,
                },
            ))
        }
        Err(e) => Err(e).add_err_context(cloned, "ihier_part"),
    }
}
// fn absolute_iri<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + Compare<&'static str>
// + Clone
// + PartialEq
// + std::fmt::Debug,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// // scheme ":" ihier-part [ "?" iquery ]
// let query = opt(preceded(char('?'), spanned(iquery)));
// let parser = tuple((scheme, char(':'), ihier_part, query));
// let cloned = input.clone();
// match spanned(parser)(input) {
// Ok((i, spanned)) => Ok((i, cloned.take(spanned.len()))),
// Err(e) => Err(e),
// }
// }
// fn iri_reference<T, E: ParseError<T>>(input: T) -> IResult<T, T, E>
// where
// T: Slice<RangeFrom<usize>>
// + InputTakeAtPosition
// + InputIter
// + InputLength
// + InputTake
// + InputLocate
// + UnspecializedInput
// + Compare<&'static str>
// + Clone
// + PartialEq,
// <T as InputTakeAtPosition>::Item: AsChar,
// <T as InputIter>::Item: AsChar,
// {
// // IRI / irelative-ref
// }
/// All components of a successfully parsed IRI, as spans over the input.
struct IriInfo<T> {
    scheme: Spanned<T>,
    // Absent when the hier-part had no "//" authority form.
    authority: Option<Authority<T>>,
    path: Spanned<T>,
    // Text after "?", if present.
    query: Option<Spanned<T>>,
    // Text after "#", if present.
    fragment: Option<Spanned<T>>,
}
/// Parses a full `IRI` (RFC 3987):
/// `scheme ":" ihier-part [ "?" iquery ] [ "#" ifragment ]`.
fn iri<T, E: ParseError<T>>(input: T) -> IResult<T, IriInfo<T>, E>
where
    T: Slice<RangeFrom<usize>>
        + InputTakeAtPosition
        + InputIter
        + InputLength
        + InputTake
        + InputLocate
        + Compare<&'static str>
        + Clone
        + PartialEq
        + std::fmt::Debug,
    <T as InputTakeAtPosition>::Item: AsChar,
    <T as InputIter>::Item: AsChar,
{
    let query = opt(preceded(char('?'), spanned(iquery)));
    let fragment = opt(preceded(char('#'), spanned(ifragment)));
    let parser = tuple((scheme, char(':'), ihier_part, query, fragment));
    let cloned = input.clone();
    match parser(input) {
        Ok((i, value)) => {
            // Flatten the tuple (discarding the ":" separator) into IriInfo.
            let (scheme, _, hier_part, query, fragment) = value;
            let authority = hier_part.authority;
            let path = hier_part.path;
            let info = IriInfo {
                scheme,
                authority,
                path,
                query,
                fragment,
            };
            Ok((i, info))
        }
        Err(e) => Err(e).add_err_context(cloned, "iri"),
    }
}
/// Parses `s` as an IRI and converts it into the caller's IRI type.
///
/// On failure, the offending source text is attached to the error as
/// context so the message can show where parsing stopped.
pub fn try_parse<I: IntoIri>(s: I) -> Result<<I as IntoIri>::Iri, error::Error> {
    match _try_parse(s.borrow()) {
        Ok(frames) => Ok(s.into_iri(frames)),
        Err(error) => Err(
            error::Context {
                source: s.borrow().into(),
                // Field-init shorthand; `error: error` was redundant.
                error,
            }
            .into(),
        ),
    }
}
/// Parses `iri_str` and returns the byte ranges of each IRI component, or a
/// human-readable error description on failure.
fn _try_parse(iri_str: &str) -> Result<IriRanges, String> {
    let parsed = {
        // `exact` presumably requires the parser to consume the whole input
        // — NOTE(review): inferred from the name; confirm its definition.
        let parser = exact(iri);
        // Wrap the input so spans can later be turned into byte ranges.
        let input = WithPos::new(iri_str);
        parser(input)
    };
    match parsed {
        Ok((_, info)) => {
            let scheme = info.scheme.into_range();
            // Authority sub-components are all present or all absent
            // together with the authority itself.
            let (authority, userinfo, host, port) = match info.authority {
                None => (None, None, None, None),
                Some(a) => (
                    Some(a.value.into_range()),
                    a.userinfo.map(Spanned::into_range),
                    Some(a.host.into_range()),
                    a.port.map(Spanned::into_range),
                ),
            };
            let path = info.path.into_range();
            let query = info.query.map(Spanned::into_range);
            let fragment = info.fragment.map(Spanned::into_range);
            let ranges = IriRanges {
                scheme,
                authority,
                userinfo,
                host,
                port,
                path,
                query,
                fragment,
            };
            Ok(ranges)
        }
        // The input is complete (not streamed), so Incomplete cannot occur.
        Err(Err::Incomplete(_)) => unreachable!(),
        Err(Err::Error(e)) => {
            // Unwrap the position wrappers so nom's convert_error can render
            // a readable trace against the original string.
            let e: VerboseError<_> = e;
            let errors = e
                .errors
                .into_iter()
                .map(|(i, kind)| (i.into_inner(), kind))
                .collect();
            let e = VerboseError { errors };
            Err(convert_error(iri_str, e))
        }
        // NOTE(review): a Failure aborts the process instead of returning
        // Err — consider propagating it like Error.
        Err(Err::Failure(_)) => panic!("parser failure."),
    }
}
|
// Submodule declarations; implementations live in sibling files
// (not visible here).
pub mod strlen;
pub mod environ;
|
use winit::event_loop::{ControlFlow, EventLoop};
use winit::window::Window;
use winit::event::Event;
use winit_input_helper::WinitInputHelper;
use crate::high_level_fighter::{HighLevelFighter, HighLevelSubaction};
use crate::renderer::wgpu_state::WgpuState;
use crate::renderer::draw::draw_frame;
use crate::renderer::camera::Camera;
pub(crate) mod state;
use state::AppState;
/// Pixel format used for the window's swap chain and passed to WgpuState.
pub(crate) const FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Bgra8Unorm;
/// Opens an interactive window displaying hurtboxes and hitboxes
/// Blocks until user closes window
pub fn render_window(high_level_fighter: &HighLevelFighter, subaction_index: usize) {
    let event_loop = EventLoop::new();
    let window = Window::new(&event_loop).unwrap();
    // Clone so the data can be moved into the 'static event-loop closure.
    let high_level_fighter = high_level_fighter.clone();
    let subaction = high_level_fighter.subactions[subaction_index].clone();
    // App::new is async (GPU device setup); block here since the native
    // path has no async runtime.
    let mut app = futures::executor::block_on(App::new(window, subaction));
    // winit's run() takes over the thread and never returns.
    event_loop.run(move |event, _, control_flow| {
        app.update(event, control_flow);
    });
}
/// Adds an interactive element to the webpage displaying hurtboxes and hitboxes
#[cfg(target_arch = "wasm32")]
pub async fn render_window_wasm(subaction: HighLevelSubaction) {
    use winit::platform::web::WindowExtWebSys;
    use wasm_bindgen::prelude::*;
    use wasm_bindgen::JsCast;
    use web_sys::HtmlElement;
    let event_loop = EventLoop::new();
    let window = Window::new(&event_loop).unwrap();
    // Attach the winit-created canvas under the page's `#visualiser` element.
    let document = web_sys::window().unwrap().document().unwrap();
    let visualiser_span = document.get_element_by_id("visualiser").unwrap();
    visualiser_span.append_child(&web_sys::Element::from(window.canvas())).unwrap();
    // Wire a click handler onto the `#foo` button.
    let button = document.get_element_by_id("foo").unwrap();
    let button_move = button.clone();
    let do_thing = Closure::wrap(
        Box::new(move || {
            button_move.set_inner_html("何も");
        }) as Box<dyn FnMut()>
    );
    button
        .dyn_ref::<HtmlElement>()
        .unwrap()
        .set_onclick(Some(do_thing.as_ref().unchecked_ref()));
    // NOTE(review): `do_thing` must stay alive as long as the onclick
    // handler can fire; it is kept alive here only because event_loop.run
    // below never returns — confirm this holds on the wasm path.
    let mut app = App::new(window, subaction).await;
    event_loop.run(move |event, _, control_flow| {
        app.update(event, control_flow);
    });
}
/// Glues together:
/// * AppState: All application logic goes in here
/// * WgpuState: All rendering logic goes in here
/// * Other bits and pieces missing from WgpuState because they aren't needed for rendering to GIF.
pub struct App {
    wgpu_state: WgpuState,
    app_state: AppState,
    // Aggregates winit events into per-frame input queries.
    input: WinitInputHelper,
    // Kept alive because the wgpu surface borrows from it at creation.
    _window: Window,
    surface: wgpu::Surface,
    swap_chain: wgpu::SwapChain,
    // Retained so the swap chain can be rebuilt on resize.
    swap_chain_descriptor: wgpu::SwapChainDescriptor,
    // The subaction (animation) currently being displayed.
    subaction: HighLevelSubaction,
}
impl App {
    /// Creates the wgpu surface/swap-chain for `_window` and positions the
    /// camera to frame `subaction`.
    pub async fn new(_window: Window, subaction: HighLevelSubaction) -> App {
        let input = WinitInputHelper::new();
        let size = _window.inner_size();
        let swap_chain_descriptor = wgpu::SwapChainDescriptor {
            // Fifo == vsync; always supported.
            present_mode: wgpu::PresentMode::Fifo,
            usage: wgpu::TextureUsage::RENDER_ATTACHMENT,
            format: FORMAT,
            width: size.width,
            height: size.height,
        };
        let instance = wgpu::Instance::new(wgpu::BackendBit::PRIMARY);
        // SAFETY (wgpu contract): the window must outlive the surface; both
        // are stored together in the returned App.
        let surface = unsafe { instance.create_surface(&_window) };
        let wgpu_state = WgpuState::new(instance, Some(&surface), FORMAT).await;
        let swap_chain = wgpu_state.device.create_swap_chain(&surface, &swap_chain_descriptor);
        let camera = Camera::new(
            &subaction,
            swap_chain_descriptor.width as u16,
            swap_chain_descriptor.height as u16,
        );
        let app_state = AppState::new(camera);
        App { wgpu_state, app_state, input, _window, surface, swap_chain, swap_chain_descriptor, subaction }
    }
    /// Winit event callback: steps application logic and redraws once per
    /// event batch (input.update returning true signals end-of-batch —
    /// per winit_input_helper's API; confirm against its docs).
    pub fn update(&mut self, event: Event<()>, control_flow: &mut ControlFlow) {
        if self.input.update(&event) {
            if self.input.quit() {
                *control_flow = ControlFlow::Exit;
            }
            self.app_state.update(&self.input, &self.subaction);
            // Rebuild the swap chain whenever the window is resized.
            if let Some(size) = self.input.window_resized() {
                self.swap_chain_descriptor.width = size.width;
                self.swap_chain_descriptor.height = size.height;
                self.swap_chain = self.wgpu_state.device.create_swap_chain(&self.surface, &self.swap_chain_descriptor);
            }
            {
                // Render one frame into the swap chain's current texture.
                let framebuffer = self.swap_chain.get_current_frame().unwrap().output;
                let command_encoder = draw_frame(
                    &mut self.wgpu_state,
                    &framebuffer.view,
                    self.swap_chain_descriptor.width,
                    self.swap_chain_descriptor.height,
                    self.app_state.perspective,
                    self.app_state.wireframe,
                    self.app_state.render_ecb,
                    &self.app_state.invulnerable_type,
                    &self.subaction,
                    self.app_state.frame_index,
                    &self.app_state.camera,
                );
                self.wgpu_state.queue.submit(Some(command_encoder.finish()));
            }
        }
    }
}
|
use super::{arguments::_parse_args, io::_writeOutput, platforms::_platforms};
use ansi_term::Colour;
use futures::{stream::iter, StreamExt};
use reqwest::Client;
use tokio;
#[tokio::main]
pub async fn _takeover(hosts: Vec<String>, threads: usize) -> std::io::Result<()> {
let client = &Client::builder()
.danger_accept_invalid_certs(true)
.build()
.unwrap();
let args = &_parse_args();
let fetches = iter(hosts.into_iter().map(|url| async move {
match client.get(&url).send().await {
Ok(resp) => match resp.text().await {
Ok(text) => {
let platformName = _platforms(text);
match platformName == "None" {
true => {
if args.is_present("verbose") {
println!(
"[{}] {}!",
Colour::Blue.bold().paint("Not Vulnerable"),
url
);
}
}
_ => {
println!(
"[{}]\t{} at {}!",
Colour::Red.bold().paint(&platformName),
Colour::White.bold().paint("Possible Sub-domain Takeover"),
url
);
if args.is_present("output") {
let outputData = format!("[{}] {}\n", platformName, url);
let fileName = args.value_of("output").unwrap();
_writeOutput(fileName.to_string(), outputData);
}
}
}
}
Err(_) => {
if args.is_present("verbose") {
println!(
"[{}]\tAn error occured for [{}].",
Colour::Green.bold().paint("ERROR"),
Colour::White.bold().paint(url)
)
}
}
},
Err(_) => {
if args.is_present("verbose") {
println!(
"[{}]\tAn error occured for [{}].",
Colour::Green.bold().paint("ERROR"),
Colour::White.bold().paint(url)
)
}
}
}
}))
.buffer_unordered(threads)
.collect::<Vec<()>>();
fetches.await;
/*
In case you want to know how it works, here is a more simpler code explaining the overall workflow:
let body = response.text().await?;
if body.contains("<p><strong>There isn't a GitHub Pages site here.</strong></p>") {
println!("GitHub Pages Sub-domain Takeover seems possible!");
}
*/
Ok(())
}
|
use mpi::topology::Communicator;
use crate::prelude::*;
/// A piece of a session-typed protocol that can be materialized.
///
/// # Safety
/// NOTE(review): the exact invariant `build_part` callers must uphold is not
/// visible here — presumably values may only be constructed as part of a
/// complete, well-formed protocol. Confirm against the crate docs.
pub unsafe trait ProtocolPart {
    unsafe fn build_part() -> Self;
}
/// `Eps` marks the end of a protocol: the empty continuation.
pub struct Eps;
unsafe impl ProtocolPart for Eps {
    unsafe fn build_part() -> Self {
        // Zero-sized; nothing to construct.
        Self
    }
}
impl<C: Communicator> Session<Eps, C> {
    /// Consumes a finished session, returning the underlying communicator.
    pub fn done(self) -> C {
        self.comm
    }
}
|
extern crate structopt;
use structopt::StructOpt;
// Top-level CLI definition for the `kvs` binary. (`//` comments are used
// here because `///` doc comments on structopt items become --help text and
// would change the program's output.)
#[derive(StructOpt)]
#[structopt(name = "kvs", about = "Key value storage")]
struct Cli {
    #[structopt(subcommand)]
    cmd: Command,
}
// Subcommands accepted by the CLI; structopt renders the existing `///`
// lines below as each subcommand's --help description.
#[derive(StructOpt)]
enum Command {
    #[structopt(name = "set")]
    /// Set and modify key:value pairings
    Set { key: String, val: String },
    #[structopt(name = "get")]
    /// Access stored key:value pairings
    Get { key: String },
    #[structopt(name = "rm")]
    /// Remove stored key:value pairings
    Remove { key: String },
}
fn main() {
let app = Cli::from_args();
match app.cmd {
set => {
eprintln!("unimplemented");
panic!()
}
get => {
eprintln!("unimplemented");
panic!();
}
remove => {
eprintln!("unimplemented");
panic!();
}
};
}
|
use std::fs::File;
use std::io::prelude::*;
use std::io::{Result, SeekFrom};
use std::mem::size_of;
use std::path::Path;
/// Reinterprets a `&T` as a `&U` of the same size.
///
/// NOTE(review): only sizes are checked. Casting to a more strictly aligned
/// `U` (e.g. `[u8; N]` -> a struct) can yield a misaligned reference, which
/// is undefined behavior, and the bit pattern of `T` must be valid for `U`.
/// Callers in this file rely on byte-buffer casts working on the host.
fn cast<T, U>(r: &T) -> &U {
    assert_eq!(size_of::<T>(), size_of::<U>());
    // SAFETY: sizes match (asserted); alignment and bit-validity of `U` are
    // the caller's responsibility (see note above).
    unsafe { &*(r as *const T as *const U) }
}
/// Mutable counterpart of `cast`: reinterprets `&mut T` as `&mut U` of the
/// same size. The same alignment/validity caveats apply (see `cast`... the
/// note is repeated here so this comment stands alone): alignment is NOT
/// checked, so this can create a misaligned reference — undefined behavior.
fn cast_mut<T, U>(r: &mut T) -> &mut U {
    assert_eq!(size_of::<T>(), size_of::<U>());
    // SAFETY: sizes match (asserted); alignment and bit-validity of `U` are
    // the caller's responsibility.
    unsafe { &mut *(r as *mut T as *mut U) }
}
mod fs;
use fs::*;
/// Bitmap blocks needed to cover the whole image (`+ 1` rounds up).
const N_BITMAP: usize = FS_SIZE / (BLK_SIZE * 8) + 1;
/// Total number of on-disk inodes in the image.
const N_INODES: usize = 200;
/// Blocks needed to hold all inodes (`+ 1` rounds up).
const N_INODE_BLOCKS: usize = N_INODES / INODES_PER_BLOCK + 1;
/// Number of log blocks.
const N_LOG: usize = LOG_SIZE;
/// Number of meta blocks (boot, sb, log, inode, bitmap)
const N_META: usize = 2 + N_LOG + N_INODE_BLOCKS + N_BITMAP;
/// Number of data blocks
const N_BLOCKS: usize = FS_SIZE - N_META;
/// One all-zero sector, used to blank the image.
const ZEROS: [u8; BLK_SIZE] = [0; BLK_SIZE];
/// Incrementally writes a file-system image laid out as
/// [boot | superblock | log | inodes | bitmap | data].
struct FsBuilder {
    // Backing image file.
    pub file: File,
    // In-memory copy of the superblock (fields stored little-endian).
    pub super_block: SuperBlock,
    // Next unallocated inode number; inode 0 is unused.
    pub free_inode: InodeNum,
    // Next unallocated block; meta blocks occupy [0, N_META).
    pub free_block: usize,
}
impl FsBuilder {
    /// Creates the image file, zeroes all `FS_SIZE` sectors, and writes the
    /// superblock into sector 1 (sector 0 is the boot block).
    pub fn create<P: AsRef<Path>>(output_file: P, sb: SuperBlock) -> Result<Self> {
        let mut builder = Self {
            file: std::fs::OpenOptions::new()
                .create(true)
                .write(true)
                .read(true)
                .open(output_file)?,
            super_block: sb,
            // Inode 0 is unused; allocation starts at 1 (the root inode).
            free_inode: 1,
            // Data blocks begin right after the metadata region.
            free_block: N_META,
        };
        // clear all
        for i in 0..FS_SIZE {
            builder.write_sect(i, &ZEROS)?;
        }
        // write the super block
        let mut buf = [0u8; BLK_SIZE];
        // SAFETY: copies the raw bytes of one SuperBlock into the sector
        // buffer; assumes size_of::<SuperBlock>() <= BLK_SIZE — TODO confirm
        // against the definition in `fs`.
        unsafe {
            std::ptr::copy_nonoverlapping(&builder.super_block, buf.as_mut_ptr() as *mut _, 1)
        };
        builder.write_sect(1, &buf)?;
        Ok(builder)
    }
    /// Seeks `s` to the byte offset of sector `sec`.
    fn set_sect<S: Seek>(s: &mut S, sec: usize) -> Result<&mut S> {
        s.seek(SeekFrom::Start((sec as u64) * (BLK_SIZE as u64)))?;
        Ok(s)
    }
    /// Writes one full sector.
    fn write_sect(&mut self, sec: usize, buf: &Sector) -> Result<()> {
        Self::set_sect(&mut self.file, sec)?.write_all(buf)
    }
    /// Reads one full sector.
    fn read_sect(&mut self, sec: usize, buf: &mut Sector) -> Result<()> {
        Self::set_sect(&mut self.file, sec)?.read_exact(&mut buf[..])
    }
    /// Stores `ind` into inode slot `inum`, read-modify-writing the sector
    /// that contains it. Returns `inum` for chaining.
    fn write_inode(&mut self, inum: InodeNum, ind: &OnDiskInode) -> Result<InodeNum> {
        let (bn, idx) = inode_pos(inum, &self.super_block);
        let mut sect = [0u8; BLK_SIZE];
        self.read_sect(bn, &mut sect)?;
        {
            // Reinterpret the raw sector as an array of inodes.
            let inodes: &mut InodesSector = cast_mut(&mut sect);
            inodes[idx] = ind.clone();
        }
        self.write_sect(bn, &sect)?;
        Ok(inum)
    }
    /// Loads inode `inum` from disk into `out`. Returns `inum`.
    fn read_inode(&mut self, inum: InodeNum, out: &mut OnDiskInode) -> Result<InodeNum> {
        let (bn, idx) = inode_pos(inum, &self.super_block);
        let mut sect = [0u8; BLK_SIZE];
        self.read_sect(bn, &mut sect)?;
        {
            let inodes: &mut InodesSector = cast_mut(&mut sect);
            *out = inodes[idx].clone();
        }
        Ok(inum)
    }
    /// Writes the allocation bitmap, marking blocks [0, free_block) as in
    /// use. Call once, after all data has been appended.
    fn alloc_block(&mut self) -> Result<()> {
        let used = self.free_block;
        println!("alloc_block: first {} blocks have been allocated", used);
        // Only a single bitmap sector is actually written here, so at most
        // BLK_SIZE * 8 blocks can be marked.
        assert!(used < BLK_SIZE * 8);
        let mut buf = [0u8; BLK_SIZE];
        for i in 0..used {
            buf[i / 8] |= 0x1 << (i % 8);
        }
        println!(
            "alloc_block: write bitmap block at sector {}",
            self.super_block.bmap_start
        );
        self.write_sect(self.super_block.bmap_start as usize, &buf)
    }
    /// Allocates the next free inode number and initializes it on disk with
    /// the given type, link count 1 and size 0 (little-endian fields).
    fn alloc_inode(&mut self, ty: FileType) -> Result<InodeNum> {
        let inum = self.free_inode;
        self.free_inode += 1;
        let mut din = OnDiskInode::default();
        din.type_ = (ty as u16).to_le();
        din.n_link = 1u16.to_le();
        din.size = 0u32.to_le();
        self.write_inode(inum, &din)
    }
    /// Hands out the next free data-block number.
    fn take_next_block(&mut self) -> usize {
        let r = self.free_block;
        self.free_block += 1;
        r
    }
    /// Appends `data` at the end of inode `inum`, allocating direct blocks
    /// and a single level of indirect blocks on demand.
    fn append_inode(&mut self, inum: InodeNum, mut data: &[u8]) -> Result<()> {
        let mut din = OnDiskInode::default();
        self.read_inode(inum, &mut din)?;
        // Current file size doubles as the append offset.
        let mut off = din.size.to_le() as usize;
        while !data.is_empty() {
            // File-block number being written this iteration.
            let fbn = off / BLK_SIZE;
            assert!(fbn < MAX_FILE);
            let sect = if fbn < N_DIRECT {
                // Direct block: allocate on first touch.
                if din.addrs[fbn].to_le() == 0 {
                    din.addrs[fbn] = self.take_next_block() as u32;
                }
                din.addrs[fbn].to_le() as usize
            } else {
                // Indirect block: allocate the index block on first touch...
                if din.addrs[N_DIRECT].to_le() == 0 {
                    din.addrs[N_DIRECT] = self.take_next_block() as u32;
                }
                let mut indirect: [u32; N_INDIRECT] = [0u32; N_INDIRECT];
                {
                    // Read the index block into `indirect`, viewed as bytes.
                    let indirect: &mut Sector = cast_mut(&mut indirect);
                    self.read_sect(din.addrs[N_DIRECT].to_le() as usize, indirect)?;
                }
                // ...then allocate the target data block if missing and
                // persist the updated index block.
                if indirect[fbn - N_DIRECT] == 0 {
                    indirect[fbn - N_DIRECT] = self.take_next_block() as u32;
                    let indirect: &Sector = cast(&indirect);
                    self.write_sect(din.addrs[N_DIRECT].to_le() as usize, indirect)?;
                }
                indirect[fbn - N_DIRECT].to_le() as usize
            };
            // Write at most up to the end of the current block.
            let nw = usize::min(data.len(), (fbn + 1) * BLK_SIZE - off);
            let mut buf = [0u8; BLK_SIZE];
            self.read_sect(sect, &mut buf)?;
            let begin = off % BLK_SIZE;
            buf[begin..begin + nw].copy_from_slice(&data[..nw]);
            self.write_sect(sect, &buf)?;
            off += nw;
            data = &data[nw..];
        }
        // NOTE(review): `off.to_le()` byte-swaps a usize *before* truncating
        // to u32, which is only correct on little-endian hosts;
        // `(off as u32).to_le()` would be portable. Behavior kept as-is.
        din.size = off.to_le() as u32;
        self.write_inode(inum, &din)?;
        Ok(())
    }
}
/// Builds the file-system image named in argv[1], then copies each of the
/// remaining argument files into its root directory.
fn main() -> Result<()> {
    // Inodes and directory entries must pack exactly into sectors.
    assert!(BLK_SIZE % size_of::<OnDiskInode>() == 0);
    assert!(BLK_SIZE % size_of::<DirEnt>() == 0);
    let args: Vec<String> = std::env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: mkfs [output-image] [files...]");
        return Ok(());
    }
    // Fill in the superblock: [boot | super | log | inodes | bitmap | data].
    // All fields are stored little-endian.
    let mut builder = {
        let mut sb = SuperBlock::default();
        sb.size = (FS_SIZE as u32).to_le();
        sb.n_blocks = (N_BLOCKS as u32).to_le();
        sb.n_inodes = (N_INODES as u32).to_le();
        sb.n_log = (N_LOG as u32).to_le();
        sb.log_start = 2u32.to_le();
        sb.inode_start = (2 + N_LOG as u32).to_le();
        sb.bmap_start = (2 + N_LOG as u32 + N_INODE_BLOCKS as u32).to_le();
        FsBuilder::create(&args[1], sb)?
    };
    println!("nmeta {} (boot, super, log blocks {}, inode blocks {}, bitmap blocks {}) blocks {} total {}", N_META, N_LOG, N_INODE_BLOCKS, N_BITMAP, N_BLOCKS, FS_SIZE);
    assert_eq!(N_META + N_BLOCKS, FS_SIZE);
    // The root directory takes the first inode and gets "." / ".." entries.
    let root_ino = builder.alloc_inode(FileType::Directory)?;
    assert_eq!(root_ino, ROOT_INO);
    {
        let mut de = DirEnt::default();
        de.inum = root_ino.to_le();
        de.set_name(b".");
        builder.append_inode(root_ino, de.as_bytes())?;
        let mut de = DirEnt::default();
        de.inum = root_ino.to_le();
        de.set_name(b"..");
        builder.append_inode(root_ino, de.as_bytes())?;
    }
    for filename in &args[2..] {
        let mut file = File::open(filename)?;
        let filename: &Path = filename.as_ref();
        let mut filename = filename.file_name().unwrap().to_str().unwrap().as_bytes();
        // Skip leading _ in name when writing to file system.
        // The binaries are named _rm, _cat, etc. to keep the
        // build operating system from trying to execute them
        // in place of system binaries like rm and cat.
        if filename.starts_with(b"_") {
            filename = &filename[1..];
        }
        // New inode for the file, plus a directory entry in root.
        let inum = builder.alloc_inode(FileType::Regular)?;
        let mut de = DirEnt::default();
        de.inum = inum.to_le();
        de.set_name(filename);
        builder.append_inode(root_ino, de.as_bytes())?;
        // Stream the file contents into the inode, one sector at a time.
        let mut buf = [0u8; BLK_SIZE];
        loop {
            let nread = file.read(&mut buf)?;
            if nread == 0 {
                // EOF
                break;
            }
            builder.append_inode(inum, &buf[..nread])?;
        }
    }
    // fix size of root inode dir
    // (rounds the recorded size up to the next whole block boundary)
    let mut din = OnDiskInode::default();
    builder.read_inode(root_ino, &mut din)?;
    let off = din.size.to_le() as usize;
    let off = ((off / BLK_SIZE) + 1) * BLK_SIZE;
    din.size = (off as u32).to_le();
    builder.write_inode(root_ino, &din)?;
    // write the bitmap block
    builder.alloc_block()?;
    Ok(())
}
|
//! Decode a JSON stream to a Rust data structure.
use std::collections::HashSet;
use std::fmt;
use std::str::FromStr;
use async_trait::async_trait;
use bytes::{BufMut, Bytes};
use destream::{de, FromStream, Visitor};
use futures::stream::{Fuse, FusedStream, Stream, StreamExt, TryStreamExt};
#[cfg(feature = "tokio-io")]
use tokio::io::{AsyncRead, AsyncReadExt, BufReader};
use crate::constants::*;
/// Minimal chunked-input abstraction, implemented over either a byte stream
/// or an async reader.
#[async_trait]
pub trait Read: Send + Unpin {
    // Next chunk of input, or None when the source yields nothing more.
    async fn next(&mut self) -> Option<Result<Bytes, Error>>;
    // True once the underlying source can produce no further data.
    fn is_terminated(&self) -> bool;
}
/// `Read` adapter over a `futures` byte stream.
pub struct SourceStream<S> {
    // Fused so next()/is_terminated() are well-defined after the end.
    source: Fuse<S>,
}
#[async_trait]
impl<S: Stream<Item = Result<Bytes, Error>> + Send + Unpin> Read for SourceStream<S> {
    /// Delegates to the underlying fused stream.
    async fn next(&mut self) -> Option<Result<Bytes, Error>> {
        self.source.next().await
    }
    fn is_terminated(&self) -> bool {
        self.source.is_terminated()
    }
}
impl<S: Stream> From<S> for SourceStream<S> {
fn from(source: S) -> Self {
Self {
source: source.fuse(),
}
}
}
/// `Read` adapter over a tokio `AsyncRead`.
#[cfg(feature = "tokio-io")]
pub struct SourceReader<R: AsyncRead> {
    reader: BufReader<R>,
    // Set once a zero-byte read (EOF) has been observed.
    terminated: bool,
}
#[cfg(feature = "tokio-io")]
#[async_trait]
impl<R: AsyncRead + Send + Unpin> Read for SourceReader<R> {
    /// Reads one buffered chunk. A zero-byte read marks EOF; the (empty)
    /// chunk is still yielded once so callers then observe termination via
    /// `is_terminated`.
    async fn next(&mut self) -> Option<Result<Bytes, Error>> {
        let mut chunk = Vec::new();
        match self.reader.read_buf(&mut chunk).await {
            Ok(0) => {
                self.terminated = true;
                Some(Ok(chunk.into()))
            }
            Ok(size) => {
                debug_assert_eq!(chunk.len(), size);
                Some(Ok(chunk.into()))
            }
            // Surface I/O failures as decoder errors.
            Err(cause) => Some(Err(de::Error::custom(format!("io error: {}", cause)))),
        }
    }
    fn is_terminated(&self) -> bool {
        self.terminated
    }
}
#[cfg(feature = "tokio-io")]
impl<R: AsyncRead> From<R> for SourceReader<R> {
    /// Buffers the reader; EOF has not been observed yet.
    fn from(reader: R) -> Self {
        Self {
            reader: BufReader::new(reader),
            terminated: false,
        }
    }
}
/// An error encountered while decoding a JSON stream.
pub struct Error {
    // Human-readable description; this type carries no structured cause.
    message: String,
}
impl Error {
    // Error for byte sequences that are not valid UTF-8.
    fn invalid_utf8<I: fmt::Display>(info: I) -> Self {
        de::Error::custom(format!("invalid UTF-8: {}", info))
    }
    // Error for input that ends in the middle of a value.
    fn unexpected_end() -> Self {
        de::Error::custom("unexpected end of stream")
    }
}
// Marker impl; relies entirely on the trait's default methods.
impl std::error::Error for Error {}
impl de::Error for Error {
    /// Builds an `Error` from any displayable message.
    fn custom<T: fmt::Display>(msg: T) -> Self {
        Self {
            message: msg.to_string(),
        }
    }
}
impl fmt::Debug for Error {
    // Debug delegates to Display, so the error prints identically via
    // `{:?}` and `{}`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
impl fmt::Display for Error {
    // Shows only the message text.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&self.message, f)
    }
}
/// Streaming access to the entries of a JSON object being decoded.
struct MapAccess<'a, S> {
    decoder: &'a mut Decoder<S>,
    // Caller-provided expected entry count, if known.
    size_hint: Option<usize>,
    // Set once the closing '}' has been consumed.
    done: bool,
}
impl<'a, S: Read + 'a> MapAccess<'a, S> {
    /// Consumes the opening `{` (plus surrounding whitespace) and records
    /// whether the object is immediately closed (i.e. empty).
    async fn new(
        decoder: &'a mut Decoder<S>,
        size_hint: Option<usize>,
    ) -> Result<MapAccess<'a, S>, Error> {
        decoder.expect_whitespace().await?;
        decoder.expect_delimiter(MAP_BEGIN).await?;
        decoder.expect_whitespace().await?;
        let done = decoder.maybe_delimiter(MAP_END).await?;
        Ok(MapAccess {
            decoder,
            size_hint,
            done,
        })
    }
}
#[async_trait]
impl<'a, S: Read + 'a> de::MapAccess for MapAccess<'a, S> {
    type Error = Error;
    /// Decodes the next key, also consuming the trailing ':' so that
    /// `next_value` starts positioned at the value. Returns None once the
    /// object has been fully consumed.
    async fn next_key<K: FromStream>(&mut self, context: K::Context) -> Result<Option<K>, Error> {
        if self.done {
            return Ok(None);
        }
        self.decoder.expect_whitespace().await?;
        let key = K::from_stream(context, self.decoder).await?;
        self.decoder.expect_whitespace().await?;
        self.decoder.expect_delimiter(COLON).await?;
        self.decoder.expect_whitespace().await?;
        Ok(Some(key))
    }
    /// Decodes the next value, then consumes either the closing '}' (which
    /// finishes the object) or the ',' separating the next entry.
    async fn next_value<V: FromStream>(&mut self, context: V::Context) -> Result<V, Error> {
        let value = V::from_stream(context, self.decoder).await?;
        self.decoder.expect_whitespace().await?;
        if self.decoder.maybe_delimiter(MAP_END).await? {
            self.done = true;
        } else {
            self.decoder.expect_delimiter(COMMA).await?;
        }
        Ok(value)
    }
    fn size_hint(&self) -> Option<usize> {
        self.size_hint
    }
}
/// Streaming access to the elements of a JSON array being decoded.
struct SeqAccess<'a, S> {
    decoder: &'a mut Decoder<S>,
    // Caller-provided expected element count, if known.
    size_hint: Option<usize>,
    // Set once the closing ']' has been consumed.
    done: bool,
}
impl<'a, S: Read + 'a> SeqAccess<'a, S> {
    /// Consumes the opening `[` (plus surrounding whitespace) and records
    /// whether the array is immediately closed (i.e. empty).
    async fn new(
        decoder: &'a mut Decoder<S>,
        size_hint: Option<usize>,
    ) -> Result<SeqAccess<'a, S>, Error> {
        decoder.expect_whitespace().await?;
        decoder.expect_delimiter(LIST_BEGIN).await?;
        decoder.expect_whitespace().await?;
        let done = decoder.maybe_delimiter(LIST_END).await?;
        Ok(SeqAccess {
            decoder,
            size_hint,
            done,
        })
    }
}
#[async_trait]
impl<'a, S: Read + 'a> de::SeqAccess for SeqAccess<'a, S> {
    type Error = Error;
    /// Decodes the next element, then consumes either the closing ']'
    /// (which finishes the array) or the ',' separating the next element.
    /// Returns None once the array has been fully consumed.
    async fn next_element<T: FromStream>(
        &mut self,
        context: T::Context,
    ) -> Result<Option<T>, Self::Error> {
        if self.done {
            return Ok(None);
        }
        self.decoder.expect_whitespace().await?;
        let value = T::from_stream(context, self.decoder).await?;
        self.decoder.expect_whitespace().await?;
        if self.decoder.maybe_delimiter(LIST_END).await? {
            self.done = true;
        } else {
            self.decoder.expect_delimiter(COMMA).await?;
        }
        Ok(Some(value))
    }
    fn size_hint(&self) -> Option<usize> {
        self.size_hint
    }
}
/// A structure that decodes Rust values from a JSON stream.
pub struct Decoder<S> {
    source: S,
    // Bytes pulled from `source` but not yet consumed by a parser.
    buffer: Vec<u8>,
    // Set of bytes that may appear inside a number literal (from NUMERIC).
    numeric: HashSet<u8>,
}
#[cfg(feature = "tokio-io")]
impl<A: AsyncRead> Decoder<A>
where
    SourceReader<A>: Read,
{
    /// Constructs a decoder over an async reader.
    // NOTE(review): this associated fn is declared in `impl Decoder<A>` yet
    // builds a `Decoder<SourceReader<A>>`, so callers must name it as
    // `Decoder::<A>::from_reader` — possibly unintended; kept as-is.
    pub fn from_reader(reader: A) -> Decoder<SourceReader<A>> {
        Decoder {
            source: SourceReader::from(reader),
            buffer: Vec::new(),
            numeric: NUMERIC.iter().cloned().collect(),
        }
    }
}
impl<S: Stream> Decoder<SourceStream<S>>
where
    SourceStream<S>: Read,
{
    /// Constructs a decoder over a `futures` byte stream.
    pub fn from_stream(stream: S) -> Decoder<SourceStream<S>> {
        Decoder {
            source: SourceStream::from(stream),
            buffer: Vec::new(),
            numeric: NUMERIC.iter().cloned().collect(),
        }
    }
}
impl<S: Read> Decoder<S> {
async fn buffer(&mut self) -> Result<(), Error> {
if let Some(data) = self.source.next().await {
self.buffer.extend(data?);
}
Ok(())
}
async fn buffer_string(&mut self) -> Result<Vec<u8>, Error> {
self.expect_delimiter(QUOTE).await?;
let mut i = 0;
let mut escaped = false;
loop {
while i >= self.buffer.len() && !self.source.is_terminated() {
self.buffer().await?;
}
if i < self.buffer.len() && &self.buffer[i..i + 1] == QUOTE && !escaped {
break;
} else if self.source.is_terminated() {
return Err(Error::unexpected_end());
}
if escaped {
escaped = false;
} else if self.buffer[i] == ESCAPE[0] {
escaped = true;
}
i += 1;
}
let mut s = Vec::with_capacity(i);
let mut escape = false;
for byte in self.buffer.drain(0..i) {
let as_slice = std::slice::from_ref(&byte);
if escape {
s.put_u8(byte);
escape = false;
} else if as_slice == ESCAPE {
escape = true;
} else {
s.put_u8(byte);
}
}
self.buffer.remove(0); // process the end quote
self.buffer.shrink_to_fit();
Ok(s)
}
async fn buffer_while<F: Fn(u8) -> bool>(&mut self, cond: F) -> Result<usize, Error> {
let mut i = 0;
loop {
while i >= self.buffer.len() && !self.source.is_terminated() {
self.buffer().await?;
}
if i < self.buffer.len() && cond(self.buffer[i]) {
i += 1;
} else if self.source.is_terminated() {
return Ok(i);
} else {
break;
}
}
Ok(i)
}
async fn decode_number<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Error> {
let mut i = 0;
loop {
if self.buffer[i] == DECIMAL[0] {
return de::Decoder::decode_f64(self, visitor).await;
} else if !self.numeric.contains(&self.buffer[i]) {
return de::Decoder::decode_i64(self, visitor).await;
}
i += 1;
while i >= self.buffer.len() && !self.source.is_terminated() {
self.buffer().await?;
}
if self.source.is_terminated() {
return de::Decoder::decode_i64(self, visitor).await;
}
}
}
async fn expect_delimiter(&mut self, delimiter: &'static [u8]) -> Result<(), Error> {
while self.buffer.is_empty() && !self.source.is_terminated() {
self.buffer().await?;
}
if self.buffer.is_empty() {
return Err(Error::unexpected_end());
}
if &self.buffer[0..1] == delimiter {
self.buffer.remove(0);
Ok(())
} else {
Err(de::Error::invalid_value(
self.buffer[0] as char,
&format!("{}", String::from_utf8(delimiter.to_vec()).unwrap()),
))
}
}
async fn expect_whitespace(&mut self) -> Result<(), Error> {
let i = self.buffer_while(|b| (b as char).is_whitespace()).await?;
self.buffer.drain(..i);
Ok(())
}
/// Skip over the next JSON scalar in the stream without visiting it.
async fn ignore_value(&mut self) -> Result<(), Error> {
    self.expect_whitespace().await?;
    while self.buffer.is_empty() && !self.source.is_terminated() {
        self.buffer().await?;
    }
    // nothing buffered at end-of-stream means there is nothing to skip
    if self.buffer.is_empty() {
        return Ok(());
    }
    // dispatch on the first byte to the matching parser, discarding the value
    if self.buffer.starts_with(QUOTE) {
        self.parse_string().await?;
    } else if self.numeric.contains(&self.buffer[0]) {
        self.parse_number::<f64>().await?;
    } else if self.buffer[0] == b'n' {
        self.parse_unit().await?;
    } else {
        self.parse_bool().await?;
    }
    Ok(())
}
/// Consume `delimiter` if it is the next content in the stream.
/// Returns `true` iff the delimiter was present (and consumed).
async fn maybe_delimiter(&mut self, delimiter: &'static [u8]) -> Result<bool, Error> {
    while self.buffer.is_empty() && !self.source.is_terminated() {
        self.buffer().await?;
    }
    // `starts_with` on an empty buffer is false, so no separate empty check
    // is needed (delimiters are never empty)
    if self.buffer.starts_with(delimiter) {
        // consume the whole delimiter, not just its first byte, so that
        // multi-byte delimiters are handled correctly
        self.buffer.drain(..delimiter.len());
        Ok(true)
    } else {
        Ok(false)
    }
}
async fn parse_bool(&mut self) -> Result<bool, Error> {
self.expect_whitespace().await?;
while self.buffer.len() < 4 && !self.source.is_terminated() {
self.buffer().await?;
}
if self.buffer.is_empty() {
return Err(Error::unexpected_end());
} else if self.buffer.starts_with(TRUE) {
self.buffer.drain(0..4);
return Ok(true);
}
while self.buffer.len() < 5 && !self.source.is_terminated() {
self.buffer().await?;
}
if self.buffer.is_empty() {
return Err(Error::unexpected_end());
} else if self.buffer.starts_with(FALSE) {
self.buffer.drain(0..5);
return Ok(false);
}
let i = Ord::min(self.buffer.len(), 5);
let unknown = String::from_utf8(self.buffer[..i].to_vec()).map_err(Error::invalid_utf8)?;
Err(de::Error::invalid_value(unknown, &"a boolean"))
}
async fn parse_number<N: FromStr>(&mut self) -> Result<N, Error>
where
<N as FromStr>::Err: fmt::Display,
{
self.expect_whitespace().await?;
let numeric = self.numeric.clone();
let i = self.buffer_while(|b| numeric.contains(&b)).await?;
let n = String::from_utf8(self.buffer[0..i].to_vec()).map_err(Error::invalid_utf8)?;
match n.parse() {
Ok(number) => {
self.buffer.drain(..i);
Ok(number)
}
Err(cause) => Err(de::Error::invalid_value(cause, &std::any::type_name::<N>())),
}
}
async fn parse_string(&mut self) -> Result<String, Error> {
let s = self.buffer_string().await?;
String::from_utf8(s).map_err(Error::invalid_utf8)
}
async fn parse_unit(&mut self) -> Result<(), Error> {
self.expect_whitespace().await?;
while self.buffer.len() < 4 && !self.source.is_terminated() {
self.buffer().await?;
}
if self.buffer.starts_with(NULL) {
self.buffer.drain(..NULL.len());
Ok(())
} else {
let i = Ord::min(self.buffer.len(), 5);
let as_str =
String::from_utf8(self.buffer[..i].to_vec()).map_err(Error::invalid_utf8)?;
Err(de::Error::invalid_type(as_str, &"null"))
}
}
}
#[async_trait]
impl<S: Read> de::Decoder for Decoder<S> {
    type Error = Error;

    /// Guess the JSON type from the first buffered byte(s) and dispatch to the
    /// matching `decode_*` method. Keyword literals ("true"/"false"/"null")
    /// may require buffering up to 4-5 bytes before they can be recognized.
    async fn decode_any<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        self.expect_whitespace().await?;
        // ensure at least one byte is buffered before inspecting it
        while self.buffer.is_empty() && !self.source.is_terminated() {
            self.buffer().await?;
        }
        if self.buffer.is_empty() {
            Err(Error::unexpected_end())
        } else if self.buffer.starts_with(QUOTE) {
            self.decode_string(visitor).await
        } else if self.buffer.starts_with(LIST_BEGIN) {
            self.decode_seq(visitor).await
        } else if self.buffer.starts_with(MAP_BEGIN) {
            self.decode_map(visitor).await
        } else if self.numeric.contains(&self.buffer[0]) {
            self.decode_number(visitor).await
        } else if self.buffer.len() >= 5 && self.buffer.starts_with(FALSE) {
            self.decode_bool(visitor).await
        } else if self.buffer.len() >= 4 && self.buffer.starts_with(TRUE) {
            self.decode_bool(visitor).await
        } else if self.buffer.len() >= 4 && self.buffer.starts_with(NULL) {
            self.decode_option(visitor).await
        } else {
            // too few bytes buffered to recognize a keyword yet: buffer up to
            // four bytes ("true"/"null") and retry, then up to five ("false")
            while self.buffer.len() < 4 && !self.source.is_terminated() {
                self.buffer().await?;
            }
            if self.buffer.is_empty() {
                Err(Error::unexpected_end())
            } else if self.buffer.starts_with(TRUE) {
                self.decode_bool(visitor).await
            } else if self.buffer.starts_with(NULL) {
                self.decode_option(visitor).await
            } else {
                while self.buffer.len() < 5 && !self.source.is_terminated() {
                    self.buffer().await?;
                }
                if self.buffer.is_empty() {
                    Err(Error::unexpected_end())
                } else if self.buffer.starts_with(FALSE) {
                    self.decode_bool(visitor).await
                } else {
                    // nothing matched: report up to five bytes of context
                    let i = Ord::min(self.buffer.len(), 5);
                    let s = String::from_utf8(self.buffer[0..i].to_vec()).map_err(Error::invalid_utf8)?;
                    Err(de::Error::invalid_value(
                        s,
                        &std::any::type_name::<V::Value>(),
                    ))
                }
            }
        }
    }
    async fn decode_bool<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let b = self.parse_bool().await?;
        visitor.visit_bool(b)
    }
    // the integer/float decoders below all delegate to `parse_number`, which
    // infers the target type `N` from the `visit_*` call
    async fn decode_i8<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let i = self.parse_number().await?;
        visitor.visit_i8(i)
    }
    async fn decode_i16<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let i = self.parse_number().await?;
        visitor.visit_i16(i)
    }
    async fn decode_i32<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let i = self.parse_number().await?;
        visitor.visit_i32(i)
    }
    async fn decode_i64<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let i = self.parse_number().await?;
        visitor.visit_i64(i)
    }
    async fn decode_u8<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let u = self.parse_number().await?;
        visitor.visit_u8(u)
    }
    async fn decode_u16<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let u = self.parse_number().await?;
        visitor.visit_u16(u)
    }
    async fn decode_u32<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let u = self.parse_number().await?;
        visitor.visit_u32(u)
    }
    async fn decode_u64<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let u = self.parse_number().await?;
        visitor.visit_u64(u)
    }
    async fn decode_f32<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let f = self.parse_number().await?;
        visitor.visit_f32(f)
    }
    async fn decode_f64<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let f = self.parse_number().await?;
        visitor.visit_f64(f)
    }
    async fn decode_string<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        self.expect_whitespace().await?;
        let s = self.parse_string().await?;
        visitor.visit_string(s)
    }
    /// Binary data is transported as a base64-encoded JSON string.
    async fn decode_byte_buf<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let encoded = self.parse_string().await?;
        let decoded = base64::decode(encoded).map_err(de::Error::custom)?;
        visitor.visit_byte_buf(decoded)
    }
    /// A literal `null` maps to `visit_none`; anything else is decoded as the
    /// inner value via `visit_some`.
    async fn decode_option<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        self.expect_whitespace().await?;
        // buffer enough bytes to recognize "null" (four bytes)
        while self.buffer.len() < 4 && !self.source.is_terminated() {
            self.buffer().await?;
        }
        if self.buffer.starts_with(NULL) {
            self.buffer.drain(0..4);
            visitor.visit_none()
        } else {
            visitor.visit_some(self).await
        }
    }
    async fn decode_seq<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let access = SeqAccess::new(self, None).await?;
        visitor.visit_seq(access).await
    }
    async fn decode_unit<V: Visitor>(
        &mut self,
        visitor: V,
    ) -> Result<<V as Visitor>::Value, Self::Error> {
        self.parse_unit().await?;
        visitor.visit_unit()
    }
    /// A tuple is decoded like a sequence but with a known, fixed length.
    async fn decode_tuple<V: Visitor>(
        &mut self,
        len: usize,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        let access = SeqAccess::new(self, Some(len)).await?;
        visitor.visit_seq(access).await
    }
    async fn decode_map<V: Visitor>(&mut self, visitor: V) -> Result<V::Value, Self::Error> {
        let access = MapAccess::new(self, None).await?;
        visitor.visit_map(access).await
    }
    /// Consume and discard the next value, then visit unit in its place.
    async fn decode_ignored_any<V: Visitor>(
        &mut self,
        visitor: V,
    ) -> Result<V::Value, Self::Error> {
        self.ignore_value().await?;
        visitor.visit_unit()
    }
}
impl<S: Read> From<S> for Decoder<S> {
    /// Construct a `Decoder` over `source` with an empty read buffer and the
    /// standard set of bytes that may appear in a numeric literal.
    fn from(source: S) -> Self {
        let numeric = NUMERIC.iter().copied().collect();
        Self {
            source,
            buffer: Vec::new(),
            numeric,
        }
    }
}
/// Decode the given JSON-encoded stream of bytes into an instance of `T` using the given context.
pub async fn decode<S: Stream<Item = Bytes> + Send + Unpin, T: FromStream>(
    context: T::Context,
    source: S,
) -> Result<T, Error> {
    // wrap each chunk in `Ok` so the infallible stream fits the source adapter
    let source = source.map(Result::<Bytes, Error>::Ok);
    let mut decoder = Decoder::from(SourceStream::from(source));
    T::from_stream(context, &mut decoder).await
}
/// Decode the given JSON-encoded stream of bytes into an instance of `T` using the given context.
pub async fn try_decode<
E: fmt::Display,
S: Stream<Item = Result<Bytes, E>> + Send + Unpin,
T: FromStream,
>(
context: T::Context,
source: S,
) -> Result<T, Error> {
let mut decoder = Decoder::from_stream(source.map_err(|e| de::Error::custom(e)));
T::from_stream(context, &mut decoder).await
}
#[cfg(feature = "tokio-io")]
/// Decode the given JSON-encoded stream of bytes into an instance of `T` using the given context.
// NOTE: the doc comment was previously duplicated on both sides of the `#[cfg]`
// attribute, which made it render twice in rustdoc; keep a single copy.
pub async fn read_from<S: AsyncReadExt + Send + Unpin, T: FromStream>(
    context: T::Context,
    source: S,
) -> Result<T, Error> {
    T::from_stream(context, &mut Decoder::from(SourceReader::from(source))).await
}
|
use std::fs;
use sg_syntax::{determine_language, SourcegraphQuery};
use syntect::{
html::{ClassStyle, ClassedHTMLGenerator},
parsing::SyntaxSet,
};
/// CLI tester: read the file named by the first argument, highlight it with
/// syntect's classed HTML generator, and print the resulting HTML.
fn main() -> Result<(), std::io::Error> {
    println!("scip-syntect tester");
    let (path, contents) = if let Some(path) = std::env::args().nth(1) {
        match fs::read_to_string(&path) {
            Ok(contents) => (path, contents),
            Err(err) => {
                eprintln!("Failed to read path: {:?}. {}", path, err);
                return Ok(());
            }
        }
    } else {
        eprintln!("Must pass a path as the argument");
        return Ok(());
    };
    let q = SourcegraphQuery {
        filepath: path,
        code: contents.clone(),
        ..Default::default()
    };
    let syntax_set = SyntaxSet::load_defaults_newlines();
    // panics with a clear message if no syntax matches the file
    let syntax_def = determine_language(&q, &syntax_set).expect("failed to determine language");
    let mut html_generator =
        ClassedHTMLGenerator::new_with_class_style(syntax_def, &syntax_set, ClassStyle::Spaced);
    // `parse_html_for_line_which_includes_newline` requires each line to keep
    // its trailing '\n'; `str::lines()` strips it, which breaks the newline-
    // based syntax definitions, so iterate with `split_inclusive` instead
    for line in contents.split_inclusive('\n') {
        html_generator.parse_html_for_line_which_includes_newline(line);
    }
    let html = html_generator.finalize();
    println!("{}", html);
    Ok(())
}
|
#[doc = "Register `CR` reader"]
pub type R = crate::R<CR_SPEC>;
#[doc = "Register `CR` writer"]
pub type W = crate::W<CR_SPEC>;
#[doc = "Field `HSION` reader - HSI clock enable Set and cleared by software. Set by hardware to force the HSI to ON when the product leaves Stop mode, if STOPWUCK = 0 or STOPKERWUCK = 0. Set by hardware to force the HSI to ON when the product leaves Standby mode or in case of a failure of the HSE which is used as the system clock source. This bit cannot be cleared if the HSI is used directly (via SW mux) as system clock, or if the HSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
pub type HSION_R = crate::BitReader<HSION_A>;
#[doc = "HSI clock enable Set and cleared by software. Set by hardware to force the HSI to ON when the product leaves Stop mode, if STOPWUCK = 0 or STOPKERWUCK = 0. Set by hardware to force the HSI to ON when the product leaves Standby mode or in case of a failure of the HSE which is used as the system clock source. This bit cannot be cleared if the HSI is used directly (via SW mux) as system clock, or if the HSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1).\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum HSION_A {
    // the discriminant is the raw register bit value for this field
    #[doc = "0: Clock Off"]
    Off = 0,
    #[doc = "1: Clock On"]
    On = 1,
}
impl From<HSION_A> for bool {
#[inline(always)]
fn from(variant: HSION_A) -> Self {
variant as u8 != 0
}
}
impl HSION_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HSION_A {
        // a set bit means the clock is enabled
        if self.bits {
            HSION_A::On
        } else {
            HSION_A::Off
        }
    }
    #[doc = "Clock Off"]
    #[inline(always)]
    pub fn is_off(&self) -> bool {
        *self == HSION_A::Off
    }
    #[doc = "Clock On"]
    #[inline(always)]
    pub fn is_on(&self) -> bool {
        *self == HSION_A::On
    }
}
#[doc = "Field `HSION` writer - HSI clock enable Set and cleared by software. Set by hardware to force the HSI to ON when the product leaves Stop mode, if STOPWUCK = 0 or STOPKERWUCK = 0. Set by hardware to force the HSI to ON when the product leaves Standby mode or in case of a failure of the HSE which is used as the system clock source. This bit cannot be cleared if the HSI is used directly (via SW mux) as system clock, or if the HSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
pub type HSION_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, HSION_A>;
impl<'a, REG, const O: u8> HSION_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    // each setter writes the corresponding enumerated value into the field
    // and returns the register writer for method chaining
    #[doc = "Clock Off"]
    #[inline(always)]
    pub fn off(self) -> &'a mut crate::W<REG> {
        self.variant(HSION_A::Off)
    }
    #[doc = "Clock On"]
    #[inline(always)]
    pub fn on(self) -> &'a mut crate::W<REG> {
        self.variant(HSION_A::On)
    }
}
#[doc = "Field `HSIKERON` reader - HSI clock enable in Stop mode Set and reset by software to force the HSI to ON, even in Stop mode, in order to be quickly available as kernel clock for peripherals. This bit has no effect on the value of HSION."]
pub use HSION_R as HSIKERON_R;
#[doc = "Field `HSIKERON` writer - HSI clock enable in Stop mode Set and reset by software to force the HSI to ON, even in Stop mode, in order to be quickly available as kernel clock for peripherals. This bit has no effect on the value of HSION."]
pub use HSION_W as HSIKERON_W;
#[doc = "Field `HSIRDY` reader - HSI clock ready flag Set by hardware to indicate that the HSI oscillator is stable."]
pub type HSIRDY_R = crate::BitReader<HSIRDYR_A>;
#[doc = "HSI clock ready flag Set by hardware to indicate that the HSI oscillator is stable.\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum HSIRDYR_A {
    // the discriminant is the raw register bit value for this field
    #[doc = "0: Clock not ready"]
    NotReady = 0,
    #[doc = "1: Clock ready"]
    Ready = 1,
}
impl From<HSIRDYR_A> for bool {
#[inline(always)]
fn from(variant: HSIRDYR_A) -> Self {
variant as u8 != 0
}
}
impl HSIRDY_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HSIRDYR_A {
        // a set bit means the oscillator is stable
        if self.bits {
            HSIRDYR_A::Ready
        } else {
            HSIRDYR_A::NotReady
        }
    }
    #[doc = "Clock not ready"]
    #[inline(always)]
    pub fn is_not_ready(&self) -> bool {
        *self == HSIRDYR_A::NotReady
    }
    #[doc = "Clock ready"]
    #[inline(always)]
    pub fn is_ready(&self) -> bool {
        *self == HSIRDYR_A::Ready
    }
}
#[doc = "Field `HSIDIV` reader - HSI clock divider Set and reset by software. These bits allow selecting a division ratio in order to configure the wanted HSI clock frequency. The HSIDIV cannot be changed if the HSI is selected as reference clock for at least one enabled PLL (PLLxON bit set to 1). In that case, the new HSIDIV value is ignored."]
pub type HSIDIV_R = crate::FieldReader<HSIDIV_A>;
#[doc = "HSI clock divider Set and reset by software. These bits allow selecting a division ratio in order to configure the wanted HSI clock frequency. The HSIDIV cannot be changed if the HSI is selected as reference clock for at least one enabled PLL (PLLxON bit set to 1). In that case, the new HSIDIV value is ignored.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u8)]
pub enum HSIDIV_A {
    // the discriminant is the raw 2-bit register field value
    #[doc = "0: No division"]
    Div1 = 0,
    #[doc = "1: Division by 2"]
    Div2 = 1,
    #[doc = "2: Division by 4"]
    Div4 = 2,
    #[doc = "3: Division by 8"]
    Div8 = 3,
}
impl From<HSIDIV_A> for u8 {
    #[inline(always)]
    fn from(variant: HSIDIV_A) -> Self {
        // the discriminants are the raw field values, so a plain cast suffices
        variant as u8
    }
}
impl crate::FieldSpec for HSIDIV_A {
    // HSIDIV is a 2-bit field, so its raw representation is `u8`
    type Ux = u8;
}
impl HSIDIV_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HSIDIV_A {
        match self.bits {
            0 => HSIDIV_A::Div1,
            1 => HSIDIV_A::Div2,
            2 => HSIDIV_A::Div4,
            3 => HSIDIV_A::Div8,
            // the reader masks this field to 2 bits, so 0..=3 are the only
            // possible values
            _ => unreachable!(),
        }
    }
    #[doc = "No division"]
    #[inline(always)]
    pub fn is_div1(&self) -> bool {
        *self == HSIDIV_A::Div1
    }
    #[doc = "Division by 2"]
    #[inline(always)]
    pub fn is_div2(&self) -> bool {
        *self == HSIDIV_A::Div2
    }
    #[doc = "Division by 4"]
    #[inline(always)]
    pub fn is_div4(&self) -> bool {
        *self == HSIDIV_A::Div4
    }
    #[doc = "Division by 8"]
    #[inline(always)]
    pub fn is_div8(&self) -> bool {
        *self == HSIDIV_A::Div8
    }
}
#[doc = "Field `HSIDIV` writer - HSI clock divider Set and reset by software. These bits allow selecting a division ratio in order to configure the wanted HSI clock frequency. The HSIDIV cannot be changed if the HSI is selected as reference clock for at least one enabled PLL (PLLxON bit set to 1). In that case, the new HSIDIV value is ignored."]
pub type HSIDIV_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 2, O, HSIDIV_A>;
impl<'a, REG, const O: u8> HSIDIV_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
    REG::Ux: From<u8>,
{
    // each setter writes the corresponding division ratio into the field
    // and returns the register writer for method chaining
    #[doc = "No division"]
    #[inline(always)]
    pub fn div1(self) -> &'a mut crate::W<REG> {
        self.variant(HSIDIV_A::Div1)
    }
    #[doc = "Division by 2"]
    #[inline(always)]
    pub fn div2(self) -> &'a mut crate::W<REG> {
        self.variant(HSIDIV_A::Div2)
    }
    #[doc = "Division by 4"]
    #[inline(always)]
    pub fn div4(self) -> &'a mut crate::W<REG> {
        self.variant(HSIDIV_A::Div4)
    }
    #[doc = "Division by 8"]
    #[inline(always)]
    pub fn div8(self) -> &'a mut crate::W<REG> {
        self.variant(HSIDIV_A::Div8)
    }
}
#[doc = "Field `HSIDIVF` reader - HSI divider flag Set and reset by hardware. As a write operation to HSIDIV has not an immediate effect on the frequency, this flag indicates the current status of the HSI divider. HSIDIVF goes immediately to 0 when HSIDIV value is changed, and is set back to 1 when the output frequency matches the value programmed into HSIDIV. clock setting is completed)"]
pub type HSIDIVF_R = crate::BitReader<HSIDIVFR_A>;
#[doc = "HSI divider flag Set and reset by hardware. As a write operation to HSIDIV has not an immediate effect on the frequency, this flag indicates the current status of the HSI divider. HSIDIVF goes immediately to 0 when HSIDIV value is changed, and is set back to 1 when the output frequency matches the value programmed into HSIDIV. clock setting is completed)\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum HSIDIVFR_A {
    // the discriminant is the raw register bit value for this field
    #[doc = "0: New HSIDIV ratio has not yet propagated to hsi_ck"]
    NotPropagated = 0,
    #[doc = "1: HSIDIV ratio has propagated to hsi_ck"]
    Propagated = 1,
}
impl From<HSIDIVFR_A> for bool {
#[inline(always)]
fn from(variant: HSIDIVFR_A) -> Self {
variant as u8 != 0
}
}
impl HSIDIVF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HSIDIVFR_A {
        // a set bit means the programmed divider ratio is in effect
        if self.bits {
            HSIDIVFR_A::Propagated
        } else {
            HSIDIVFR_A::NotPropagated
        }
    }
    #[doc = "New HSIDIV ratio has not yet propagated to hsi_ck"]
    #[inline(always)]
    pub fn is_not_propagated(&self) -> bool {
        *self == HSIDIVFR_A::NotPropagated
    }
    #[doc = "HSIDIV ratio has propagated to hsi_ck"]
    #[inline(always)]
    pub fn is_propagated(&self) -> bool {
        *self == HSIDIVFR_A::Propagated
    }
}
#[doc = "Field `CSION` reader - CSI clock enable Set and reset by software to enable/disable CSI clock for system and/or peripheral. Set by hardware to force the CSI to ON when the system leaves Stop mode, if STOPWUCK = 1 or STOPKERWUCK = 1. This bit cannot be cleared if the CSI is used directly (via SW mux) as system clock, or if the CSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
pub use HSION_R as CSION_R;
#[doc = "Field `CSIKERON` reader - CSI clock enable in Stop mode Set and reset by software to force the CSI to ON, even in Stop mode, in order to be quickly available as kernel clock for some peripherals. This bit has no effect on the value of CSION."]
pub use HSION_R as CSIKERON_R;
#[doc = "Field `HSI48ON` reader - HSI48 clock enable Set by software and cleared by software or by the hardware when the system enters to Stop or Standby mode."]
pub use HSION_R as HSI48ON_R;
#[doc = "Field `HSEON` reader - HSE clock enable Set and cleared by software. Cleared by hardware to stop the HSE when entering Stop or Standby mode. This bit cannot be cleared if the HSE is used directly (via SW mux) as system clock, or if the HSE is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
pub use HSION_R as HSEON_R;
#[doc = "Field `CSION` writer - CSI clock enable Set and reset by software to enable/disable CSI clock for system and/or peripheral. Set by hardware to force the CSI to ON when the system leaves Stop mode, if STOPWUCK = 1 or STOPKERWUCK = 1. This bit cannot be cleared if the CSI is used directly (via SW mux) as system clock, or if the CSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
pub use HSION_W as CSION_W;
#[doc = "Field `CSIKERON` writer - CSI clock enable in Stop mode Set and reset by software to force the CSI to ON, even in Stop mode, in order to be quickly available as kernel clock for some peripherals. This bit has no effect on the value of CSION."]
pub use HSION_W as CSIKERON_W;
#[doc = "Field `HSI48ON` writer - HSI48 clock enable Set by software and cleared by software or by the hardware when the system enters to Stop or Standby mode."]
pub use HSION_W as HSI48ON_W;
#[doc = "Field `HSEON` writer - HSE clock enable Set and cleared by software. Cleared by hardware to stop the HSE when entering Stop or Standby mode. This bit cannot be cleared if the HSE is used directly (via SW mux) as system clock, or if the HSE is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
pub use HSION_W as HSEON_W;
#[doc = "Field `CSIRDY` reader - CSI clock ready flag Set by hardware to indicate that the CSI oscillator is stable. This bit is activated only if the RC is enabled by CSION (it is not activated if the CSI is enabled by CSIKERON or by a peripheral request)."]
pub use HSIRDY_R as CSIRDY_R;
#[doc = "Field `HSI48RDY` reader - HSI48 clock ready flag Set by hardware to indicate that the HSI48 oscillator is stable."]
pub use HSIRDY_R as HSI48RDY_R;
#[doc = "Field `CPUCKRDY` reader - CPU related clocks ready flag Set by hardware to indicate that the CPU related clocks (CPU, APB3, AXI bus matrix and related memories) are available."]
pub use HSIRDY_R as CPUCKRDY_R;
#[doc = "Field `CDCKRDY` reader - CPU domain clocks ready flag Set by hardware to indicate that the following CPU domain clocks are available: APB1, APB2, AHB bus matrix."]
pub use HSIRDY_R as CDCKRDY_R;
#[doc = "Field `HSERDY` reader - HSE clock ready flag Set by hardware to indicate that the HSE oscillator is stable."]
pub use HSIRDY_R as HSERDY_R;
#[doc = "Field `HSEBYP` reader - HSE clock bypass Set and cleared by software to bypass the oscillator with an external clock. The external clock must be enabled with the HSEON bit to be used by the device. The HSEBYP bit can be written only if the HSE oscillator is disabled."]
pub type HSEBYP_R = crate::BitReader<HSEBYP_A>;
#[doc = "HSE clock bypass Set and cleared by software to bypass the oscillator with an external clock. The external clock must be enabled with the HSEON bit to be used by the device. The HSEBYP bit can be written only if the HSE oscillator is disabled.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum HSEBYP_A {
    // the discriminant is the raw register bit value for this field
    #[doc = "0: HSE crystal oscillator not bypassed"]
    NotBypassed = 0,
    #[doc = "1: HSE crystal oscillator bypassed with external clock"]
    Bypassed = 1,
}
impl From<HSEBYP_A> for bool {
#[inline(always)]
fn from(variant: HSEBYP_A) -> Self {
variant as u8 != 0
}
}
impl HSEBYP_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> HSEBYP_A {
        // a set bit means the crystal oscillator is bypassed
        if self.bits {
            HSEBYP_A::Bypassed
        } else {
            HSEBYP_A::NotBypassed
        }
    }
    #[doc = "HSE crystal oscillator not bypassed"]
    #[inline(always)]
    pub fn is_not_bypassed(&self) -> bool {
        *self == HSEBYP_A::NotBypassed
    }
    #[doc = "HSE crystal oscillator bypassed with external clock"]
    #[inline(always)]
    pub fn is_bypassed(&self) -> bool {
        *self == HSEBYP_A::Bypassed
    }
}
#[doc = "Field `HSEBYP` writer - HSE clock bypass Set and cleared by software to bypass the oscillator with an external clock. The external clock must be enabled with the HSEON bit to be used by the device. The HSEBYP bit can be written only if the HSE oscillator is disabled."]
pub type HSEBYP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, HSEBYP_A>;
impl<'a, REG, const O: u8> HSEBYP_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    // each setter writes the corresponding enumerated value into the field
    // and returns the register writer for method chaining
    #[doc = "HSE crystal oscillator not bypassed"]
    #[inline(always)]
    pub fn not_bypassed(self) -> &'a mut crate::W<REG> {
        self.variant(HSEBYP_A::NotBypassed)
    }
    #[doc = "HSE crystal oscillator bypassed with external clock"]
    #[inline(always)]
    pub fn bypassed(self) -> &'a mut crate::W<REG> {
        self.variant(HSEBYP_A::Bypassed)
    }
}
#[doc = "Field `HSECSSON` reader - HSE clock security system enable Set by software to enable clock security system on HSE. This bit is â\u{80}\u{9c}set onlyâ\u{80}\u{9d} (disabled by a system reset or when the system enters in Standby mode). When HSECSSON is set, the clock detector is enabled by hardware when the HSE is ready and disabled by hardware if an oscillator failure is detected."]
pub use HSION_R as HSECSSON_R;
#[doc = "Field `HSECSSON` writer - HSE clock security system enable Set by software to enable clock security system on HSE. This bit is â\u{80}\u{9c}set onlyâ\u{80}\u{9d} (disabled by a system reset or when the system enters in Standby mode). When HSECSSON is set, the clock detector is enabled by hardware when the HSE is ready and disabled by hardware if an oscillator failure is detected."]
pub use HSION_W as HSECSSON_W;
#[doc = "Field `HSEEXT` reader - external high speed clock type in Bypass mode Set and reset by software to select the external clock type (analog or digital). The external clock must be enabled with the HSEON bit to be used by the device. The HSEEXT bit can be written only if the HSE oscillator is disabled."]
pub type HSEEXT_R = crate::BitReader;
#[doc = "Field `HSEEXT` writer - external high speed clock type in Bypass mode Set and reset by software to select the external clock type (analog or digital). The external clock must be enabled with the HSEON bit to be used by the device. The HSEEXT bit can be written only if the HSE oscillator is disabled."]
pub type HSEEXT_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLL1ON` reader - PLL1 enable Set and cleared by software to enable PLL1. Cleared by hardware when entering Stop or Standby mode. Note that the hardware prevents writing this bit to 0, if the PLL1 output is used as the system clock."]
pub use HSION_R as PLL1ON_R;
#[doc = "Field `PLL2ON` reader - PLL2 enable Set and cleared by software to enable PLL2. Cleared by hardware when entering Stop or Standby mode."]
pub use HSION_R as PLL2ON_R;
#[doc = "Field `PLL3ON` reader - PLL3 enable Set and cleared by software to enable PLL3. Cleared by hardware when entering Stop or Standby mode."]
pub use HSION_R as PLL3ON_R;
#[doc = "Field `PLL1ON` writer - PLL1 enable Set and cleared by software to enable PLL1. Cleared by hardware when entering Stop or Standby mode. Note that the hardware prevents writing this bit to 0, if the PLL1 output is used as the system clock."]
pub use HSION_W as PLL1ON_W;
#[doc = "Field `PLL2ON` writer - PLL2 enable Set and cleared by software to enable PLL2. Cleared by hardware when entering Stop or Standby mode."]
pub use HSION_W as PLL2ON_W;
#[doc = "Field `PLL3ON` writer - PLL3 enable Set and cleared by software to enable PLL3. Cleared by hardware when entering Stop or Standby mode."]
pub use HSION_W as PLL3ON_W;
#[doc = "Field `PLL1RDY` reader - PLL1 clock ready flag Set by hardware to indicate that the PLL1 is locked."]
pub use HSIRDY_R as PLL1RDY_R;
#[doc = "Field `PLL2RDY` reader - PLL2 clock ready flag Set by hardware to indicate that the PLL2 is locked."]
pub use HSIRDY_R as PLL2RDY_R;
#[doc = "Field `PLL3RDY` reader - PLL3 clock ready flag Set by hardware to indicate that the PLL3 is locked."]
pub use HSIRDY_R as PLL3RDY_R;
impl R {
    // each accessor extracts one field of the RCC CR register by shifting the
    // raw register value right by the field's bit offset and masking its width
    #[doc = "Bit 0 - HSI clock enable Set and cleared by software. Set by hardware to force the HSI to ON when the product leaves Stop mode, if STOPWUCK = 0 or STOPKERWUCK = 0. Set by hardware to force the HSI to ON when the product leaves Standby mode or in case of a failure of the HSE which is used as the system clock source. This bit cannot be cleared if the HSI is used directly (via SW mux) as system clock, or if the HSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
    #[inline(always)]
    pub fn hsion(&self) -> HSION_R {
        HSION_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - HSI clock enable in Stop mode Set and reset by software to force the HSI to ON, even in Stop mode, in order to be quickly available as kernel clock for peripherals. This bit has no effect on the value of HSION."]
    #[inline(always)]
    pub fn hsikeron(&self) -> HSIKERON_R {
        HSIKERON_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - HSI clock ready flag Set by hardware to indicate that the HSI oscillator is stable."]
    #[inline(always)]
    pub fn hsirdy(&self) -> HSIRDY_R {
        HSIRDY_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bits 3:4 - HSI clock divider Set and reset by software. These bits allow selecting a division ratio in order to configure the wanted HSI clock frequency. The HSIDIV cannot be changed if the HSI is selected as reference clock for at least one enabled PLL (PLLxON bit set to 1). In that case, the new HSIDIV value is ignored."]
    #[inline(always)]
    pub fn hsidiv(&self) -> HSIDIV_R {
        // two-bit field, hence the mask of 3
        HSIDIV_R::new(((self.bits >> 3) & 3) as u8)
    }
    #[doc = "Bit 5 - HSI divider flag Set and reset by hardware. As a write operation to HSIDIV has not an immediate effect on the frequency, this flag indicates the current status of the HSI divider. HSIDIVF goes immediately to 0 when HSIDIV value is changed, and is set back to 1 when the output frequency matches the value programmed into HSIDIV. clock setting is completed)"]
    #[inline(always)]
    pub fn hsidivf(&self) -> HSIDIVF_R {
        HSIDIVF_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 7 - CSI clock enable Set and reset by software to enable/disable CSI clock for system and/or peripheral. Set by hardware to force the CSI to ON when the system leaves Stop mode, if STOPWUCK = 1 or STOPKERWUCK = 1. This bit cannot be cleared if the CSI is used directly (via SW mux) as system clock, or if the CSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
    #[inline(always)]
    pub fn csion(&self) -> CSION_R {
        CSION_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - CSI clock ready flag Set by hardware to indicate that the CSI oscillator is stable. This bit is activated only if the RC is enabled by CSION (it is not activated if the CSI is enabled by CSIKERON or by a peripheral request)."]
    #[inline(always)]
    pub fn csirdy(&self) -> CSIRDY_R {
        CSIRDY_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - CSI clock enable in Stop mode Set and reset by software to force the CSI to ON, even in Stop mode, in order to be quickly available as kernel clock for some peripherals. This bit has no effect on the value of CSION."]
    #[inline(always)]
    pub fn csikeron(&self) -> CSIKERON_R {
        CSIKERON_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 12 - HSI48 clock enable Set by software and cleared by software or by the hardware when the system enters to Stop or Standby mode."]
    #[inline(always)]
    pub fn hsi48on(&self) -> HSI48ON_R {
        HSI48ON_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 13 - HSI48 clock ready flag Set by hardware to indicate that the HSI48 oscillator is stable."]
    #[inline(always)]
    pub fn hsi48rdy(&self) -> HSI48RDY_R {
        HSI48RDY_R::new(((self.bits >> 13) & 1) != 0)
    }
    #[doc = "Bit 14 - CPU related clocks ready flag Set by hardware to indicate that the CPU related clocks (CPU, APB3, AXI bus matrix and related memories) are available."]
    #[inline(always)]
    pub fn cpuckrdy(&self) -> CPUCKRDY_R {
        CPUCKRDY_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - CPU domain clocks ready flag Set by hardware to indicate that the following CPU domain clocks are available: APB1, APB2, AHB bus matrix."]
    #[inline(always)]
    pub fn cdckrdy(&self) -> CDCKRDY_R {
        CDCKRDY_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - HSE clock enable Set and cleared by software. Cleared by hardware to stop the HSE when entering Stop or Standby mode. This bit cannot be cleared if the HSE is used directly (via SW mux) as system clock, or if the HSE is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
    #[inline(always)]
    pub fn hseon(&self) -> HSEON_R {
        HSEON_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - HSE clock ready flag Set by hardware to indicate that the HSE oscillator is stable."]
    #[inline(always)]
    pub fn hserdy(&self) -> HSERDY_R {
        HSERDY_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - HSE clock bypass Set and cleared by software to bypass the oscillator with an external clock. The external clock must be enabled with the HSEON bit to be used by the device. The HSEBYP bit can be written only if the HSE oscillator is disabled."]
    #[inline(always)]
    pub fn hsebyp(&self) -> HSEBYP_R {
        HSEBYP_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - HSE clock security system enable Set by software to enable clock security system on HSE. This bit is â\u{80}\u{9c}set onlyâ\u{80}\u{9d} (disabled by a system reset or when the system enters in Standby mode). When HSECSSON is set, the clock detector is enabled by hardware when the HSE is ready and disabled by hardware if an oscillator failure is detected."]
    #[inline(always)]
    pub fn hsecsson(&self) -> HSECSSON_R {
        HSECSSON_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - external high speed clock type in Bypass mode Set and reset by software to select the external clock type (analog or digital). The external clock must be enabled with the HSEON bit to be used by the device. The HSEEXT bit can be written only if the HSE oscillator is disabled."]
    #[inline(always)]
    pub fn hseext(&self) -> HSEEXT_R {
        HSEEXT_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 24 - PLL1 enable Set and cleared by software to enable PLL1. Cleared by hardware when entering Stop or Standby mode. Note that the hardware prevents writing this bit to 0, if the PLL1 output is used as the system clock."]
    #[inline(always)]
    pub fn pll1on(&self) -> PLL1ON_R {
        PLL1ON_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - PLL1 clock ready flag Set by hardware to indicate that the PLL1 is locked."]
    #[inline(always)]
    pub fn pll1rdy(&self) -> PLL1RDY_R {
        PLL1RDY_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - PLL2 enable Set and cleared by software to enable PLL2. Cleared by hardware when entering Stop or Standby mode."]
    #[inline(always)]
    pub fn pll2on(&self) -> PLL2ON_R {
        PLL2ON_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - PLL2 clock ready flag Set by hardware to indicate that the PLL2 is locked."]
    #[inline(always)]
    pub fn pll2rdy(&self) -> PLL2RDY_R {
        PLL2RDY_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - PLL3 enable Set and cleared by software to enable PLL3. Cleared by hardware when entering Stop or Standby mode."]
    #[inline(always)]
    pub fn pll3on(&self) -> PLL3ON_R {
        PLL3ON_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - PLL3 clock ready flag Set by hardware to indicate that the PLL3 is locked."]
    #[inline(always)]
    pub fn pll3rdy(&self) -> PLL3RDY_R {
        PLL3RDY_R::new(((self.bits >> 29) & 1) != 0)
    }
}
// Writer proxies for the RCC clock control register (CR), generated from the
// vendor SVD description. Each method returns a field writer parameterised by
// the field's bit offset (the const generic argument).
impl W {
    #[doc = "Bit 0 - HSI clock enable Set and cleared by software. Set by hardware to force the HSI to ON when the product leaves Stop mode, if STOPWUCK = 0 or STOPKERWUCK = 0. Set by hardware to force the HSI to ON when the product leaves Standby mode or in case of a failure of the HSE which is used as the system clock source. This bit cannot be cleared if the HSI is used directly (via SW mux) as system clock, or if the HSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
    #[inline(always)]
    #[must_use]
    pub fn hsion(&mut self) -> HSION_W<CR_SPEC, 0> {
        HSION_W::new(self)
    }
    #[doc = "Bit 1 - HSI clock enable in Stop mode Set and reset by software to force the HSI to ON, even in Stop mode, in order to be quickly available as kernel clock for peripherals. This bit has no effect on the value of HSION."]
    #[inline(always)]
    #[must_use]
    pub fn hsikeron(&mut self) -> HSIKERON_W<CR_SPEC, 1> {
        HSIKERON_W::new(self)
    }
    #[doc = "Bits 3:4 - HSI clock divider Set and reset by software. These bits allow selecting a division ratio in order to configure the wanted HSI clock frequency. The HSIDIV cannot be changed if the HSI is selected as reference clock for at least one enabled PLL (PLLxON bit set to 1). In that case, the new HSIDIV value is ignored."]
    #[inline(always)]
    #[must_use]
    pub fn hsidiv(&mut self) -> HSIDIV_W<CR_SPEC, 3> {
        HSIDIV_W::new(self)
    }
    #[doc = "Bit 7 - CSI clock enable Set and reset by software to enable/disable CSI clock for system and/or peripheral. Set by hardware to force the CSI to ON when the system leaves Stop mode, if STOPWUCK = 1 or STOPKERWUCK = 1. This bit cannot be cleared if the CSI is used directly (via SW mux) as system clock, or if the CSI is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
    #[inline(always)]
    #[must_use]
    pub fn csion(&mut self) -> CSION_W<CR_SPEC, 7> {
        CSION_W::new(self)
    }
    #[doc = "Bit 9 - CSI clock enable in Stop mode Set and reset by software to force the CSI to ON, even in Stop mode, in order to be quickly available as kernel clock for some peripherals. This bit has no effect on the value of CSION."]
    #[inline(always)]
    #[must_use]
    pub fn csikeron(&mut self) -> CSIKERON_W<CR_SPEC, 9> {
        CSIKERON_W::new(self)
    }
    #[doc = "Bit 12 - HSI48 clock enable Set by software and cleared by software or by the hardware when the system enters to Stop or Standby mode."]
    #[inline(always)]
    #[must_use]
    pub fn hsi48on(&mut self) -> HSI48ON_W<CR_SPEC, 12> {
        HSI48ON_W::new(self)
    }
    #[doc = "Bit 16 - HSE clock enable Set and cleared by software. Cleared by hardware to stop the HSE when entering Stop or Standby mode. This bit cannot be cleared if the HSE is used directly (via SW mux) as system clock, or if the HSE is selected as reference clock for PLL1 with PLL1 enabled (PLL1ON bit set to 1)."]
    #[inline(always)]
    #[must_use]
    pub fn hseon(&mut self) -> HSEON_W<CR_SPEC, 16> {
        HSEON_W::new(self)
    }
    #[doc = "Bit 18 - HSE clock bypass Set and cleared by software to bypass the oscillator with an external clock. The external clock must be enabled with the HSEON bit to be used by the device. The HSEBYP bit can be written only if the HSE oscillator is disabled."]
    #[inline(always)]
    #[must_use]
    pub fn hsebyp(&mut self) -> HSEBYP_W<CR_SPEC, 18> {
        HSEBYP_W::new(self)
    }
    #[doc = "Bit 19 - HSE clock security system enable Set by software to enable clock security system on HSE. This bit is “set only” (disabled by a system reset or when the system enters in Standby mode). When HSECSSON is set, the clock detector is enabled by hardware when the HSE is ready and disabled by hardware if an oscillator failure is detected."]
    #[inline(always)]
    #[must_use]
    pub fn hsecsson(&mut self) -> HSECSSON_W<CR_SPEC, 19> {
        HSECSSON_W::new(self)
    }
    #[doc = "Bit 20 - external high speed clock type in Bypass mode Set and reset by software to select the external clock type (analog or digital). The external clock must be enabled with the HSEON bit to be used by the device. The HSEEXT bit can be written only if the HSE oscillator is disabled."]
    #[inline(always)]
    #[must_use]
    pub fn hseext(&mut self) -> HSEEXT_W<CR_SPEC, 20> {
        HSEEXT_W::new(self)
    }
    #[doc = "Bit 24 - PLL1 enable Set and cleared by software to enable PLL1. Cleared by hardware when entering Stop or Standby mode. Note that the hardware prevents writing this bit to 0, if the PLL1 output is used as the system clock."]
    #[inline(always)]
    #[must_use]
    pub fn pll1on(&mut self) -> PLL1ON_W<CR_SPEC, 24> {
        PLL1ON_W::new(self)
    }
    #[doc = "Bit 26 - PLL2 enable Set and cleared by software to enable PLL2. Cleared by hardware when entering Stop or Standby mode."]
    #[inline(always)]
    #[must_use]
    pub fn pll2on(&mut self) -> PLL2ON_W<CR_SPEC, 26> {
        PLL2ON_W::new(self)
    }
    #[doc = "Bit 28 - PLL3 enable Set and cleared by software to enable PLL3. Cleared by hardware when entering Stop or Standby mode."]
    #[inline(always)]
    #[must_use]
    pub fn pll3on(&mut self) -> PLL3ON_W<CR_SPEC, 28> {
        PLL3ON_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`cr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`cr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker type describing the CR register; the trait impls below
// declare its width, access modes, and reset value for the generic Reg API.
pub struct CR_SPEC;
impl crate::RegisterSpec for CR_SPEC {
    // CR is a 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`cr::R`](R) reader structure"]
impl crate::Readable for CR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`cr::W`](W) writer structure"]
impl crate::Writable for CR_SPEC {
    // No fields require writing 0 or 1 to leave other bits unmodified.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CR to value 0x25"]
impl crate::Resettable for CR_SPEC {
    const RESET_VALUE: Self::Ux = 0x25;
}
|
// MIT License
// Copyright (c) 2020 Andrew Plaza
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use clap::{load_yaml, value_t, App};
use derive_builder::Builder;
use primitives::H256;
use std::{path::PathBuf, str::FromStr};
/// Command-line configuration assembled by `parse_args` via the derived
/// `ConfigurationBuilder`. All fields are optional CLI arguments.
#[derive(Default, Builder, Debug)]
pub struct Configuration {
    // H256 hash parsed from the `hash` argument, if supplied.
    pub hash: Option<H256>,
    // Output path from the `out` argument, if supplied.
    pub out: Option<PathBuf>,
    // URL from the `url` argument, if supplied.
    pub url: Option<String>,
}
/// Parse command-line arguments (declared in `cli.yml`) into a `Configuration`.
///
/// # Panics
/// Panics if the `hash` argument is present but is not a valid `H256` string,
/// or if the builder fails to assemble the configuration.
pub fn parse_args() -> Configuration {
    let yaml = load_yaml!("cli.yml");
    let matches = App::from_yaml(yaml).get_matches();
    // Every argument is optional: `value_t!` yields Err when absent, so `.ok()`
    // turns "missing" into None instead of the original `if let Some(h) = x.ok()`.
    let hash = value_t!(matches, "hash", String)
        .ok()
        .map(|h| H256::from_str(&h).expect("Hash should be H256 type"));
    let url = value_t!(matches, "url", String).ok();
    let out = value_t!(matches, "out", PathBuf).ok();
    // derive_builder setters return `&mut Self`, so the calls chain.
    ConfigurationBuilder::default()
        .hash(hash)
        .url(url)
        .out(out)
        .build()
        .expect("Could not build config")
}
|
use super::error::Error;
use std::fs::{self, File, OpenOptions};
use std::path::{Path, PathBuf};
use std::sync::Arc;
/// The kinds of files that make up a series on disk.
pub enum FileKind {
    // `series.dat` — the data file.
    Data,
    // `series.idx` — the index file.
    Index,
    // `series.log.<seq>` — a log file with the given sequence number.
    Log(u64),
}
/// How a series file should be opened.
pub enum OpenMode {
    // Read-only.
    Read,
    // Read-write; the file is created if it does not exist.
    Write,
}
/// Handle to the directory holding one series' files.
pub struct SeriesDir {
    base_path: PathBuf,
}
impl SeriesDir {
    /// Resolve the on-disk path for a file of the given kind.
    ///
    /// Joins directly per arm so the static names ("series.dat", "series.idx")
    /// no longer allocate an intermediate `String` as the old
    /// `match`-then-`join(String)` form did.
    fn file_path(&self, kind: FileKind) -> PathBuf {
        match kind {
            FileKind::Data => self.base_path.join("series.dat"),
            FileKind::Index => self.base_path.join("series.idx"),
            FileKind::Log(s) => self.base_path.join(format!("series.log.{}", s)),
        }
    }
    /// Open a series file read-only or read-write (creating it if needed).
    pub fn open(&self, kind: FileKind, mode: OpenMode) -> Result<File, Error> {
        let path = self.file_path(kind);
        let mut options = OpenOptions::new();
        let options = match mode {
            OpenMode::Read => options.read(true),
            // Write mode still allows reads and creates the file on demand.
            OpenMode::Write => options.read(true).write(true).create(true),
        };
        Ok(options.open(&path)?)
    }
    /// Extract the sequence number from a `series.log.<seq>` file name.
    fn parse_log_filename(&self, s: &str) -> Option<u64> {
        s.strip_prefix("series.log.")
            .and_then(|suffix| suffix.parse::<u64>().ok())
    }
    /// List all log sequence numbers in the directory, newest (largest) first.
    pub fn read_log_sequences(&self) -> Result<Vec<u64>, Error> {
        let mut sequences = fs::read_dir(&self.base_path)?
            .filter_map(|entry| entry.ok())
            .filter_map(|entry| entry.file_name().into_string().ok())
            .filter_map(|entry| self.parse_log_filename(&entry))
            .collect::<Vec<u64>>();
        sequences.sort_unstable();
        sequences.reverse();
        Ok(sequences)
    }
    /// Delete the log file with the given sequence number.
    pub fn remove_log(&self, seq: u64) -> Result<(), Error> {
        Ok(fs::remove_file(self.file_path(FileKind::Log(seq)))?)
    }
}
/// Root handle for the on-disk storage layout; series live under
/// `<base_path>/series/<name>/`.
pub struct FileSystem {
    base_path: PathBuf,
}
impl FileSystem {
    /// Open (creating if necessary) the directory for the named series.
    pub fn series<S: AsRef<str>>(&self, name: S) -> Result<Arc<SeriesDir>, Error> {
        let base_path = self.base_path.join("series").join(name.as_ref());
        fs::create_dir_all(&base_path)?;
        Ok(Arc::new(SeriesDir { base_path }))
    }
    /// Rename a series directory from `src` to `dst`.
    pub fn rename_series<S: AsRef<str>>(&self, src: S, dst: S) -> Result<(), Error> {
        let src_path = self.base_path.join("series").join(src.as_ref());
        let dst_path = self.base_path.join("series").join(dst.as_ref());
        Ok(fs::rename(src_path, dst_path)?)
    }
    /// Return the sorted names of all entries that contain a `series.dat`.
    pub fn get_series(&self) -> Result<Vec<String>, Error> {
        let mut series = Vec::new();
        for entry in fs::read_dir(self.base_path.join("series"))? {
            // `path()` already returns an owned PathBuf — the old `.clone()`
            // copied it a second time for nothing.
            let series_path = entry?.path();
            if series_path.join("series.dat").is_file() {
                // Skip names that are not valid UTF-8 (same effect as the old
                // `to_owned().into_string().ok()`, without the OsString detour).
                if let Some(filename) = series_path.file_name().and_then(|f| f.to_str()) {
                    series.push(filename.to_owned());
                }
            }
        }
        series.sort();
        Ok(series)
    }
}
/// Initialize a `FileSystem` rooted at `base_path`, creating the `series`
/// subdirectory if it does not yet exist.
pub fn open<P: AsRef<Path>>(base_path: P) -> Result<FileSystem, Error> {
    let base_path = base_path.as_ref().to_owned();
    fs::create_dir_all(base_path.join("series"))?;
    Ok(FileSystem { base_path })
}
#[cfg(test)]
pub mod test {
    use super::*;
    use std::ops::Deref;
    use std::time::{SystemTime, UNIX_EPOCH};
    /// A `FileSystem` rooted in a unique temporary directory that is removed
    /// when the value is dropped.
    pub struct TempFS {
        pub fs: FileSystem,
        path: PathBuf,
    }
    impl Drop for TempFS {
        fn drop(&mut self) {
            fs::remove_dir_all(&self.path).unwrap();
        }
    }
    impl Deref for TempFS {
        type Target = FileSystem;
        fn deref(&self) -> &Self::Target {
            &self.fs
        }
    }
    /// Create a fresh `TempFS` whose directory name embeds a nanosecond
    /// timestamp to keep concurrent test runs from colliding.
    pub fn open() -> Result<TempFS, Error> {
        let path = PathBuf::from(format!(
            "temp-dir-{:?}",
            SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .unwrap()
                .as_nanos()
        ));
        // `super::open` only borrows `path`, so it can be moved into the
        // struct afterwards — the old `path.clone()` was a needless copy.
        let fs = super::open(&path)?;
        Ok(TempFS { fs, path })
    }
}
|
use std::{net::SocketAddr};
use msg_types::{AnnouncePublic, AnnounceSecret, CallResponse};
use mio::Token;
use crate::common::{encryption::SymmetricEncryption, lib::read_exact, message_type::{MsgType, msg_types::{self, Call}, Peer}};
use super::{CallRequest, RendezvousServer};
impl RendezvousServer {
    /// Read and dispatch one length-prefixed TCP message from the connection
    /// identified by `token`. `msg_size` holds the already-read length prefix.
    ///
    /// The payload is decrypted with, in order of preference: the per-address
    /// symmetric key, the symmetric key stored on an announced peer, or the
    /// server's asymmetric key (used before the peer has announced).
    /// Byte 0 of the plaintext selects the message type; the remainder is a
    /// bincode-encoded body.
    pub fn read_tcp_message(&mut self, msg_size: &[u8], token: Token) {
        let sock = self.tcp_connections.get_mut(&token).unwrap();
        let addr = sock.peer_addr().unwrap();
        let msg_size: u64 = bincode::deserialize(msg_size).unwrap();
        let mut encrypted = vec![0; msg_size as usize];
        read_exact(sock, &mut encrypted[..]);
        let mut msg = match self.sym_keys.get(&addr) {
            Some(sym_key) => sym_key.decrypt(&mut encrypted[..]), // Peer has already announced: use the symmetric key
            None => {
                match self.peers.iter().find(|p| p.addr.unwrap() == addr) {
                    Some(p) => p.sym_key.as_ref().unwrap().decrypt(&mut encrypted[..]),
                    None => self.encryption.decrypt(&mut encrypted[..]) // Peer hasn't announced yet: use the asymmetric key
                }
            }
        };
        // First plaintext byte is the message discriminant.
        let msg_type = num::FromPrimitive::from_u8(msg[0]);
        match msg_type {
            Some(MsgType::AnnounceSecret) => {
                let announcement: msg_types::AnnounceSecret = bincode::deserialize(&mut msg[1..]).unwrap();
                self.on_secret_announce(addr, announcement);
            }
            Some(MsgType::Announce) => {
                let announcement: msg_types::AnnouncePublic = bincode::deserialize(&mut msg[1..]).unwrap();
                self.on_announce(addr, announcement);
            }
            Some(MsgType::Call) => {
                let mut call: msg_types::Call = bincode::deserialize(&mut msg[1..]).unwrap();
                self.on_call(addr, &mut call);
            }
            Some(MsgType::CallResponse) => {
                let call_response: msg_types::CallResponse = bincode::deserialize(&mut msg[1..]).unwrap();
                self.on_call_response(addr, call_response);
            }
            // NOTE(review): an unknown discriminant from a remote peer reaches
            // this `unreachable!()` — verify peers are trusted to send valid types.
            _ => unreachable!()
        }
    }
    /// After receiving the secret, wait for the public key to arrive
    fn on_secret_announce(&mut self, addr: SocketAddr, announcement: AnnounceSecret) {
        // Store the shared secret keyed by address; `on_announce` consumes it.
        let secret = SymmetricEncryption::new_from_secret(&announcement.secret[..]);
        self.sym_keys.insert(addr, secret);
    }
    /// Handle a public-key announcement: register the peer (pairing it with the
    /// symmetric key stored by `on_secret_announce`), send the new client the
    /// current peer list, and broadcast the newcomer to everyone else.
    fn on_announce(&mut self, addr: SocketAddr, announcement: AnnouncePublic) {
        let p = Peer {
            addr: Some(addr),
            udp_addr: None,
            public_key: announcement.public_key,
            // Moves the symmetric key out of `sym_keys`; panics if the secret
            // was never announced for this address.
            sym_key: Some(self.sym_keys.remove(&addr).unwrap())
        };
        println!("Received public key for peer ({}): {}", p.addr.unwrap(), p.public_key);
        // Notify the new client of the connections
        let sock = self.tcp_connections.iter_mut().find(|(_, c)| c.peer_addr().unwrap() == addr).unwrap().1;
        RendezvousServer::send_tcp_message(sock, MsgType::Announce, &self.peers.to_vec().iter_mut().map(|x| x.safe_clone()).collect::<Vec<_>>());
        // Notify everyone else of the new connection
        for c in self.tcp_connections.values_mut().filter(|c| c.peer_addr().unwrap() != addr) {
            RendezvousServer::send_tcp_message(c, MsgType::Announce, &[p.safe_clone()].to_vec());
        }
        self.peers.push(p);
    }
    /// Route a call request from `addr` to the requested callee. If either
    /// side has no known UDP address yet, reject the call back to the caller;
    /// otherwise record the pending call and forward it to the callee.
    fn on_call(&mut self, addr: SocketAddr, call: &mut Call) {
        if let Some(caller) = self.peers.iter().find(|x| x.addr.unwrap() == addr) {
            if let Some(callee) = self.peers.iter().find(|x| x.public_key == call.callee) {
                if caller.udp_addr.is_none() || callee.udp_addr.is_none() {
                    // Cannot hole-punch without both UDP addresses: refuse.
                    let caller_token = self.addresses.get(&caller.addr.unwrap()).unwrap();
                    let mut caller_socket = self.tcp_connections.get_mut(caller_token).unwrap();
                    RendezvousServer::send_tcp_message(&mut caller_socket, MsgType::CallResponse, &CallResponse{
                        call: call.clone(),
                        response: false
                    });
                    println!("Error routing a call from ({}; {}) to ({}; {}) udp address hasn't been found", addr, caller.public_key, callee.addr.unwrap(), callee.public_key);
                }
                else {
                    let req = CallRequest{
                        caller: caller.clone(),
                        callee: callee.clone(),
                    };
                    self.calls.push(req);
                    // Don't trust the client
                    call.caller = Some(caller.clone().public_key);
                    let token = self.addresses.get(&callee.addr.unwrap()).unwrap();
                    let mut callee_socket = self.tcp_connections.get_mut(token).unwrap();
                    call.udp_address = caller.udp_addr;
                    RendezvousServer::send_tcp_message(&mut callee_socket, MsgType::Call, &call);
                    println!("Routed a call from ({}; {}) to ({}; {})", addr, caller.public_key, callee_socket.peer_addr().unwrap(), callee.public_key);
                }
            }
            else {
                println!("Callee haven't announced itself yet");
            }
        }
        else {
            println!("Caller haven't announced itself yet. ({})", addr);
        }
    }
    /// Handle the callee's accept/deny answer for a pending call. On accept,
    /// forward the callee's UDP address to the caller; in both cases the
    /// pending call record is removed.
    fn on_call_response(&mut self, _: SocketAddr, call_response: CallResponse) {
        let callee = call_response.call.callee;
        let caller = call_response.call.caller.unwrap();
        match self.calls.iter().position(|x| x.callee.public_key == callee && x.caller.public_key == caller) {
            Some(index) => {
                if call_response.response {
                    println!("Peer ({}) accepted the call request from ({})", callee, caller);
                    let caller_peer = self.peers.iter().find(|p| p.public_key == caller).unwrap();
                    let callee_peer = self.peers.iter().find(|p| p.public_key == callee).unwrap().clone();
                    let mut sock = self.tcp_connections.values_mut().find(|x| x.peer_addr().unwrap() == caller_peer.addr.unwrap()).unwrap();
                    let msg = msg_types::CallResponse {
                        call: Call {
                            callee,
                            caller: Some(caller),
                            // Reveal the callee's UDP address only after acceptance.
                            udp_address: Some(callee_peer.udp_addr.unwrap())
                        },
                        response: call_response.response,
                    };
                    RendezvousServer::send_tcp_message(&mut sock, MsgType::CallResponse, &msg);
                }
                else {
                    println!("Peer ({}) denied the call request from ({})", callee, caller);
                }
                self.calls.remove(index);
            }
            None => {
                println!("Peer ({}) accepted call that wasn't in the database", callee);
            }
        }
    }
}
use alloc::{vec, vec::Vec};
use crate::Renderer;
/// Uncompressed texture formats supported by the renderer; each maps onto a
/// `wgpu::TextureFormat` via `wgpu_type`.
pub enum TextureFormat {
    Rgba8Unorm,
    Bgra8Unorm,
    Rgba16Float,
    // 32-bit float depth (wgpu `Depth32Float`).
    Depth32,
}
impl TextureFormat {
pub(crate) fn wgpu_type(&self) -> wgpu::TextureFormat {
match self {
TextureFormat::Rgba8Unorm => wgpu::TextureFormat::Rgba8Unorm,
TextureFormat::Bgra8Unorm => wgpu::TextureFormat::Bgra8Unorm,
TextureFormat::Rgba16Float => wgpu::TextureFormat::Rgba16Float,
TextureFormat::Depth32 => wgpu::TextureFormat::Depth32Float,
}
}
pub(crate) fn bytes_per_row(&self) -> usize {
match self {
TextureFormat::Rgba8Unorm => 4,
TextureFormat::Bgra8Unorm => 4,
TextureFormat::Rgba16Float => 8,
TextureFormat::Depth32 => 4,
}
}
}
/// Block-compressed (S3TC/DXT) texture formats accepted as input; they are
/// decoded to an uncompressed format before upload (see `decoded_format`).
pub enum CompressedTextureFormat {
    // DXT1
    BC1,
    // DXT3
    BC2,
    // DXT5
    BC3,
}
impl CompressedTextureFormat {
    /// The uncompressed format produced when this compressed format is decoded.
    /// Every BC variant currently decodes to RGBA8.
    pub(crate) fn decoded_format(&self) -> TextureFormat {
        match self {
            Self::BC1 | Self::BC2 | Self::BC3 => TextureFormat::Rgba8Unorm,
        }
    }
}
/// A GPU texture; only the view is retained, which is what bind groups need.
pub struct Texture {
    pub(crate) texture_view: wgpu::TextureView,
}
impl Texture {
pub fn new(renderer: &Renderer, width: u32, height: u32, format: TextureFormat) -> Self {
let extent = wgpu::Extent3d { width, height, depth: 1 };
let texture = renderer.device.create_texture(&wgpu::TextureDescriptor {
size: extent,
array_layer_count: 1,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: format.wgpu_type(),
usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST | wgpu::TextureUsage::OUTPUT_ATTACHMENT,
label: None,
});
let texture_view = texture.create_default_view();
Self { texture_view }
}
pub async fn with_texels(renderer: &Renderer, width: u32, height: u32, texels: &[u8], format: TextureFormat) -> Self {
let extent = wgpu::Extent3d { width, height, depth: 1 };
let texture = renderer.device.create_texture(&wgpu::TextureDescriptor {
size: extent,
array_layer_count: 1,
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: format.wgpu_type(),
usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST | wgpu::TextureUsage::OUTPUT_ATTACHMENT,
label: None,
});
let texture_view = texture.create_default_view();
let buffer = renderer.buffer_pool.alloc(texels.len());
buffer.write(texels).await.unwrap();
renderer.enqueue_texture_upload(buffer, texture, format.bytes_per_row() * extent.width as usize, extent);
Self { texture_view }
}
pub async fn with_compressed_texels(renderer: &Renderer, width: u32, height: u32, data: &[u8], format: CompressedTextureFormat) -> Self {
let uncompressed = Self::decode_texture(data, width, height, &format);
Self::with_texels(renderer, width, height, &uncompressed, format.decoded_format()).await
}
fn decode_texture(data: &[u8], width: u32, height: u32, format: &CompressedTextureFormat) -> Vec<u8> {
let result_size = (width as usize) * (height as usize) * 4; // RGBA
let mut result = vec![0; result_size];
let format = match format {
CompressedTextureFormat::BC1 => squish::Format::Bc1,
CompressedTextureFormat::BC2 => squish::Format::Bc2,
CompressedTextureFormat::BC3 => squish::Format::Bc3,
};
format.decompress(data, width as usize, height as usize, result.as_mut());
result
}
}
|
use crate::config::cache::{Cache, DiskBasedCache};
use crate::config::dfinity::Config;
use crate::config::{cache, dfx_version};
use crate::lib::error::DfxResult;
use crate::lib::identity::identity_manager::IdentityManager;
use crate::lib::network::network_descriptor::NetworkDescriptor;
use crate::lib::progress_bar::ProgressBar;
use anyhow::{anyhow, Context};
use ic_agent::{Agent, Identity};
use ic_types::Principal;
use semver::Version;
use slog::{Logger, Record};
use std::collections::BTreeMap;
use std::fs::create_dir_all;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use std::time::Duration;
/// Shared capabilities every dfx execution environment exposes: project
/// configuration, cache, versioning, identity selection, logging, and
/// (optionally) an agent/network for talking to a replica.
pub trait Environment {
    fn get_cache(&self) -> Arc<dyn Cache>;
    fn get_config(&self) -> Option<Arc<Config>>;
    // Like `get_config`, but turns "no config" into an error.
    fn get_config_or_anyhow(&self) -> anyhow::Result<Arc<Config>>;
    fn is_in_project(&self) -> bool;
    /// Return a temporary directory for configuration if none exists
    /// for the current project or if not in a project. Following
    /// invocations by other processes in the same project should
    /// return the same configuration directory.
    fn get_temp_dir(&self) -> &Path;
    /// Return the directory where state for replica(s) is kept.
    fn get_state_dir(&self) -> PathBuf;
    fn get_version(&self) -> &Version;
    /// This is value of the name passed to dfx `--identity <name>`
    /// Notably, it is _not_ the name of the default identity or selected identity
    fn get_identity_override(&self) -> &Option<String>;
    // Explicit lifetimes are actually needed for mockall to work properly.
    #[allow(clippy::needless_lifetimes)]
    fn get_agent<'a>(&'a self) -> Option<&'a Agent>;
    #[allow(clippy::needless_lifetimes)]
    fn get_network_descriptor<'a>(&'a self) -> Option<&'a NetworkDescriptor>;
    fn get_logger(&self) -> &slog::Logger;
    // Progress indicators; implementations may return a no-op bar.
    fn new_spinner(&self, message: &str) -> ProgressBar;
    fn new_progress(&self, message: &str) -> ProgressBar;
    // Explicit lifetimes are actually needed for mockall to work properly.
    #[allow(clippy::needless_lifetimes)]
    fn log<'a>(&self, record: &Record<'a>) {
        self.get_logger().log(record);
    }
    fn get_selected_identity(&self) -> Option<&String>;
    fn get_selected_identity_principal(&self) -> Option<Principal>;
}
/// Base `Environment` implementation built from the current directory's
/// project config (if any). It carries no agent; see `AgentEnvironment`.
pub struct EnvironmentImpl {
    // None when not inside a dfx project.
    config: Option<Arc<Config>>,
    temp_dir: PathBuf,
    cache: Arc<dyn Cache>,
    version: Version,
    // Set via `with_logger`; `get_logger` panics if unset.
    logger: Option<slog::Logger>,
    // When false, progress bars are discarded.
    progress: bool,
    identity_override: Option<String>,
}
impl EnvironmentImpl {
    /// Build an environment from the current directory: load `dfx.json` if
    /// present, pick (or create) the temp dir, and resolve the dfx version.
    ///
    /// # Errors
    /// Fails on I/O errors other than a missing config file, or on an invalid
    /// version string.
    pub fn new() -> DfxResult<Self> {
        // A missing config file just means "not in a project"; any other
        // error is propagated.
        let config = match Config::from_current_dir() {
            Err(err) => {
                if err.kind() == std::io::ErrorKind::NotFound {
                    Ok(None)
                } else {
                    Err(err)
                }
            }
            Ok(x) => Ok(Some(x)),
        }?;
        // In a project: use `<project>/.dfx`; otherwise a throwaway temp dir.
        let temp_dir = match &config {
            None => tempfile::tempdir()
                .expect("Could not create a temporary directory.")
                .into_path(),
            Some(c) => c.get_path().parent().unwrap().join(".dfx"),
        };
        create_dir_all(&temp_dir)?;
        // Figure out which version of DFX we should be running. This will use the following
        // fallback sequence:
        // 1. DFX_VERSION environment variable
        // 2. dfx.json "dfx" field
        // 3. this binary's version
        // If any of those are empty string, we stop the fallback and use the current version.
        // If any of those are a valid version, we try to use that directly as is.
        // If any of those are an invalid version, we will show an error to the user.
        let version = match std::env::var("DFX_VERSION") {
            Err(_) => match &config {
                None => dfx_version().clone(),
                Some(c) => match &c.get_config().get_dfx() {
                    None => dfx_version().clone(),
                    Some(v) => Version::parse(&v)?,
                },
            },
            Ok(v) => {
                if v.is_empty() {
                    dfx_version().clone()
                } else {
                    Version::parse(&v)?
                }
            }
        };
        Ok(EnvironmentImpl {
            cache: Arc::new(DiskBasedCache::with_version(&version)),
            config: config.map(Arc::new),
            temp_dir,
            version: version.clone(),
            logger: None,
            progress: true,
            identity_override: None,
        })
    }
    /// Builder-style setter: attach the logger (required before `get_logger`).
    pub fn with_logger(mut self, logger: slog::Logger) -> Self {
        self.logger = Some(logger);
        self
    }
    /// Builder-style setter: enable or disable progress bars.
    pub fn with_progress_bar(mut self, progress: bool) -> Self {
        self.progress = progress;
        self
    }
    /// Builder-style setter: record the `--identity` override, if any.
    pub fn with_identity_override(mut self, identity: Option<String>) -> Self {
        self.identity_override = identity;
        self
    }
}
impl Environment for EnvironmentImpl {
    fn get_cache(&self) -> Arc<dyn Cache> {
        Arc::clone(&self.cache)
    }
    fn get_config(&self) -> Option<Arc<Config>> {
        // Cloning an Option<Arc<_>> just bumps the refcount; the old
        // `as_ref().map(|x| Arc::clone(x))` spelled the same thing by hand.
        self.config.clone()
    }
    fn get_config_or_anyhow(&self) -> anyhow::Result<Arc<Config>> {
        self.get_config().ok_or_else(|| anyhow!(
            "Cannot find dfx configuration file in the current working directory. Did you forget to create one?"
        ))
    }
    fn is_in_project(&self) -> bool {
        self.config.is_some()
    }
    fn get_temp_dir(&self) -> &Path {
        &self.temp_dir
    }
    fn get_state_dir(&self) -> PathBuf {
        self.get_temp_dir().join("state")
    }
    fn get_version(&self) -> &Version {
        &self.version
    }
    fn get_identity_override(&self) -> &Option<String> {
        &self.identity_override
    }
    fn get_agent(&self) -> Option<&Agent> {
        // create an AgentEnvironment explicitly, in order to specify network and agent.
        // See install, build for examples.
        None
    }
    fn get_network_descriptor(&self) -> Option<&NetworkDescriptor> {
        // create an AgentEnvironment explicitly, in order to specify network and agent.
        // See install, build for examples.
        None
    }
    fn get_logger(&self) -> &slog::Logger {
        self.logger
            .as_ref()
            .expect("Log was not setup, but is being used.")
    }
    fn new_spinner(&self, message: &str) -> ProgressBar {
        if self.progress {
            ProgressBar::new_spinner(message)
        } else {
            ProgressBar::discard()
        }
    }
    fn new_progress(&self, _message: &str) -> ProgressBar {
        // Determinate progress bars are not supported here; always discard.
        ProgressBar::discard()
    }
    fn get_selected_identity(&self) -> Option<&String> {
        None
    }
    fn get_selected_identity_principal(&self) -> Option<Principal> {
        None
    }
}
/// An `Environment` layered over another one, adding a concrete agent,
/// network descriptor, and identity manager; all other queries delegate
/// to `backend`.
pub struct AgentEnvironment<'a> {
    backend: &'a dyn Environment,
    agent: Agent,
    network_descriptor: NetworkDescriptor,
    identity_manager: IdentityManager,
}
impl<'a> AgentEnvironment<'a> {
pub fn new(
backend: &'a dyn Environment,
network_descriptor: NetworkDescriptor,
timeout: Duration,
) -> DfxResult<Self> {
let mut identity_manager = IdentityManager::new(backend)?;
let identity = identity_manager.instantiate_selected_identity()?;
let agent_url = network_descriptor.providers.first().unwrap();
Ok(AgentEnvironment {
backend,
agent: create_agent(backend.get_logger().clone(), agent_url, identity, timeout)
.expect("Failed to construct agent."),
network_descriptor,
identity_manager,
})
}
}
// Everything except agent, network descriptor, and identity selection is
// delegated to the wrapped backend environment.
impl<'a> Environment for AgentEnvironment<'a> {
    fn get_cache(&self) -> Arc<dyn Cache> {
        self.backend.get_cache()
    }
    fn get_config(&self) -> Option<Arc<Config>> {
        self.backend.get_config()
    }
    fn get_config_or_anyhow(&self) -> anyhow::Result<Arc<Config>> {
        self.get_config().ok_or_else(|| anyhow!(
            "Cannot find dfx configuration file in the current working directory. Did you forget to create one?"
        ))
    }
    fn is_in_project(&self) -> bool {
        self.backend.is_in_project()
    }
    fn get_temp_dir(&self) -> &Path {
        self.backend.get_temp_dir()
    }
    fn get_state_dir(&self) -> PathBuf {
        self.backend.get_state_dir()
    }
    fn get_version(&self) -> &Version {
        self.backend.get_version()
    }
    fn get_identity_override(&self) -> &Option<String> {
        self.backend.get_identity_override()
    }
    // Unlike EnvironmentImpl, this environment does carry an agent and network.
    fn get_agent(&self) -> Option<&Agent> {
        Some(&self.agent)
    }
    fn get_network_descriptor(&self) -> Option<&NetworkDescriptor> {
        Some(&self.network_descriptor)
    }
    fn get_logger(&self) -> &slog::Logger {
        self.backend.get_logger()
    }
    fn new_spinner(&self, message: &str) -> ProgressBar {
        self.backend.new_spinner(message)
    }
    fn new_progress(&self, message: &str) -> ProgressBar {
        self.backend.new_progress(message)
    }
    fn get_selected_identity(&self) -> Option<&String> {
        Some(self.identity_manager.get_selected_identity_name())
    }
    fn get_selected_identity_principal(&self) -> Option<Principal> {
        self.identity_manager.get_selected_identity_principal()
    }
}
/// HTTP client helper that manages cached HTTP Basic-auth credentials for a
/// replica URL; used as the agent transport's password manager.
pub struct AgentClient {
    logger: Logger,
    url: reqwest::Url,
    // The auth `(username, password)`.
    auth: Arc<Mutex<Option<(String, String)>>>,
}
impl AgentClient {
    /// Create a client for `url`, pre-loading any credentials cached on disk.
    pub fn new(logger: Logger, url: String) -> DfxResult<AgentClient> {
        let url = reqwest::Url::parse(&url).context(format!("Invalid URL: {}", url))?;
        let result = Self {
            logger,
            url,
            auth: Arc::new(Mutex::new(None)),
        };
        if let Ok(Some(auth)) = result.read_http_auth() {
            result.auth.lock().unwrap().replace(auth);
        }
        Ok(result)
    }
    /// Location of the per-host credential cache file.
    fn http_auth_path() -> DfxResult<PathBuf> {
        Ok(cache::get_cache_root()?.join("http_auth"))
    }
    // A connection is considered secure if it goes to an HTTPs scheme or if it's the
    // localhost (which cannot be spoofed).
    fn is_secure(&self) -> bool {
        self.url.scheme() == "https" || self.url.host_str().unwrap_or("") == "localhost"
    }
    /// Load the host → base64-token map from disk.
    fn read_http_auth_map(&self) -> DfxResult<BTreeMap<String, String>> {
        let p = &Self::http_auth_path()?;
        let content = std::fs::read_to_string(p)?;
        // If there's an error parsing, simply use an empty map.
        Ok(serde_json::from_str::<BTreeMap<String, String>>(&content).unwrap_or_default())
    }
    /// Look up cached credentials for this client's host.
    ///
    /// Malformed cache entries (bad base64, no `:` separator) now yield
    /// `Ok(None)` instead of panicking — the cache file is external data.
    fn read_http_auth(&self) -> DfxResult<Option<(String, String)>> {
        let host = match self.url.host() {
            None => return Ok(None),
            Some(h) => h.to_string(),
        };
        let map = self.read_http_auth_map()?;
        let token = match map.get(&host) {
            None => return Ok(None),
            Some(t) => t,
        };
        if !self.is_secure() {
            slog::warn!(
                self.logger,
                "HTTP Auth was found, but protocol is not secure. Refusing to use the token."
            );
            return Ok(None);
        }
        // For backward compatibility with previous versions of DFX, we still
        // store the base64 encoding of `username:password`, but we decode it
        // since the Agent requires username and password as separate fields.
        let pair = match base64::decode(token) {
            Ok(p) => p,
            Err(_) => return Ok(None),
        };
        let decoded = String::from_utf8_lossy(pair.as_slice()).to_string();
        // Basic auth splits on the FIRST colon only (RFC 7617), so a password
        // containing ':' is preserved — the old `.split(':').take(2)` lost it.
        let mut parts = decoded.splitn(2, ':');
        match (parts.next(), parts.next()) {
            (Some(user), Some(pass)) => Ok(Some((user.to_owned(), pass.to_owned()))),
            _ => Ok(None),
        }
    }
    /// Persist (host → token) in the on-disk credential cache.
    fn save_http_auth(&self, host: &str, auth: &str) -> DfxResult<PathBuf> {
        let mut map = self.read_http_auth_map().unwrap_or_default();
        map.insert(host.to_string(), auth.to_string());
        let p = Self::http_auth_path()?;
        std::fs::write(&p, serde_json::to_string(&map)?.as_bytes())?;
        Ok(p)
    }
}
impl ic_agent::agent::http_transport::PasswordManager for AgentClient {
    /// Return already-known credentials, if any, without prompting.
    fn cached(&self, _url: &str) -> Result<Option<(String, String)>, String> {
        // Support for HTTP Auth if necessary (tries to contact first, then do the HTTP Auth
        // flow).
        if let Some(auth) = self.auth.lock().unwrap().as_ref() {
            Ok(Some(auth.clone()))
        } else {
            Ok(None)
        }
    }
    /// Interactively prompt the user for credentials, cache them in memory and
    /// (best-effort) on disk, then return them.
    fn required(&self, _url: &str) -> Result<(String, String), String> {
        eprintln!("Unauthorized HTTP Access... Please enter credentials:");
        let username = dialoguer::Input::<String>::new()
            .with_prompt("Username")
            .interact()
            .unwrap();
        let password = dialoguer::Password::new()
            .with_prompt("Password")
            .interact()
            .unwrap();
        // Stored on disk as base64("username:password") for compatibility.
        let auth = format!("{}:{}", username, password);
        let auth = base64::encode(&auth);
        self.auth
            .lock()
            .unwrap()
            .replace((username.clone(), password.clone()));
        if let Some(h) = &self.url.host() {
            // Saving is best-effort; a failure is silently ignored.
            if let Ok(p) = self.save_http_auth(&h.to_string(), &auth) {
                slog::info!(
                    self.logger,
                    "Saved HTTP credentials to {}.",
                    p.to_string_lossy()
                );
            }
        }
        Ok((username, password))
    }
}
/// Build an `Agent` for `url` with an `AgentClient` password manager attached.
/// Returns `None` if either the client or the agent cannot be constructed.
fn create_agent(
    logger: Logger,
    url: &str,
    identity: Box<dyn Identity + Send + Sync>,
    timeout: Duration,
) -> Option<Agent> {
    let password_manager = AgentClient::new(logger, url.to_string()).ok()?;
    let transport =
        ic_agent::agent::http_transport::ReqwestHttpReplicaV2Transport::create(url)
            .unwrap()
            .with_password_manager(password_manager);
    Agent::builder()
        .with_transport(transport)
        .with_boxed_identity(identity)
        .with_ingress_expiry(Some(timeout))
        .build()
        .ok()
}
|
/// A mouse click position.
/// NOTE(review): x/y are presumably window-relative pixels — confirm at call sites.
#[derive(Debug, Clone)]
pub struct ClickEvent {
    pub x: i32,
    pub y: i32,
}
/// Input events delivered to the application.
#[derive(Debug, Clone)]
pub enum Event {
    MouseDown(ClickEvent),
    MouseUp(ClickEvent),
}
/// Bitmask of event categories; combine the flags below with `|`.
pub type EventTypes = u32;
pub const QUIT: u32 = 1;
pub const CLICK: u32 = 2;
pub const MOUSE_MOVE: u32 = 4;
|
extern crate ares;
#[macro_use]
mod util;
#[test]
fn basic_types() {
    // TODO: test 1-arg and 2-arg
    // `=` is variadic: it is true only when ALL arguments are equal.
    eval_ok!("(= 1 1)", true);
    eval_ok!("(= 2 2 2 2)", true);
    eval_ok!("(= 1 2)", false);
    eval_ok!("(= 1 1 2)", false);
}
|
use super::prelude::*;
// Register the session middleware that maintains session state.
// `CookieSession` is the session-processing backend: when a request arrives it
// initializes the `Session` state inside the application
// (`ServiceRequest::get_session`), and once the request has been processed it
// takes the latest state from the `Session` and writes it back to the client.
// All of this happens inside the `CookieSession` middleware.
pub(crate) fn session_middleware() -> CookieSession {
CookieSession::signed(
&env::var("SESSION_TOKEN")
.expect("Without `SESSION_TOKEN` set in .env")
.into_bytes(),
)
.domain(
env::var("SCHEDULER_COOKIE_DOMAIN").expect("Without `SCHEDULER_COOKIE_DOMAIN` set in .env"),
)
.name(env::var("SCHEDULER_NAME").expect("Without `SCHEDULER_NAME` set in .env"))
.http_only(true)
.secure(false)
}
// Builds the authentication middleware that checks login status based on the
// `CookieSession` state; `SessionAuth` is a zero-sized factory consumed by
// actix-web's `wrap`.
pub(crate) fn auth_middleware() -> SessionAuth {
    SessionAuth
}
// The public middleware output type: a pinned, boxed future (no `Send` bound).
type MiddlewareFuture<T, E> = Pin<Box<dyn Future<Output = Result<T, E>>>>;
// Zero-sized marker type acting as the middleware factory (see `Transform` impl).
pub struct SessionAuth;
// Middleware factory (actix-web 2/3-style `Transform`): wraps the inner
// service in a `SessionAuthMiddleware` when the middleware is registered.
impl<S, B> Transform<S> for SessionAuth
where
    S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = ActixWebError>,
    S::Future: 'static,
    B: 'static,
{
    type Request = ServiceRequest;
    type Response = ServiceResponse<B>;
    type Error = ActixWebError;
    type InitError = ();
    type Transform = SessionAuthMiddleware<S>;
    type Future = Ready<Result<Self::Transform, Self::InitError>>;
    // Construction never fails; the wrapped service is returned immediately.
    fn new_transform(&self, service: S) -> Self::Future {
        ok(SessionAuthMiddleware { service })
    }
}
/// The per-service middleware instance; simply wraps the downstream service.
pub struct SessionAuthMiddleware<S> {
    service: S,
}
// Request-time authentication check. Fix over the original: the two
// "forward to inner service" branches were duplicated verbatim; they are
// merged behind a single short-circuiting authorization test with identical
// control flow (the session is still only consulted for protected paths).
impl<S, B> Service for SessionAuthMiddleware<S>
where
    S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = ActixWebError>,
    S::Future: 'static,
    B: 'static,
{
    type Request = ServiceRequest;
    type Response = ServiceResponse<B>;
    type Error = ActixWebError;
    type Future = MiddlewareFuture<Self::Response, Self::Error>;
    // Readiness is delegated to the wrapped service.
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.service.poll_ready(cx)
    }
    /// Forwards the request when it targets a public endpoint or the session
    /// carries a `user_id`; otherwise responds with an error message asking
    /// the client to log in.
    fn call(&mut self, req: ServiceRequest) -> Self::Future {
        let session = req.get_session();
        // Special endpoints (login, event collection) bypass the token check.
        let is_public = match req.uri().path() {
            "/api/user/login" | "/api/task_logs/event_trigger" => true,
            _ => false,
        };
        // `||` short-circuits, so the session is only inspected for
        // protected paths — same behavior as the original two-arm match.
        let is_authorized = is_public
            || match session.get::<u64>("user_id") {
                Ok(Some(_)) => true,
                _ => false,
            };
        if is_authorized {
            let fut = self.service.call(req);
            Box::pin(async move { fut.await })
        } else {
            Box::pin(async move {
                Ok(req.error_response(
                    HttpResponseBuilder::new(StatusCode::default()).json(
                        UnifiedResponseMessages::<()>::error().customized_error_msg(
                            String::from("Please log in and operate."),
                        ),
                    ),
                ))
            })
        }
    }
}
|
use std::marker::PhantomData;
use std::any::{Any};
use std::rc::Rc;
use std::sync::atomic::AtomicIsize;
use observable::*;
use subscriber::*;
use unsub_ref::UnsubRef;
use std::sync::Arc;
use std::sync::atomic::Ordering;
use scheduler::Scheduler;
/// `subscribe_on`-style operator: holds a source observable plus the scheduler
/// the actual subscription will be deferred onto (see the `Observable` impl).
pub struct SubOnOp<Src, V, Sch> where Src : Observable<V>+Send+Sync, Sch: Scheduler+Send+Sync
{
    source: Arc<Src>,
    scheduler: Arc<Sch>,
    // NOTE(review): non-snake-case field intentionally named like the type so
    // the `PhantomData` field-init shorthand works at construction sites;
    // renaming it would require touching every constructor.
    PhantomData: PhantomData<V>
}
/// Extension trait adding `.sub_on(scheduler)` to observables, analogous to
/// Rx's `subscribeOn`: subscription work runs on the given scheduler.
pub trait ObservableSubOn<Src, V, Sch> where Src : Observable<V>+Send+Sync, Sch: Scheduler+Send+Sync
{
    fn sub_on(self, scheduler: Arc<Sch>) -> SubOnOp<Src, V, Sch> ;
}
// Blanket impl: any Send+Sync observable gains `sub_on`. The source is moved
// into an `Arc` so the deferred subscription closure can own it.
impl<Src, V, Sch> ObservableSubOn<Src, V, Sch> for Src where Src : Observable<V>+Send+Sync, Sch: Scheduler+Send+Sync
{
    fn sub_on(self, scheduler: Arc<Sch>) -> SubOnOp<Src, V, Sch>
    {
        SubOnOp{ scheduler, PhantomData, source: Arc::new(self) }
    }
}
// Subscribing to the operator schedules the underlying subscription on the
// configured scheduler rather than performing it on the caller's thread; the
// unsubscribe handle returned by `schedule` is passed back to the caller.
impl<Src, V:'static+Send+Sync, Sch> Observable< V> for SubOnOp<Src, V, Sch> where Src : 'static + Observable<V>+Send+Sync, Sch: Scheduler+Send+Sync
{
    fn sub(&self, dest: Arc<Observer<V>+Send+Sync>) -> UnsubRef<'static>
    {
        let src = self.source.clone();
        self.scheduler.schedule(move ||{
            src.sub(dest)
        })
    }
}
#[cfg(test)]
mod test
{
    use super::*;
    use subject::*;
    use scheduler::*;
    use op::*;
    use fac::*;
    use std::thread;
    use std::time::Duration;
    // Smoke test: a 100ms timer observable subscribed on a new thread.
    // Prints which thread emits each value; no assertions — verified by eye.
    #[test]
    fn basic()
    {
        println!("src thread: {:?}", thread::current().id());
        let src = Arc::new(rxfac::timer(100, Some(100), NewThreadScheduler::get()));
        src.take(30).sub_on(NewThreadScheduler::get()).subn(|v| println!("next {} thread: {:?}", v, thread::current().id() ));
        // Keep the main thread alive long enough for the timer to fire.
        thread::sleep(Duration::from_secs(10));
    }
}
mod lv2_raw;
mod lv2;
mod synth;
use std::ptr;
use std::mem;
use std::f32;
use std::ffi;
use std::os::raw;
use std::collections::BTreeMap;
use lv2_raw::core::*;
use lv2_raw::urid::*;
use lv2_raw::atom::*;
use lv2_raw::midi::*;
use lv2::atom::*;
use lv2::urid::*;
use lv2::core::*;
use lv2::midi::*;
// LV2 port indices used by `connect_port`.
// NOTE(review): these must stay in sync with the port order declared in the
// plugin's .ttl manifest — confirm when adding ports.
const CONTROL_INPUT: u32 = 0;  // atom input (MIDI / patch events)
const SYNTH_OUTPUT: u32 = 1;   // audio output buffer
const WAVEFORM: u32 = 2;       // primary oscillator waveform
const ATTACK: u32 = 3;         // ADSR envelope controls
const DECAY: u32 = 4;
const SUSTAIN: u32 = 5;
const RELEASE: u32 = 6;
const SEC_WAVEFORM: u32 = 7;   // secondary oscillator controls
const SEC_FREQ_MUL: u32 = 8;
const SEC_DEPTH: u32 = 9;
const FILTER_FREQ: u32 = 10;   // filter controls
const FILTER_ON: u32 = 11;
/// URIDs mapped once at instantiation time for every URI the plugin compares
/// against at run time. Field names mirror the LV2 URI define names, hence
/// the non-snake-case style.
#[derive(Debug)]
pub struct SamplerUris {
    pub atom_Blank: LV2_URID,
    pub atom_Int: LV2_URID,
    pub atom_Long: LV2_URID,
    pub atom_Float: LV2_URID,
    pub atom_Object: LV2_URID,
    pub atom_Path: LV2_URID,
    pub atom_Property: LV2_URID,
    pub atom_Resource: LV2_URID,
    pub atom_Sequence: LV2_URID,
    pub atom_URID: LV2_URID,
    pub atom_eventTransfer: LV2_URID,
    pub midi_Event: LV2_URID,
    pub patch_Set: LV2_URID,
    pub patch_property: LV2_URID,
    pub patch_value: LV2_URID,
    pub time_frame: LV2_URID,
    pub time_framesPerSecond: LV2_URID,
    pub time_speed: LV2_URID,
    pub time_Position: LV2_URID,
}
/// Resolves every URI the plugin cares about to its host-assigned URID using
/// the host's urid:map feature, so run-time comparisons are integer equality
/// instead of string compares.
pub fn map_sampler_uris(map: *const LV2_URID_Map) -> SamplerUris {
    SamplerUris {
        atom_Blank: urid_for_const(map, LV2_ATOM_Blank),
        atom_Int: urid_for_const(map, LV2_ATOM_Int),
        atom_Long: urid_for_const(map, LV2_ATOM_Long),
        atom_Float: urid_for_const(map, LV2_ATOM_Float),
        atom_Object: urid_for_const(map, LV2_ATOM_Object),
        atom_Path: urid_for_const(map, LV2_ATOM_Path),
        atom_Property: urid_for_const(map, LV2_ATOM_Property),
        atom_Resource: urid_for_const(map, LV2_ATOM_Resource),
        atom_Sequence: urid_for_const(map, LV2_ATOM_Sequence),
        atom_URID: urid_for_const(map, LV2_ATOM_URID),
        atom_eventTransfer: urid_for_const(map, LV2_ATOM_eventTransfer),
        midi_Event: urid_for_const(map, LV2_MIDI_MidiEvent),
        patch_Set: urid_for_const(map, LV2_PATCH_Set),
        patch_property: urid_for_const(map, LV2_PATCH_property),
        patch_value: urid_for_const(map, LV2_PATCH_value),
        time_frame: urid_for_const(map, LV2_TIME_frame),
        time_framesPerSecond: urid_for_const(map, LV2_TIME_framesPerSecond),
        time_speed: urid_for_const(map, LV2_TIME_speed),
        time_Position: urid_for_const(map, LV2_TIME_Position),
    }
}
/// Plugin instance state. Port pointers are null until the host wires them via
/// `connect_port`; they point into host-owned buffers and are dereferenced in
/// `run`. `#[repr(C)]` because the struct crosses the FFI boundary as an
/// opaque `LV2_Handle`.
#[repr(C)]
struct Amp {
    // Event input port (atom sequence: MIDI / patch messages).
    input: *const LV2_Atom,
    // Audio output buffer, `n_samples` f32 frames per `run` call.
    output: *mut f32,
    // Control ports — each points at a single host-provided f32.
    waveform: *mut f32,
    attack: *mut f32,
    decay: *mut f32,
    sustain: *mut f32,
    release: *mut f32,
    sec_waveform: *mut f32,
    sec_freq_mul: *mut f32,
    sec_depth: *mut f32,
    filter_freq: *mut f32,
    filter_on: *mut f32,
    // DSP engine producing the actual samples.
    synth: synth::ToneIterator,
    // URIDs resolved at instantiation for run-time event dispatch.
    sampler_uris: SamplerUris,
}
// Plugin URI; must match the .ttl manifest. NUL-terminated for C interop.
const AMP_URI: *const u8 = b"http://quaddmg.com/plugins/synthz\0" as *const u8;
// Descriptor handed to the host from `lv2_descriptor`; the bare field names
// are field-init shorthand for the extern callbacks defined below.
const LV2DESCRIPTOR: LV2_Descriptor = LV2_Descriptor {
    URI: AMP_URI as *const raw::c_char,
    instantiate,
    connect_port,
    activate,
    run,
    deactivate,
    cleanup,
    extension_data
};
/// Feature extractor that captures the host's urid:map feature pointer while
/// the feature array is walked in `instantiate`.
struct UridExtractor<'a> {
    // URI this extractor matches against (the urid:map feature URI).
    urid_uri: &'a ffi::CStr,
    // Set by `store` once the matching feature is found; `None` until then.
    urid_map: Option<*const LV2_URID_Map>
}
impl <'a> UridExtractor<'a> {
    /// Creates an extractor keyed to the urid:map feature URI.
    fn new() -> UridExtractor<'a> {
        unsafe {
            UridExtractor {
                // SAFETY-relevant: `LV2_URID_map` must be a valid NUL-terminated
                // C string (it is a spec-defined URI constant).
                urid_uri: ffi::CStr::from_ptr(LV2_URID_map as *const raw::c_char),
                urid_map: None
            }
        }
    }
}
impl <'a> FeatureExtractor for UridExtractor<'a> {
    // True when the host feature's URI equals the urid:map URI.
    fn matches(&self, item: &ffi::CStr) -> bool {
        *item == *self.urid_uri
    }
    // Remembers the feature's data pointer as an LV2_URID_Map.
    // NOTE(review): the `unsafe` block is unnecessary — a pointer cast is safe;
    // only later dereferences need `unsafe`.
    fn store(&mut self, data: *const raw::c_void) {
        unsafe {
            self.urid_map = Some(data as *const LV2_URID_Map);
        }
    }
}
/// LV2 `instantiate` callback: allocates plugin state on the heap and returns
/// it to the host as an opaque `LV2_Handle`.
extern fn instantiate(descriptor: *const LV2_Descriptor,
                      rate: f64,
                      path: *const raw::c_char,
                      features: *const *const LV2_Feature) -> LV2_Handle {
    println!("SynthZ instantiate");
    let mut urid_extractor = UridExtractor::new();
    extract_features(features, vec!(&mut urid_extractor));
    // Panics if the host does not provide urid:map; the plugin cannot operate
    // without it since all event dispatch is URID-based.
    let mut urid_map = urid_extractor.urid_map.unwrap();
    // All ports start unconnected; the host wires them via `connect_port`
    // before the first `run`.
    let mut amp = Box::new(Amp {
        input: std::ptr::null_mut(),
        output: std::ptr::null_mut(),
        waveform: std::ptr::null_mut(),
        sec_waveform: std::ptr::null_mut(),
        sec_freq_mul: std::ptr::null_mut(),
        sec_depth: std::ptr::null_mut(),
        attack: std::ptr::null_mut(),
        decay: std::ptr::null_mut(),
        sustain: std::ptr::null_mut(),
        release: std::ptr::null_mut(),
        filter_freq: std::ptr::null_mut(),
        filter_on: std::ptr::null_mut(),
        synth: synth::ToneIterator::new(rate as f32),
        sampler_uris: map_sampler_uris(urid_map),
    });
    println!("{:?}", amp.sampler_uris);
    // Ownership passes to the host; reclaimed in `cleanup` via `Box::from_raw`.
    Box::into_raw(amp) as LV2_Handle
}
/// LV2 `connect_port` callback: stores the host-provided buffer pointer for
/// the given port index into the matching `Amp` field. Unknown indices are
/// logged and ignored.
extern fn connect_port(instance: LV2_Handle, port: u32, data: *mut raw::c_void) {
    let mut pamp = instance as *mut Amp;
    unsafe {
        let amp = &mut *pamp;
        match port {
            CONTROL_INPUT => {
                amp.input = data as *const LV2_Atom
            },
            SYNTH_OUTPUT => {
                amp.output = data as *mut f32
            },
            WAVEFORM => {
                amp.waveform = data as *mut f32
            },
            ATTACK => {
                amp.attack = data as *mut f32
            },
            DECAY => {
                amp.decay = data as *mut f32
            },
            SUSTAIN => {
                amp.sustain = data as *mut f32
            },
            RELEASE => {
                amp.release = data as *mut f32
            },
            SEC_WAVEFORM => {
                amp.sec_waveform = data as *mut f32
            },
            SEC_FREQ_MUL => {
                amp.sec_freq_mul = data as *mut f32
            },
            SEC_DEPTH => {
                amp.sec_depth = data as *mut f32
            }
            FILTER_FREQ => {
                amp.filter_freq = data as *mut f32
            }
            FILTER_ON => {
                amp.filter_on = data as *mut f32
            }
            _ => {println!("SynthZ Connect to unknown port")}
        }
    }
}
// LV2 `activate` callback: no per-activation state to reset.
extern fn activate(instance: LV2_Handle) {
}
// LV2 `deactivate` callback: nothing to tear down between activations.
extern fn deactivate(instance: LV2_Handle) {
}
/// Walks an atom sequence and converts each event the plugin understands into
/// a `SynthEvent`: MIDI events become `MidiData`, atom objects/blanks become
/// `SynthProperties` (via `extract_object`). Other event types are skipped.
fn extract_sequence(seq: *const LV2_Atom_Sequence, s: &SamplerUris) -> Vec<synth::SynthEvent> {
    let mut ret = Vec::new();
    let iter: AtomSequenceIter = AtomSequenceIter::new(seq);
    for event in iter {
        if event.data_type == s.midi_Event {
            ret.push(synth::SynthEvent::new(event.time_frames,
                    synth::SynthEventBody::MidiData(MidiEvent::new(event.data, event.size))));
        } else if event.data_type == s.atom_Object || event.data_type == s.atom_Blank {
            let properties = synth::SynthEventBody::SynthProperties(
                extract_object(event.data as *const LV2_Atom_Object_Body, event.size, s));
            ret.push(synth::SynthEvent::new(event.time_frames, properties));
        }
    }
    ret
}
/// Walks the property list of an atom object body (`size` bytes at `obj`) and
/// collects the time:frame (i64) and time:speed (f32) properties.
///
/// SAFETY: caller must pass a valid object body pointer and its true size;
/// the walk does raw pointer arithmetic over host-owned memory.
fn extract_object(obj: *const LV2_Atom_Object_Body,
                  size: usize,
                  uris: &SamplerUris) -> Vec<synth::SynthProperty> {
    unsafe {
        let o_type = (*obj).otype;
        // Skip the object header; properties start right after it.
        let mut processed: usize = mem::size_of::<LV2_Atom_Object_Body>();
        let mut items: Vec<synth::SynthProperty> = Vec::new();
        while processed < size {
            let pboffset = (obj as usize).checked_add(processed).unwrap();
            let pbody: *const LV2_Atom_Property_Body = pboffset as *const LV2_Atom_Property_Body;
            let body = &*pbody;
            // TODO Get BPM, Bar (?), and BarBeat (?), BeatsPerBar (?)
            if body.key == uris.time_frame {
                assert_eq!(body.value.size as usize, mem::size_of::<i64>());
                assert_eq!(body.value.atom_type, uris.atom_Long);
                // `offset(1)` lands just past the property-body header, i.e.
                // on the value payload.
                let value = pbody.offset(1) as *const i64;
                items.push(synth::SynthProperty::Frame(*value));
            } else if body.key == uris.time_speed {
                assert_eq!(body.value.size as usize, mem::size_of::<f32>());
                assert_eq!(body.value.atom_type, uris.atom_Float);
                let value = pbody.offset(1) as *const f32;
                items.push(synth::SynthProperty::Speed(*value));
            }
            // Advance past this property: header plus padded value payload.
            processed = processed + pad_size(body.value.size) as usize + mem::size_of::<LV2_Atom_Property_Body>();
        }
        items
    }
}
extern fn run(instance: LV2_Handle, n_samples: u32) {
let pamp: *mut Amp = instance as *mut Amp;
unsafe {
let amp = &mut *pamp;
let pinput = amp.input;
let input = &*pinput;
let uris = &.sampler_uris;
let synth = &mut amp.synth;
let waveform = *amp.waveform;
let filter_freq = *amp.filter_freq;
let filter_on = *amp.filter_on > 0.5;
let control = vec!(
synth::SynthProperty::Waveform(waveform),
synth::SynthProperty::Envelope(*amp.attack, *amp.decay, *amp.sustain, *amp.release),
synth::SynthProperty::Secondary(*amp.sec_waveform, *amp.sec_depth, *amp.sec_freq_mul),
synth::SynthProperty::FilterFreq(filter_freq),
synth::SynthProperty::FilterOn(filter_on)
);
let evs = vec!(synth::SynthEvent::new(0, synth::SynthEventBody::SynthProperties(control)));
synth.add_data(evs);
if input.atom_type == uris.atom_Sequence {
let midi_data = extract_sequence(pinput as *const LV2_Atom_Sequence, uris);
synth.add_data(midi_data);
}
let output: &mut [f32] = std::slice::from_raw_parts_mut(amp.output, n_samples as usize);
let out = synth.feed(n_samples as usize);
for i in 0..output.len() {
output[i as usize] = out[i as usize];
}
}
}
/// LV2 `cleanup` callback: releases the plugin state allocated in `instantiate`.
extern fn cleanup(instance: LV2_Handle) {
    println!("SynthZ cleanup");
    // SAFETY: `instance` came from `Box::into_raw` in `instantiate`, so
    // rebuilding the box here hands ownership back for deallocation.
    unsafe {
        drop(Box::from_raw(instance as *mut Amp));
    }
}
/// LV2 `extension_data` hook: this plugin exposes no extension interfaces,
/// so every URI lookup yields a null pointer.
extern fn extension_data(uri: *const raw::c_char) -> *mut raw::c_void {
    println!("SynthZ extension_data");
    std::ptr::null_mut()
}
/// Host entry point: returns the descriptor for plugin `index`, or null once
/// all (here: one) plugins have been enumerated.
#[no_mangle]
pub extern fn lv2_descriptor(index: u32) -> *const LV2_Descriptor {
    println!("SynthZ lv2_descriptor");
    if index == 0 {
        &LV2DESCRIPTOR
    } else {
        std::ptr::null()
    }
}
|
//! The line primitive
use crate::{
drawable::{Drawable, Pixel},
geometry::{Dimensions, Point, Size},
pixelcolor::PixelColor,
primitives::Primitive,
style::{PrimitiveStyle, Styled},
transform::Transform,
DrawTarget,
};
/// Line primitive
///
/// # Examples
///
/// The [macro examples](../../macro.egline.html) make for more concise code.
///
/// ## Create some lines with different styles
///
/// ```rust
/// use embedded_graphics::{
///     pixelcolor::Rgb565, prelude::*, primitives::Line, style::PrimitiveStyle,
/// };
/// # use embedded_graphics::mock_display::MockDisplay;
/// # let mut display = MockDisplay::default();
///
/// // Red 1 pixel wide line from (50, 20) to (60, 35)
/// Line::new(Point::new(50, 20), Point::new(60, 35))
///     .into_styled(PrimitiveStyle::with_stroke(Rgb565::RED, 1))
///     .draw(&mut display)?;
///
/// // Green 1 pixel wide line with translation applied
/// Line::new(Point::new(50, 20), Point::new(60, 35))
///     .translate(Point::new(65, 35))
///     .into_styled(PrimitiveStyle::with_stroke(Rgb565::GREEN, 1))
///     .draw(&mut display)?;
/// # Ok::<(), core::convert::Infallible>(())
/// ```
// NOTE(review): `Default` derives to a line whose endpoints are both
// `Point::default()` — presumably a zero-length line at the origin; confirm.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]
pub struct Line {
    /// Start point
    pub start: Point,
    /// End point
    pub end: Point,
}
// Marker impl: tags `Line` as a drawable primitive (enables `into_styled`).
impl Primitive for Line {}
impl Dimensions for Line {
    // Component-wise minimum of the endpoints, so it holds for lines drawn in
    // any direction.
    fn top_left(&self) -> Point {
        Point::new(self.start.x.min(self.end.x), self.start.y.min(self.end.y))
    }
    fn bottom_right(&self) -> Point {
        self.top_left() + self.size()
    }
    fn size(&self) -> Size {
        Size::from_bounding_box(self.start, self.end)
    }
}
impl Line {
    /// Create a new line from `start` to `end` (both inclusive when drawn).
    pub const fn new(start: Point, end: Point) -> Self {
        Line { start, end }
    }
}
/// Pixel iterator for each pixel in the line
///
/// Implements Bresenham's algorithm; see `LineIterator::new` for how the
/// state below is initialised.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub(crate) struct LineIterator {
    // Current point; advances toward `end` on every `next` call.
    start: Point,
    end: Point,
    // Sign-normalised deltas (dx >= 0, dy <= 0).
    delta: Point,
    /// in which quadrant is the line drawn (upper-left=(-1, -1), lower-right=(1, 1), ...)
    direction: Point,
    // Bresenham error accumulator.
    err: i32,
    stop: bool,
}
impl LineIterator {
    /// Create a new line iterator from a `Line`
    pub(crate) fn new(line: &Line) -> Self {
        let mut delta = line.end - line.start;
        // Normalise signs for the all-octant form of Bresenham:
        // force dx >= 0 ...
        if delta.x < 0 {
            delta = Point::new(-delta.x, delta.y);
        }
        // ... and dy <= 0; the true direction is tracked separately below.
        if delta.y > 0 {
            delta = Point::new(delta.x, -delta.y);
        }
        // Unit step applied per axis while iterating.
        let direction = match (line.start.x >= line.end.x, line.start.y >= line.end.y) {
            (false, false) => Point::new(1, 1),
            (false, true) => Point::new(1, -1),
            (true, false) => Point::new(-1, 1),
            (true, true) => Point::new(-1, -1),
        };
        Self {
            start: line.start,
            end: line.end,
            delta,
            direction,
            // Initial error term: dx + dy (dy is non-positive).
            err: delta.x + delta.y,
            stop: line.start == line.end, /* if line length is zero, draw nothing */
        }
    }
}
// [Bresenham's line algorithm](https://en.wikipedia.org/wiki/Bresenham%27s_line_algorithm)
impl Iterator for LineIterator {
    type Item = Point;
    fn next(&mut self) -> Option<Self::Item> {
        if !self.stop {
            // Emit the current point, then advance state for the next call.
            let point = self.start;
            // The endpoint itself is still returned; iteration ends after it.
            if self.start == self.end {
                self.stop = true;
            }
            let err_double = 2 * self.err;
            // Step along x and/or y according to the accumulated error.
            if err_double > self.delta.y {
                self.err += self.delta.y;
                self.start += Point::new(self.direction.x, 0);
            }
            if err_double < self.delta.x {
                self.err += self.delta.x;
                self.start += Point::new(0, self.direction.y);
            }
            Some(point)
        } else {
            None
        }
    }
}
impl Transform for Line {
    /// Translate the line from its current position to a new position by (x, y) pixels, returning
    /// a new `Line`. For a mutating transform, see `translate_mut`.
    ///
    /// ```
    /// # use embedded_graphics::primitives::Line;
    /// # use embedded_graphics::prelude::*;
    /// let line = Line::new(Point::new(5, 10), Point::new(15, 20));
    /// let moved = line.translate(Point::new(10, 10));
    ///
    /// assert_eq!(moved.start, Point::new(15, 20));
    /// assert_eq!(moved.end, Point::new(25, 30));
    /// ```
    fn translate(&self, by: Point) -> Self {
        // Both fields are replaced, so no struct-update base is needed.
        Self::new(self.start + by, self.end + by)
    }
    /// Translate the line from its current position to a new position by (x, y) pixels.
    ///
    /// ```
    /// # use embedded_graphics::primitives::Line;
    /// # use embedded_graphics::prelude::*;
    /// let mut line = Line::new(Point::new(5, 10), Point::new(15, 20));
    /// line.translate_mut(Point::new(10, 10));
    ///
    /// assert_eq!(line.start, Point::new(15, 20));
    /// assert_eq!(line.end, Point::new(25, 30));
    /// ```
    fn translate_mut(&mut self, by: Point) -> &mut Self {
        // Reuse the pure transform to keep the two paths identical.
        *self = self.translate(by);
        self
    }
}
// A styled line iterates as pixels: the raw Bresenham iterator paired with the
// style that supplies the stroke color.
impl<'a, C> IntoIterator for &'a Styled<Line, PrimitiveStyle<C>>
where
    C: PixelColor,
{
    type Item = Pixel<C>;
    type IntoIter = StyledLineIterator<C>;
    fn into_iter(self) -> Self::IntoIter {
        StyledLineIterator {
            style: self.style,
            line_iter: LineIterator::new(&self.primitive),
        }
    }
}
/// Pixel iterator for each pixel in the line
///
/// Wraps `LineIterator`, coloring each produced point with the style's stroke
/// color.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
pub struct StyledLineIterator<C>
where
    C: PixelColor,
{
    style: PrimitiveStyle<C>,
    line_iter: LineIterator,
}
// [Bresenham's line algorithm](https://en.wikipedia.org/wiki/Bresenham%27s_line_algorithm)
impl<C: PixelColor> Iterator for StyledLineIterator<C> {
    type Item = Pixel<C>;
    fn next(&mut self) -> Option<Self::Item> {
        // A zero-width stroke draws nothing.
        if self.style.stroke_width == 0 {
            return None;
        }
        // Without a stroke color there is nothing to emit either.
        let color = self.style.stroke_color?;
        let point = self.line_iter.next()?;
        Some(Pixel(point, color))
    }
}
// Drawing delegates to the target's line routine, letting displays provide an
// accelerated implementation.
impl<'a, C: 'a> Drawable<C> for &Styled<Line, PrimitiveStyle<C>>
where
    C: PixelColor,
{
    fn draw<D: DrawTarget<C>>(self, display: &mut D) -> Result<(), D::Error> {
        display.draw_line(self)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{drawable::Pixel, pixelcolor::BinaryColor};
    // Asserts that a 1px styled line from `start` to `end` yields exactly the
    // pixels in `expected`, in order.
    fn test_expected_line(start: Point, end: Point, expected: &[(i32, i32)]) {
        let line =
            Line::new(start, end).into_styled(PrimitiveStyle::with_stroke(BinaryColor::On, 1));
        let mut expected_iter = expected.iter();
        for Pixel(coord, _) in line.into_iter() {
            match expected_iter.next() {
                Some(point) => assert_eq!(coord, Point::from(*point)),
                // expected runs out of points before line does
                None => unreachable!(),
            }
        }
        // check that expected has no points left
        assert!(expected_iter.next().is_none())
    }
    #[test]
    fn bounding_box() {
        let start = Point::new(10, 10);
        let end = Point::new(20, 20);
        let line: Line = Line::new(start, end);
        let backwards_line: Line = Line::new(end, start);
        // Bounding box must be orientation-independent.
        assert_eq!(line.top_left(), start);
        assert_eq!(line.bottom_right(), end);
        assert_eq!(line.size(), Size::new(10, 10));
        assert_eq!(backwards_line.top_left(), start);
        assert_eq!(backwards_line.bottom_right(), end);
        assert_eq!(backwards_line.size(), Size::new(10, 10));
    }
    // A zero-length line draws no pixels at all.
    #[test]
    fn draws_no_dot() {
        let start = Point::new(10, 10);
        let end = Point::new(10, 10);
        let expected = [];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn no_stroke_width_no_line() {
        let start = Point::new(2, 3);
        let end = Point::new(3, 2);
        let line =
            Line::new(start, end).into_styled(PrimitiveStyle::with_stroke(BinaryColor::On, 0));
        assert!(line.into_iter().eq(core::iter::empty()));
    }
    #[test]
    fn draws_short_correctly() {
        let start = Point::new(2, 3);
        let end = Point::new(3, 2);
        let expected = [(2, 3), (3, 2)];
        test_expected_line(start, end, &expected);
    }
    // One test per octant to exercise every sign combination of Bresenham.
    #[test]
    fn draws_octant_1_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(15, 13);
        let expected = [(10, 10), (11, 11), (12, 11), (13, 12), (14, 12), (15, 13)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_2_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(13, 15);
        let expected = [(10, 10), (11, 11), (11, 12), (12, 13), (12, 14), (13, 15)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_3_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(7, 15);
        let expected = [(10, 10), (9, 11), (9, 12), (8, 13), (8, 14), (7, 15)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_4_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(5, 13);
        let expected = [(10, 10), (9, 11), (8, 11), (7, 12), (6, 12), (5, 13)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_5_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(5, 7);
        let expected = [(10, 10), (9, 9), (8, 9), (7, 8), (6, 8), (5, 7)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_6_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(7, 5);
        let expected = [(10, 10), (9, 9), (9, 8), (8, 7), (8, 6), (7, 5)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_7_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(13, 5);
        let expected = [(10, 10), (11, 9), (11, 8), (12, 7), (12, 6), (13, 5)];
        test_expected_line(start, end, &expected);
    }
    #[test]
    fn draws_octant_8_correctly() {
        let start = Point::new(10, 10);
        let end = Point::new(15, 7);
        let expected = [(10, 10), (11, 9), (12, 9), (13, 8), (14, 8), (15, 7)];
        test_expected_line(start, end, &expected);
    }
}
|
use common::error::Error;
use common::result::Result;
/// A star rating constrained to `0..=5`; construct via `Stars::new` so the
/// invariant always holds.
#[derive(Debug, Clone, PartialEq)]
pub struct Stars {
    stars: u8,
}
impl Stars {
pub fn new(stars: u8) -> Result<Self> {
if stars > 5 {
return Err(Error::new("stars", "invalid_range"));
}
Ok(Stars { stars })
}
pub fn value(&self) -> u8 {
self.stars
}
}
|
use core::time;
use std::thread;
/// Demo of `std::thread::scope`: computes the average of `numbers` on a scoped
/// thread that may borrow the local vector without `move`-ing ownership.
fn main() {
    let numbers: Vec<usize> = vec![1, 2, 3, 4, 5, 6, 7];
    /*
        let th = thread::spawn(move || numbers.iter().sum::<usize>() / numbers.len());
        match th.join() {
            Ok(res) => println!("Result : {}", res),
            Err(err) => println!("Error : {:?}", err),
        };
    */
    // The scope guarantees the spawned thread finishes before `numbers` drops.
    let res = thread::scope(|scoped| {
        scoped
            .spawn(|| {
                println!("2:{:?}", thread::current().id());
                // NOTE(review): deliberate long pause (2 minutes) — presumably
                // to observe thread behavior; confirm it is intended.
                thread::sleep(time::Duration::from_secs(120));
                numbers.iter().sum::<usize>() / numbers.len()
            })
            // Joining inside the scope makes the computation effectively serial.
            .join()
            .unwrap()
    });
    println!("{:?}", res);
}
|
use libc::c_ulong;
use x11::xlib;
use std::ffi::CString;
// For convenience
pub const MODKEY1: u32 = xlib::Mod1Mask;
pub const MODKEY2: u32 = xlib::Mod4Mask;
pub const SHIFT: u32 = xlib::ShiftMask;
// Key combos. We add our bindings here for wm actions
pub const EXIT_KEY: KeyCmd<'static> = KeyCmd{ key: "F1", modifier: MODKEY2|SHIFT };
pub const TERM_KEY: KeyCmd<'static> = KeyCmd{ key: "t", modifier: MODKEY2|SHIFT };
pub const RUN_KEY: KeyCmd<'static> = KeyCmd{ key: "r", modifier: MODKEY2 };
// Programs launched by the bindings above.
pub const RUN: &'static str = "dmenu_run";
pub const TERMINAL: &'static str = "termite";
// Mouse commands. We add our binds here for wm actions
pub const MOUSE_MOVE: MouseCmd = MouseCmd{ button: 1, modifier: MODKEY2 };
pub const MOUSE_RAISE: MouseCmd = MouseCmd{ button: 1, modifier: 0 };
pub const MOUSE_RESIZE: MouseCmd = MouseCmd{ button: 3, modifier: MODKEY2 };
// Mouse focus behavior: when true, focus follows the pointer.
pub const SLOPPYFOCUS: bool = false;
// Borders. NOTE Color format is "rgb:ff/ff/ff" failure to use this code format
// will result in a segfault.
pub const BORDER0: Border<'static> = Border{ size: 2, color: "rgb:a5/a5/a5" };
pub const BORDER1: Border<'static> = Border{ size: 2, color: "rgb:18/18/18" };
pub const BORDER2: Border<'static> = Border{ size: 2, color: "rgb:aa/ff/33" };
pub const BORDER3: Border<'static> = Border{ size: 2, color: "rgb:00/bb/aa" };
pub const FBORDER0: Border<'static> = Border{ size: 2, color: "rgb:ff/00/00" };
pub const FBORDER1: Border<'static> = Border{ size: 2, color: "rgb:00/ff/00" };
pub const FBORDER2: Border<'static> = Border{ size: 2, color: "rgb:00/00/ff" };
pub const FBORDER3: Border<'static> = Border{ size: 2, color: "rgb:ff/ff/ff" };
// Border stacks: windows are drawn with all four borders layered (see
// `BorderInfo::new`, which sums their sizes).
pub const NUM_UNFOCUSED_BORDERS: usize = 4;
pub const UNFOCUSED_BORDERS: [Border<'static>; NUM_UNFOCUSED_BORDERS] =
    [ BORDER0, BORDER1, BORDER2, BORDER3 ];
pub const NUM_FOCUSED_BORDERS: usize = 4;
pub const FOCUS_BORDERS: [Border<'static>; NUM_FOCUSED_BORDERS] =
    [ FBORDER0, FBORDER1, FBORDER2, FBORDER3 ];
// Structs for configs
pub struct Border<'a> {
pub size: i32,
pub color: &'a str,
}
/// Precomputed totals of the stacked border sizes for focused and unfocused
/// windows (see `BorderInfo::new`).
pub struct BorderInfo {
    focus_size: i32,
    unfocus_size: i32,
}
impl BorderInfo {
    /// Precomputes the total focused / unfocused border thickness from the
    /// configured border arrays (each window is drawn with all borders stacked).
    pub fn new( focus: [Border;NUM_FOCUSED_BORDERS],
                unfocus: [Border;NUM_UNFOCUSED_BORDERS] ) -> BorderInfo {
        BorderInfo {
            // Iterator sums replace the hand-rolled accumulation loops and the
            // redundant `field: field` initializers. `iter()` also sidesteps
            // the edition-dependent meaning of array `into_iter()`.
            focus_size: focus.iter().map(|b| b.size).sum(),
            unfocus_size: unfocus.iter().map(|b| b.size).sum(),
        }
    }
    /// Combined thickness of all focused borders, in pixels.
    pub fn get_focus_size( &self ) -> i32 {
        self.focus_size
    }
    /// Combined thickness of all unfocused borders, in pixels.
    pub fn get_unfocus_size( &self ) -> i32 {
        self.unfocus_size
    }
}
/// A mouse binding: X button number plus the modifier mask that must be held.
#[derive(PartialEq)]
pub struct MouseCmd {
    pub button: u32,
    pub modifier: u32,
}
impl MouseCmd {
    /// Builds a mouse binding from a button number and modifier mask.
    pub fn new( button: u32, modifier: u32 ) -> MouseCmd {
        // Field-init shorthand replaces the redundant `field: field` form.
        MouseCmd { button, modifier }
    }
}
/// A keyboard binding: key name (as understood by `XStringToKeysym`) plus the
/// modifier mask that must be held.
#[derive(PartialEq)]
pub struct KeyCmd<'a> {
    key: &'a str,
    modifier: u32,
}
impl<'a> KeyCmd<'a> {
    /// Builds a key binding from a key name and modifier mask.
    pub fn new( key: &'a str, modifier: u32 ) -> KeyCmd {
        KeyCmd {
            key: key,
            modifier: modifier,
        }
    }
    /// Resolves the binding's key name to an X keysym via `XStringToKeysym`.
    /// NOTE(review): these accessors consume the `KeyCmd` by value; presumably
    /// callers pass the `const` bindings, which are `'static` and copied.
    pub fn get_keysym( key: KeyCmd ) -> c_ulong {
        // `key.key` is a Rust str (no interior NULs expected), so CString
        // construction should not fail.
        let key_string = CString::new( key.key ).unwrap();
        unsafe {
            xlib::XStringToKeysym( key_string.as_ptr() )
        }
    }
    /// Returns the binding's modifier mask.
    pub fn get_modifier( key: KeyCmd ) -> u32 {
        key.modifier
    }
    /// Returns the key name as a C string for X calls.
    pub fn get_key( key: KeyCmd ) -> CString {
        CString::new( key.key ).unwrap()
    }
}
|
use crate::{Dir, DirEntry};
use glob::{Pattern, PatternError};
impl<'a> Dir<'a> {
    /// Search for a file or directory with a glob pattern.
    ///
    /// Returns an iterator over every entry in this directory tree whose path
    /// matches `glob`, or a `PatternError` if the glob fails to parse.
    pub fn find(&self, glob: &str) -> Result<impl Iterator<Item = &'a DirEntry<'a>>, PatternError> {
        let pattern = Pattern::new(glob)?;
        Ok(Globs::new(pattern, self))
    }
}
/// Iterator state for `Dir::find`: a work stack of entries still to visit plus
/// the compiled glob pattern.
#[derive(Debug, Clone, PartialEq)]
struct Globs<'a> {
    stack: Vec<&'a DirEntry<'a>>,
    pattern: Pattern,
}
impl<'a> Globs<'a> {
    /// Seeds the work stack with the immediate entries of `root`.
    pub(crate) fn new(pattern: Pattern, root: &Dir<'a>) -> Globs<'a> {
        let stack = root.entries().iter().collect();
        Globs { stack, pattern }
    }
}
impl<'a> Iterator for Globs<'a> {
    type Item = &'a DirEntry<'a>;
    // Stack-based tree traversal: every popped entry has its children queued
    // (so the whole tree is visited), and is yielded only if it matches the
    // pattern. Non-matching entries are skipped silently.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(item) = self.stack.pop() {
            self.stack.extend(item.children());
            if self.pattern.matches_path(item.path()) {
                return Some(item);
            }
        }
        None
    }
}
|
use gstreamer::{
event::{FlushStart, FlushStop},
prelude::*,
State,
};
use gstreamer as gst;
use gstreamer_app as gst_app;
use gstreamer_audio as gst_audio;
use parking_lot::Mutex;
use std::sync::Arc;
use super::{Open, Sink, SinkAsBytes, SinkError, SinkResult};
use crate::{
config::AudioFormat, convert::Converter, decoder::AudioPacket, NUM_CHANNELS, SAMPLE_RATE,
};
/// Audio sink that feeds decoded samples into a GStreamer pipeline
/// (`appsrc ! <device bin>`); see `Open::open` for construction.
pub struct GstreamerSink {
    // Entry point for pushing sample buffers into the pipeline.
    appsrc: gst_app::AppSrc,
    // Pool the write path draws fixed-size buffers from.
    bufferpool: gst::BufferPool,
    pipeline: gst::Pipeline,
    format: AudioFormat,
    // Populated by the bus handler on EOS/error; checked before each write.
    async_error: Arc<Mutex<Option<String>>>,
}
impl Open for GstreamerSink {
    /// Builds the pipeline `appsrc ! <sink bin>` where the sink bin is either
    /// the default `audioconvert ! audioresample ! autoaudiosink` chain or a
    /// user-supplied bin description in `device`. Panics (via `expect`) on any
    /// GStreamer setup failure, since the sink is unusable without it.
    fn open(device: Option<String>, format: AudioFormat) -> Self {
        info!("Using GStreamer sink with format: {format:?}");
        gst::init().expect("failed to init GStreamer!");
        // Map librespot's sample format onto the corresponding GStreamer one.
        let gst_format = match format {
            AudioFormat::F64 => gst_audio::AUDIO_FORMAT_F64,
            AudioFormat::F32 => gst_audio::AUDIO_FORMAT_F32,
            AudioFormat::S32 => gst_audio::AUDIO_FORMAT_S32,
            AudioFormat::S24 => gst_audio::AUDIO_FORMAT_S2432,
            AudioFormat::S24_3 => gst_audio::AUDIO_FORMAT_S24,
            AudioFormat::S16 => gst_audio::AUDIO_FORMAT_S16,
        };
        let gst_info = gst_audio::AudioInfo::builder(gst_format, SAMPLE_RATE, NUM_CHANNELS as u32)
            .build()
            .expect("Failed to create GStreamer audio format");
        let gst_caps = gst_info.to_caps().expect("Failed to create GStreamer caps");
        let sample_size = format.size();
        // Buffer capacity: 2048 frames per channel at the current sample size.
        let gst_bytes = NUM_CHANNELS as usize * 2048 * sample_size;
        let pipeline = gst::Pipeline::new(None);
        let appsrc = gst::ElementFactory::make("appsrc")
            .build()
            .expect("Failed to create GStreamer appsrc element")
            .downcast::<gst_app::AppSrc>()
            .expect("couldn't cast AppSrc element at runtime!");
        appsrc.set_caps(Some(&gst_caps));
        appsrc.set_max_bytes(gst_bytes as u64);
        // Block `push_buffer` when the queue is full instead of dropping data.
        appsrc.set_block(true);
        let sink = match device {
            None => {
                // no need to dither twice; use librespot dithering instead
                gst::parse_bin_from_description(
                    "audioconvert dithering=none ! audioresample ! autoaudiosink",
                    true,
                )
                .expect("Failed to create default GStreamer sink")
            }
            Some(ref x) => gst::parse_bin_from_description(x, true)
                .expect("Failed to create custom GStreamer sink"),
        };
        pipeline
            .add(&appsrc)
            .expect("Failed to add GStreamer appsrc to pipeline");
        pipeline
            .add(&sink)
            .expect("Failed to add GStreamer sink to pipeline");
        appsrc
            .link(&sink)
            .expect("Failed to link GStreamer source to sink");
        let bus = pipeline.bus().expect("couldn't get bus from pipeline");
        let bufferpool = gst::BufferPool::new();
        let mut conf = bufferpool.config();
        conf.set_params(Some(&gst_caps), gst_bytes as u32, 0, 0);
        bufferpool
            .set_config(conf)
            .expect("couldn't configure the buffer pool");
        // Bus handler records EOS / errors so `write_bytes` can surface them
        // on the next write rather than panicking inside GStreamer's thread.
        let async_error = Arc::new(Mutex::new(None));
        let async_error_clone = async_error.clone();
        bus.set_sync_handler(move |_bus, msg| {
            match msg.view() {
                gst::MessageView::Eos(_) => {
                    println!("gst signaled end of stream");
                    let mut async_error_storage = async_error_clone.lock();
                    *async_error_storage = Some(String::from("gst signaled end of stream"));
                }
                gst::MessageView::Error(err) => {
                    println!(
                        "Error from {:?}: {} ({:?})",
                        err.src().map(|s| s.path_string()),
                        err.error(),
                        err.debug()
                    );
                    let mut async_error_storage = async_error_clone.lock();
                    *async_error_storage = Some(format!(
                        "Error from {:?}: {} ({:?})",
                        err.src().map(|s| s.path_string()),
                        err.error(),
                        err.debug()
                    ));
                }
                _ => (),
            }
            gst::BusSyncReply::Drop
        });
        pipeline
            .set_state(State::Ready)
            .expect("unable to set the pipeline to the `Ready` state");
        Self {
            appsrc,
            bufferpool,
            pipeline,
            format,
            async_error,
        }
    }
}
impl Sink for GstreamerSink {
    /// Resumes playback: clears any recorded async error, ends the flush on
    /// the source, activates the buffer pool and sets the pipeline `Playing`.
    fn start(&mut self) -> SinkResult<()> {
        *self.async_error.lock() = None;
        self.appsrc.send_event(FlushStop::new(true));
        self.bufferpool
            .set_active(true)
            .map_err(|e| SinkError::StateChange(e.to_string()))?;
        self.pipeline
            .set_state(State::Playing)
            .map_err(|e| SinkError::StateChange(e.to_string()))?;
        Ok(())
    }
    /// Pauses playback: begins a flush so queued audio is discarded, pauses
    /// the pipeline and deactivates the buffer pool.
    fn stop(&mut self) -> SinkResult<()> {
        *self.async_error.lock() = None;
        self.appsrc.send_event(FlushStart::new());
        self.pipeline
            .set_state(State::Paused)
            .map_err(|e| SinkError::StateChange(e.to_string()))?;
        self.bufferpool
            .set_active(false)
            .map_err(|e| SinkError::StateChange(e.to_string()))?;
        Ok(())
    }
    // Generates `write()` by converting packets to raw bytes (see macro def).
    sink_as_bytes!();
}
impl Drop for GstreamerSink {
    // Tear the pipeline down to `Null`; the result is deliberately ignored
    // since nothing useful can be done about a failure during drop.
    fn drop(&mut self) {
        let _ = self.pipeline.set_state(State::Null);
    }
}
impl SinkAsBytes for GstreamerSink {
    /// Pushes one chunk of raw audio bytes into the pipeline via the buffer
    /// pool. Surfaces any asynchronous pipeline error recorded by the bus
    /// handler before attempting the write.
    fn write_bytes(&mut self, data: &[u8]) -> SinkResult<()> {
        if let Some(async_error) = &*self.async_error.lock() {
            return Err(SinkError::OnWrite(async_error.to_string()));
        }
        // May block until a pool buffer is free (appsrc is in blocking mode).
        let mut buffer = self
            .bufferpool
            .acquire_buffer(None)
            .map_err(|e| SinkError::OnWrite(e.to_string()))?;
        let mutbuf = buffer.make_mut();
        // Shrink the pool buffer to the actual payload before copying.
        mutbuf.set_size(data.len());
        mutbuf
            .copy_from_slice(0, data)
            .map_err(|e| SinkError::OnWrite(e.to_string()))?;
        self.appsrc
            .push_buffer(buffer)
            .map_err(|e| SinkError::OnWrite(e.to_string()))?;
        Ok(())
    }
}
impl GstreamerSink {
    /// Backend identifier used when selecting this sink by name.
    pub const NAME: &'static str = "gstreamer";
}
|
#[macro_use]
extern crate clap;
use clap::{AppSettings, Arg, SubCommand};
use opfs::block::sblock;
use opfs::file::*;
use opfs::subcommand;
use std::process::exit;
fn main() {
let matches = app_from_crate!()
.setting(AppSettings::SubcommandRequiredElseHelp)
.arg(
Arg::with_name("img_file")
.help("path to image file to manipulate")
.required(true)
.index(1),
)
.subcommand(
SubCommand::with_name("ls")
.about("list directory contents")
.arg(
Arg::with_name("path")
.help("path to file to look up")
.required(true)
.index(1),
),
)
.subcommand(
SubCommand::with_name("get")
.about("extract file")
.arg(
Arg::with_name("source")
.help("path to file to extract")
.required(true)
.index(1),
)
.arg(
Arg::with_name("destination")
.help("destination path of host")
.required(true)
.index(2),
),
)
.subcommand(
SubCommand::with_name("rm")
.about("remove directory entries")
.arg(
Arg::with_name("path")
.help("path to file to remove")
.required(true)
.index(1),
),
)
.subcommand(
SubCommand::with_name("put")
.about("put file into image file")
.arg(
Arg::with_name("source")
.help("path to file to put (host)")
.required(true)
.index(1),
)
.arg(
Arg::with_name("destination")
.help("destination path of image file")
.required(true)
.index(2),
),
)
.get_matches();
let path = matches.value_of("img_file").unwrap();
let file_size = match get_file_size(path) {
Ok(s) => s,
Err(e) => {
eprintln!("{}", e);
exit(1);
}
};
let file = match open_readable_and_writable_file(path) {
Ok(f) => f,
Err(e) => {
eprintln!("{}", e);
exit(1);
}
};
let mut img = match get_memory_mapped_file(&file, file_size) {
Ok(m) => m,
Err(e) => {
eprintln!("{}", e);
exit(1);
}
};
let sblock = sblock::u8_slice_as_superblock(&img);
sblock::check_magic_number(&sblock);
if let Some(ref matches) = matches.subcommand_matches("ls") {
let path = matches.value_of("path").unwrap();
subcommand::ls(&img, &path, &sblock);
} else if let Some(ref matches) = matches.subcommand_matches("get") {
let src = matches.value_of("source").unwrap();
let dst = matches.value_of("destination").unwrap();
subcommand::get(&img, &src, &dst, &sblock);
} else if let Some(ref matches) = matches.subcommand_matches("rm") {
let path = matches.value_of("path").unwrap();
subcommand::rm(&mut img, &path, &sblock);
} else if let Some(ref matches) = matches.subcommand_matches("put") {
let src = matches.value_of("source").unwrap();
let dst = matches.value_of("destination").unwrap();
subcommand::put(&mut img, &src, &dst, &sblock);
}
}
|
use shrev::*;
use component::event::*;
use protocol::client::*;
use types::event::{ConnectionClose, ConnectionOpen, Message};
use types::ConnectionId;
// Connection Events
//
// Each `On*` alias below names a `shrev::EventChannel` carrying one
// event type; the matching `On*Reader` aliases name the `ReaderId`
// handles used to consume from those channels.
pub type OnOpen = EventChannel<ConnectionOpen>;
pub type OnClose = EventChannel<ConnectionClose>;
// Timer Event
pub type OnTimerEvent = EventChannel<TimerEvent>;
// Packet Received Events (each paired with the originating ConnectionId)
pub type OnBinary = EventChannel<Message>;
pub type OnLogin = EventChannel<(ConnectionId, Login)>;
pub type OnBackup = EventChannel<(ConnectionId, Backup)>;
pub type OnCommand = EventChannel<(ConnectionId, Command)>;
pub type OnHorizon = EventChannel<(ConnectionId, Horizon)>;
pub type OnKey = EventChannel<(ConnectionId, Key)>;
pub type OnPong = EventChannel<(ConnectionId, Pong)>;
pub type OnChat = EventChannel<(ConnectionId, Chat)>;
pub type OnSay = EventChannel<(ConnectionId, Say)>;
pub type OnTeamChat = EventChannel<(ConnectionId, TeamChat)>;
pub type OnWhisper = EventChannel<(ConnectionId, Whisper)>;
pub type OnVotemute = EventChannel<(ConnectionId, VoteMute)>;
pub type OnLocalPing = EventChannel<(ConnectionId, LocalPing)>;
pub type OnScoreDetailed = EventChannel<ScoreDetailedEvent>;
pub type OnAck = EventChannel<AckEvent>;
// In-game events
pub type OnPlayerJoin = EventChannel<PlayerJoin>;
pub type OnPlayerLeave = EventChannel<PlayerLeave>;
pub type OnPlayerKilled = EventChannel<PlayerKilled>;
pub type OnPlayerRespawn = EventChannel<PlayerRespawn>;
pub type OnPlayerSpectate = EventChannel<PlayerSpectate>;
// Collision events
pub type OnPlayerTerrainCollision = EventChannel<PlayerTerrainCollision>;
pub type OnPlayerMissileCollision = EventChannel<PlayerMissileCollision>;
pub type OnPlayerPowerupCollision = EventChannel<PlayerPowerupCollision>;
pub type OnMissileTerrainCollision = EventChannel<MissileTerrainCollision>;
// Readers — one `ReaderId` alias per channel alias above.
pub type OnOpenReader = ReaderId<ConnectionOpen>;
pub type OnCloseReader = ReaderId<ConnectionClose>;
pub type OnTimerEventReader = ReaderId<TimerEvent>;
pub type OnBinaryReader = ReaderId<Message>;
pub type OnLoginReader = ReaderId<(ConnectionId, Login)>;
pub type OnBackupReader = ReaderId<(ConnectionId, Backup)>;
pub type OnCommandReader = ReaderId<(ConnectionId, Command)>;
pub type OnHorizonReader = ReaderId<(ConnectionId, Horizon)>;
pub type OnKeyReader = ReaderId<(ConnectionId, Key)>;
pub type OnPongReader = ReaderId<(ConnectionId, Pong)>;
pub type OnChatReader = ReaderId<(ConnectionId, Chat)>;
pub type OnSayReader = ReaderId<(ConnectionId, Say)>;
pub type OnTeamChatReader = ReaderId<(ConnectionId, TeamChat)>;
pub type OnWhisperReader = ReaderId<(ConnectionId, Whisper)>;
pub type OnVotemuteReader = ReaderId<(ConnectionId, VoteMute)>;
pub type OnLocalPingReader = ReaderId<(ConnectionId, LocalPing)>;
pub type OnScoreDetailedReader = ReaderId<ScoreDetailedEvent>;
pub type OnAckReader = ReaderId<AckEvent>;
// In-game events
pub type OnPlayerJoinReader = ReaderId<PlayerJoin>;
pub type OnPlayerLeaveReader = ReaderId<PlayerLeave>;
pub type OnPlayerKilledReader = ReaderId<PlayerKilled>;
pub type OnPlayerRespawnReader = ReaderId<PlayerRespawn>;
pub type OnPlayerSpectateReader = ReaderId<PlayerSpectate>;
// Collision events
pub type OnPlayerMissileCollisionReader = ReaderId<PlayerMissileCollision>;
pub type OnPlayerTerrainCollisionReader = ReaderId<PlayerTerrainCollision>;
pub type OnPlayerPowerupCollisionReader = ReaderId<PlayerPowerupCollision>;
pub type OnMissileTerrainCollisionReader = ReaderId<MissileTerrainCollision>;
|
//temporary Lighthouse SSZ and hashing implementation
use bls::PublicKeyBytes;
use ethereum_types::H256 as Hash256;
use serde::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
use ssz_types::{BitList, FixedVector, VariableList};
use tree_hash::TreeHash;
use tree_hash_derive::{SignedRoot, TreeHash};
use typenum::{Sum, U1};
use crate::config::*;
use crate::consts;
use crate::primitives::*;
/// An attestation: participation bits over a committee, the attested
/// data, and an aggregate signature (excluded from the signed root).
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot)]
pub struct Attestation<C: Config> {
    pub aggregation_bits: BitList<C::MaxValidatorsPerCommittee>,
    pub data: AttestationData,
    #[signed_root(skip_hashing)]
    pub signature: AggregateSignature,
}
/// The data being attested to: slot, committee index, block root, and
/// the source/target checkpoints.
#[derive(
    Clone,
    PartialEq,
    Eq,
    Debug,
    Hash,
    Deserialize,
    Serialize,
    Encode,
    Decode,
    TreeHash,
    SignedRoot,
    Default,
)]
pub struct AttestationData {
    pub slot: Slot,
    pub index: u64,
    pub beacon_block_root: H256,
    pub source: Checkpoint,
    pub target: Checkpoint,
}
/// Attestation data paired with a custody bit.
#[derive(
    Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot,
)]
pub struct AttestationDataAndCustodyBit {
    pub data: AttestationData,
    pub custody_bit: bool,
}
/// A validator's attestation assignment: the slot, shard, and its
/// position/committee size within the committee.
#[derive(
    Clone,
    Copy,
    PartialEq,
    Eq,
    Debug,
    Default,
    Deserialize,
    Serialize,
    Encode,
    Decode,
    TreeHash,
    SignedRoot,
)]
pub struct AttestationDuty {
    pub slot: Slot,
    pub shard: Shard,
    pub committee_index: usize,
    pub committee_len: usize,
}
/// Evidence of a slashable offense: two conflicting indexed attestations.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash)]
pub struct AttesterSlashing<C: Config> {
    pub attestation_1: IndexedAttestation<C>,
    pub attestation_2: IndexedAttestation<C>,
}
/// A beacon-chain block; the proposer signature is excluded from the
/// signed root.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot)]
pub struct BeaconBlock<C: Config> {
    pub slot: Slot,
    pub parent_root: H256,
    pub state_root: H256,
    pub body: BeaconBlockBody<C>,
    #[signed_root(skip_hashing)]
    pub signature: Signature,
}
impl<C: Config> Default for BeaconBlock<C> {
    /// Manual impl because `Signature` uses `empty_signature()` rather
    /// than a `Default` implementation.
    fn default() -> Self {
        #[allow(clippy::default_trait_access)]
        Self {
            slot: Default::default(),
            parent_root: Default::default(),
            state_root: Default::default(),
            body: Default::default(),
            signature: Signature::empty_signature(),
        }
    }
}
/// Operations carried in a beacon block, each list bounded by a
/// config-specified maximum.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot)]
pub struct BeaconBlockBody<C: Config> {
    pub randao_reveal: Signature,
    pub eth1_data: Eth1Data,
    pub graffiti: [u8; 32],
    pub proposer_slashings: VariableList<ProposerSlashing, C::MaxProposerSlashings>,
    pub attester_slashings: VariableList<AttesterSlashing<C>, C::MaxAttesterSlashings>,
    pub attestations: VariableList<Attestation<C>, C::MaxAttestations>,
    pub deposits: VariableList<Deposit, C::MaxDeposits>,
    pub voluntary_exits: VariableList<VoluntaryExit, C::MaxVoluntaryExits>,
}
impl<C: Config> Default for BeaconBlockBody<C> {
    /// Manual impl because `Signature` uses `empty_signature()` rather
    /// than a `Default` implementation.
    fn default() -> Self {
        #[allow(clippy::default_trait_access)]
        Self {
            randao_reveal: Signature::empty_signature(),
            eth1_data: Default::default(),
            graffiti: Default::default(),
            proposer_slashings: Default::default(),
            attester_slashings: Default::default(),
            deposits: Default::default(),
            attestations: Default::default(),
            voluntary_exits: Default::default(),
        }
    }
}
/// A block header: like `BeaconBlock` but with the body replaced by its
/// hash-tree root; the signature is excluded from the signed root.
#[derive(
    Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot,
)]
pub struct BeaconBlockHeader {
    pub slot: Slot,
    pub parent_root: H256,
    pub state_root: H256,
    pub body_root: H256,
    #[signed_root(skip_hashing)]
    pub signature: Signature,
}
impl Default for BeaconBlockHeader {
    /// Manual impl because `Signature` uses `empty_signature()` rather
    /// than a `Default` implementation.
    fn default() -> Self {
        #[allow(clippy::default_trait_access)]
        Self {
            slot: Default::default(),
            parent_root: Default::default(),
            state_root: Default::default(),
            body_root: Default::default(),
            signature: Signature::empty_signature(),
        }
    }
}
impl BeaconBlockHeader {
    /// The header's hash-tree root as a `Hash256`.
    pub fn canonical_root(&self) -> Hash256 {
        Hash256::from_slice(&self.tree_hash_root()[..])
    }
}
/// An (epoch, block root) pair used as an attestation source/target.
#[derive(
    Clone,
    Copy,
    PartialEq,
    Eq,
    Debug,
    Default,
    Hash,
    Deserialize,
    Serialize,
    Encode,
    Decode,
    TreeHash,
)]
pub struct Checkpoint {
    pub epoch: Epoch,
    pub root: H256,
}
/// A shard crosslink spanning `start_epoch..end_epoch`.
#[derive(
    Clone, PartialEq, Eq, Debug, Default, Hash, Deserialize, Serialize, Encode, Decode, TreeHash,
)]
pub struct Crosslink {
    pub shard: u64,
    pub parent_root: H256,
    pub start_epoch: Epoch,
    pub end_epoch: Epoch,
    pub data_root: H256,
}
/// A deposit plus its Merkle proof; the proof has one extra node
/// (`DepositContractTreeDepth + 1`) for the length mix-in.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash)]
pub struct Deposit {
    pub proof: FixedVector<H256, Sum<consts::DepositContractTreeDepth, U1>>,
    pub data: DepositData,
}
/// The signed contents of a deposit; the signature is excluded from
/// the signed root.
#[derive(
    Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot,
)]
pub struct DepositData {
    pub pubkey: PublicKeyBytes,
    pub withdrawal_credentials: H256,
    pub amount: u64,
    #[signed_root(skip_hashing)]
    pub signature: SignatureBytes,
}
/// A view of the eth1 deposit contract state.
#[derive(Clone, PartialEq, Eq, Debug, Default, Deserialize, Serialize, Encode, Decode, TreeHash)]
pub struct Eth1Data {
    pub deposit_root: H256,
    pub deposit_count: u64,
    pub block_hash: H256,
}
/// Fork version information and the epoch at which it takes effect.
#[derive(
    Clone,
    PartialEq,
    Eq,
    Debug,
    Deserialize,
    Serialize,
    Encode,
    Decode,
    TreeHash,
    SignedRoot,
    Default,
)]
pub struct Fork {
    pub previous_version: Version,
    pub current_version: Version,
    pub epoch: Epoch,
}
/// A batch of historical block and state roots, both fixed to
/// `SlotsPerHistoricalRoot` entries.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash)]
pub struct HistoricalBatch<C: Config> {
    pub block_roots: FixedVector<H256, C::SlotsPerHistoricalRoot>,
    pub state_roots: FixedVector<H256, C::SlotsPerHistoricalRoot>,
}
/// An attestation with explicit attesting validator indices instead of
/// aggregation bits; the signature is excluded from the signed root.
#[derive(
    Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot, Default,
)]
pub struct IndexedAttestation<C: Config> {
    pub attesting_indices: VariableList<u64, C::MaxValidatorsPerCommittee>,
    pub data: AttestationData,
    #[signed_root(skip_hashing)]
    pub signature: AggregateSignature,
}
/// An attestation recorded in state along with inclusion metadata.
#[derive(Clone, PartialEq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash)]
pub struct PendingAttestation<C: Config> {
    pub aggregation_bits: BitList<C::MaxValidatorsPerCommittee>,
    pub data: AttestationData,
    pub inclusion_delay: u64,
    pub proposer_index: u64,
}
/// Evidence of a slashable offense: two conflicting block headers
/// signed by the same proposer.
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash)]
pub struct ProposerSlashing {
    pub proposer_index: u64,
    pub header_1: BeaconBlockHeader,
    pub header_2: BeaconBlockHeader,
}
/// A balance transfer between validators; the signature is excluded
/// from the signed root.
#[derive(
    Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot,
)]
pub struct Transfer {
    pub sender: u64,
    pub recipient: u64,
    pub amount: u64,
    pub fee: u64,
    pub slot: Slot,
    pub pubkey: PublicKey,
    #[signed_root(skip_hashing)]
    pub signature: Signature,
}
/// A validator's registry entry, including its lifecycle epochs.
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, Default)]
pub struct Validator {
    pub pubkey: PublicKey,
    pub withdrawal_credentials: H256,
    pub effective_balance: u64,
    pub slashed: bool,
    pub activation_eligibility_epoch: Epoch,
    pub activation_epoch: Epoch,
    pub exit_epoch: Epoch,
    pub withdrawable_epoch: Epoch,
}
/// A request by a validator to exit; the signature is excluded from the
/// signed root.
#[derive(
    Clone, PartialEq, Eq, Debug, Deserialize, Serialize, Encode, Decode, TreeHash, SignedRoot,
)]
pub struct VoluntaryExit {
    pub epoch: Epoch,
    pub validator_index: u64,
    #[signed_root(skip_hashing)]
    pub signature: Signature,
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crate::{
utils::rescue::{Hash, Rescue128},
Example, ExampleOptions,
};
use log::debug;
use std::time::Instant;
use winterfell::{
crypto::MerkleTree,
math::{fields::f128::BaseElement, log2, FieldElement, StarkField},
ProofOptions, StarkProof, VerifierError,
};
mod air;
use air::{build_trace, MerkleAir, PublicInputs};
#[cfg(test)]
mod tests;
// MERKLE AUTHENTICATION PATH EXAMPLE
// ================================================================================================
/// Builds a boxed Merkle-membership example of the given tree depth,
/// deriving proof options from the supplied example options.
pub fn get_example(options: ExampleOptions, tree_depth: usize) -> Box<dyn Example> {
    let proof_options = options.to_proof_options(28, 8);
    let example = MerkleExample::new(tree_depth, proof_options);
    Box::new(example)
}
/// Inputs for the Merkle authentication-path example: proof options,
/// the tree root, the (value, index) leaf being proven, and the
/// authentication path from that leaf to the root.
pub struct MerkleExample {
    options: ProofOptions,
    tree_root: Hash,
    value: [BaseElement; 2],
    index: usize,
    path: Vec<Hash>,
}
impl MerkleExample {
    /// Builds the example: constructs a Merkle tree of the given depth
    /// with pseudo-random leaves, plants a known leaf at a random index,
    /// and records the authentication path for it.
    ///
    /// # Panics
    /// Panics unless `tree_depth + 1` is a power of two.
    pub fn new(tree_depth: usize, options: ProofOptions) -> MerkleExample {
        assert!(
            (tree_depth + 1).is_power_of_two(),
            "tree depth must be one less than a power of 2"
        );
        let value = [BaseElement::new(42), BaseElement::new(43)];
        // pick a random leaf index in [0, 2^tree_depth)
        let index = (BaseElement::rand().as_int() % u128::pow(2, tree_depth as u32)) as usize;
        // build Merkle tree of the specified depth
        let now = Instant::now();
        let tree = build_merkle_tree(tree_depth, value, index);
        debug!(
            "Built Merkle tree of depth {} in {} ms",
            tree_depth,
            now.elapsed().as_millis(),
        );
        // compute Merkle path from the leaf specified by the index
        let now = Instant::now();
        let path = tree.prove(index).unwrap();
        debug!(
            "Computed Merkle path from leaf {} to root {} in {} ms",
            index,
            hex::encode(tree.root()),
            now.elapsed().as_millis(),
        );
        MerkleExample {
            options,
            tree_root: *tree.root(),
            value,
            index,
            path,
        }
    }
}
// EXAMPLE IMPLEMENTATION
// ================================================================================================
impl Example for MerkleExample {
    /// Builds the execution trace for the membership claim and generates
    /// a STARK proof over it.
    fn prove(&self) -> StarkProof {
        // generate the execution trace
        debug!(
            "Generating proof for proving membership in a Merkle tree of depth {}\n\
            ---------------------",
            self.path.len()
        );
        let now = Instant::now();
        let trace = build_trace(self.value, &self.path, self.index);
        let trace_length = trace.length();
        debug!(
            "Generated execution trace of {} registers and 2^{} steps in {} ms",
            trace.width(),
            log2(trace_length),
            now.elapsed().as_millis()
        );
        // generate the proof
        let pub_inputs = PublicInputs {
            tree_root: self.tree_root.to_elements(),
        };
        winterfell::prove::<MerkleAir>(trace, pub_inputs, self.options.clone()).unwrap()
    }
    /// Verifies the proof against the genuine tree root.
    fn verify(&self, proof: StarkProof) -> Result<(), VerifierError> {
        let pub_inputs = PublicInputs {
            tree_root: self.tree_root.to_elements(),
        };
        winterfell::verify::<MerkleAir>(proof, pub_inputs)
    }
    /// Negative test: verifies against a corrupted root (the two root
    /// elements swapped), which is expected to fail.
    fn verify_with_wrong_inputs(&self, proof: StarkProof) -> Result<(), VerifierError> {
        let tree_root = self.tree_root.to_elements();
        let pub_inputs = PublicInputs {
            tree_root: [tree_root[1], tree_root[0]],
        };
        winterfell::verify::<MerkleAir>(proof, pub_inputs)
    }
}
// HELPER FUNCTIONS
// ================================================================================================
/// Builds a Merkle tree of `2^depth` pseudo-random leaves (fixed seed),
/// with the leaf at `index` replaced by the digest of `value`.
///
/// Improvements over the original: the leaf vector is preallocated, and
/// leaves are built from element pairs via `chunks_exact` instead of a
/// manual stride-2 index loop.
fn build_merkle_tree(depth: usize, value: [BaseElement; 2], index: usize) -> MerkleTree<Rescue128> {
    let num_leaves = usize::pow(2, depth as u32);
    // Two field elements per leaf.
    let leaf_elements = BaseElement::prng_vector([1; 32], num_leaves * 2);
    let mut leaves = Vec::with_capacity(num_leaves);
    for pair in leaf_elements.chunks_exact(2) {
        leaves.push(Hash::new(pair[0], pair[1]));
    }
    leaves[index] = Rescue128::digest(&value);
    MerkleTree::new(leaves).unwrap()
}
|
// Benchmark-related submodules exposed by this crate.
pub mod benchtemplate;
pub mod filterbench;
pub mod joinbench;
pub mod test;
|
// Demonstrates variable shadowing: re-declaring a name with `let`
// creates a fresh binding, optionally of a different type.
fn main() {
    let my_var = 7;
    println!("my_var = {:?}", my_var);
    {
        let my_var = 8; // shadow: visible only inside this inner scope
        println!("inner scope my_var = {:?}", my_var);
    }
    let my_var = "foo"; // another shadow: the bound type changes to &str
    println!("shadowed my_var = {:?}", my_var);
}
use std::path::{Path, PathBuf};
use anyhow::{anyhow, Result};
/// Returns a canonical absolute file path for the `filepath` argument.
/// This function will return an error in (at least) the following situations:
///
/// - The path does not exist.
/// - A non-final component in path is not a directory.
pub(crate) fn get_absolute_filepath<P>(filepath: P) -> Result<PathBuf>
where
P: AsRef<Path>,
{
match filepath.as_ref().canonicalize() {
Ok(pb) => Ok(pb),
Err(error) => Err(anyhow!(error)),
}
}
/// Returns a boolean that indicates whether the `filepath` parameter
/// includes the `extension` file extension. The comparison accepts the
/// extension with or without a leading period (e.g., `.txt` or `txt`).
/// Returns `false` when the path has no extension, or when the extension
/// is not valid UTF-8 (the previous implementation panicked via
/// `unwrap` in that case).
pub(crate) fn path_has_extension<P>(filepath: P, extension: &str) -> bool
where
    P: AsRef<Path>,
{
    // Normalize the expected extension by dropping an optional leading '.'.
    let expected = if extension.starts_with('.') {
        &extension[1..]
    } else {
        extension
    };
    // `extension()` is None for paths without a final `.ext` component;
    // `to_str()` is None for non-UTF-8 extensions. Both map to `false`.
    match filepath.as_ref().extension().and_then(|ext| ext.to_str()) {
        Some(ext) => ext == expected,
        None => false,
    }
}
/// Returns a boolean that indicates whether there is a dot directory
/// or dot file anywhere in the canonical absolute path to the file.
/// This is used as a filter during execution; paths that cannot be
/// canonicalized (e.g., they do not exist) yield `false`.
pub(crate) fn path_is_hidden<P>(filepath: P) -> bool
where
    P: AsRef<Path>,
{
    let resolved = match get_absolute_filepath(filepath) {
        Ok(pb) => pb,
        Err(_) => return false,
    };
    resolved
        .iter()
        .any(|component| component.to_str().unwrap().starts_with("."))
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE: these tests depend on fixture files under `tests/testfiles/`
    // and on running from the crate root (`recurse/`).
    // ======================================
    //  get_absolute_filepath function tests
    // ======================================
    #[test]
    fn test_get_absolute_filepath_good_path() {
        let testpath = get_absolute_filepath("./Cargo.toml");
        assert!(testpath.is_ok());
        assert!(testpath.unwrap().ends_with("recurse/Cargo.toml"));
    }
    #[test]
    fn test_get_absolute_filepath_bad_path() {
        let testpath = get_absolute_filepath("./bogus.bad");
        assert!(testpath.is_err());
    }
    // ======================================
    //  path_has_extension function tests
    // ======================================
    #[test]
    fn test_path_has_extension_with_correct_extension() {
        let testpath = Path::new("./tests/testfiles/path/test.txt");
        assert!(path_has_extension(testpath, ".txt"));
        assert!(path_has_extension(testpath, "txt"));
    }
    #[test]
    fn test_path_has_extension_with_incorrect_extension() {
        let testpath = Path::new("./tests/testfiles/path/test.txt");
        assert_eq!(path_has_extension(testpath, ".yaml"), false);
        assert_eq!(path_has_extension(testpath, "yaml"), false);
    }
    #[test]
    fn test_path_has_extension_with_no_extension() {
        let testpath = Path::new("./tests/testfiles/path/testfile");
        assert_eq!(path_has_extension(testpath, ".txt"), false);
        assert_eq!(path_has_extension(testpath, "txt"), false);
    }
    // ======================================
    //  path_is_hidden function tests
    // ======================================
    #[test]
    fn test_path_is_hidden_with_dotfile() {
        let testpath = Path::new("./tests/testfiles/path/.testfile");
        assert!(path_is_hidden(testpath));
        assert!(path_is_hidden(testpath)); // confirm that we do not transfer ownership
    }
    #[test]
    fn test_path_is_hidden_with_dotfile_pathbuf() {
        let testpath = PathBuf::from("./tests/testfiles/path/.testfile");
        // Owned types need to be borrowed or ownership is relinquished and
        // raises panic
        assert!(path_is_hidden(&testpath)); // addressed by using &testpath
        assert!(path_is_hidden(testpath)); // ownership transitions on this call
    }
    #[test]
    fn test_path_is_hidden_with_dotdir_in_path() {
        let testpath = Path::new("./tests/testfiles/.dotdir/testfile");
        assert!(path_is_hidden(testpath));
    }
    #[test]
    fn test_path_is_hidden_with_dotdir_only() {
        let testpath = Path::new("./tests/testfiles/.dotdir");
        assert!(path_is_hidden(testpath));
    }
    #[test]
    fn test_path_is_not_hidden_without_dotfile_or_dotdir() {
        let testpath = Path::new("./tests/testfiles/path/testfile");
        assert_eq!(path_is_hidden(testpath), false);
    }
}
|
// Capability submodules, with their primary types re-exported at this
// module's root for convenience.
pub mod color_control;
pub mod switch;
pub mod switch_level;
pub use self::color_control::ColorControl;
pub use self::switch::Switch;
pub use self::switch_level::SwitchLevel;
|
use model::*;
use errors::*;
use url::Url;
use serde_json::from_str;
use tungstenite::connect;
use tungstenite::protocol::WebSocket;
use tungstenite::client::AutoStream;
use tungstenite::handshake::client::Response;
static WEBSOCKET_URL: &'static str = "wss://stream.binance.com:9443/ws/";
static OUTBOUND_ACCOUNT_INFO: &'static str = "outboundAccountInfo";
static EXECUTION_REPORT: &'static str = "executionReport";
static KLINE: &'static str = "kline";
static AGGREGATED_TRADE: &'static str = "aggTrade";
/// Callbacks for user-data stream events (account and order updates).
pub trait UserStreamEventHandler {
    fn account_update_handler(&self, event: &AccountUpdateEvent);
    fn order_trade_handler(&self, event: &OrderTradeEvent);
}
/// Callback for aggregated-trade market events.
pub trait MarketEventHandler {
    fn aggregated_trades_handler(&self, event: &TradesEvent);
}
/// Callback for candlestick (kline) events.
pub trait KlineEventHandler {
    fn kline_handler(&self, event: &KlineEvent);
}
/// WebSocket client state: the open socket (if connected) plus the
/// optional handlers that `event_loop` dispatches to.
#[derive(Default)]
pub struct WebSockets {
    socket: Option<(WebSocket<AutoStream>, Response)>,
    user_stream_handler: Option<Box<UserStreamEventHandler>>,
    market_handler: Option<Box<MarketEventHandler>>,
    kline_handler: Option<Box<KlineEventHandler>>,
}
impl WebSockets {
pub fn new() -> WebSockets {
WebSockets {
socket: None,
user_stream_handler: None,
market_handler: None,
kline_handler: None,
}
}
pub fn connect(&mut self, endpoint: &str) -> Result<()> {
let wss: String = format!("{}{}", WEBSOCKET_URL, endpoint);
let url = Url::parse(&wss)?;
match connect(url) {
Ok(answer) => {
self.socket = Some(answer);
Ok(())
}
Err(e) => {
bail!(format!("Error during handshake {}", e));
}
}
}
pub fn add_user_stream_handler<H>(&mut self, handler: H)
where
H: UserStreamEventHandler + 'static,
{
self.user_stream_handler = Some(Box::new(handler));
}
pub fn add_market_handler<H>(&mut self, handler: H)
where
H: MarketEventHandler + 'static,
{
self.market_handler = Some(Box::new(handler));
}
pub fn add_kline_handler<H>(&mut self, handler: H)
where
H: KlineEventHandler + 'static,
{
self.kline_handler = Some(Box::new(handler));
}
pub fn event_loop(&mut self) {
loop {
if let Some(ref mut socket) = self.socket {
let msg: String = socket.0.read_message().unwrap().into_text().unwrap();
if msg.find(OUTBOUND_ACCOUNT_INFO) != None {
let account_update: AccountUpdateEvent = from_str(msg.as_str()).unwrap();
if let Some(ref h) = self.user_stream_handler {
h.account_update_handler(&account_update);
}
} else if msg.find(EXECUTION_REPORT) != None {
let order_trade: OrderTradeEvent = from_str(msg.as_str()).unwrap();
if let Some(ref h) = self.user_stream_handler {
h.order_trade_handler(&order_trade);
}
} else if msg.find(AGGREGATED_TRADE) != None {
let trades: TradesEvent = from_str(msg.as_str()).unwrap();
if let Some(ref h) = self.market_handler {
h.aggregated_trades_handler(&trades);
}
} else if msg.find(KLINE) != None {
let kline: KlineEvent = from_str(msg.as_str()).unwrap();
if let Some(ref h) = self.kline_handler {
h.kline_handler(&kline);
}
}
}
}
}
}
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// All modeled error types this service can return, plus an `Unhandled`
/// catch-all for errors not modeled by the SDK.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum Error {
    BadRequestError(crate::error::BadRequestError),
    CapacityExceededError(crate::error::CapacityExceededError),
    InvalidSessionError(crate::error::InvalidSessionError),
    LimitExceededError(crate::error::LimitExceededError),
    OccConflictError(crate::error::OccConflictError),
    RateExceededError(crate::error::RateExceededError),
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for Error {
    /// Delegates to the wrapped error's own `Display` implementation for
    /// every variant, including `Unhandled`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::BadRequestError(inner) => inner.fmt(f),
            Error::CapacityExceededError(inner) => inner.fmt(f),
            Error::InvalidSessionError(inner) => inner.fmt(f),
            Error::LimitExceededError(inner) => inner.fmt(f),
            Error::OccConflictError(inner) => inner.fmt(f),
            Error::RateExceededError(inner) => inner.fmt(f),
            Error::Unhandled(inner) => inner.fmt(f),
        }
    }
}
impl From<smithy_http::result::SdkError<crate::error::SendCommandError>> for Error {
    /// Converts an operation-specific `SendCommandError` into the
    /// service-wide `Error`, mapping each modeled error kind to its
    /// variant and everything else (including non-service SDK errors)
    /// to `Unhandled`.
    fn from(err: smithy_http::result::SdkError<crate::error::SendCommandError>) -> Self {
        match err {
            smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::SendCommandErrorKind::BadRequestError(inner) => {
                    Error::BadRequestError(inner)
                }
                crate::error::SendCommandErrorKind::CapacityExceededError(inner) => {
                    Error::CapacityExceededError(inner)
                }
                crate::error::SendCommandErrorKind::InvalidSessionError(inner) => {
                    Error::InvalidSessionError(inner)
                }
                crate::error::SendCommandErrorKind::LimitExceededError(inner) => {
                    Error::LimitExceededError(inner)
                }
                crate::error::SendCommandErrorKind::OccConflictError(inner) => {
                    Error::OccConflictError(inner)
                }
                crate::error::SendCommandErrorKind::RateExceededError(inner) => {
                    Error::RateExceededError(inner)
                }
                crate::error::SendCommandErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl std::error::Error for Error {}
|
use crate::common::*;
/// Non-fatal diagnostics surfaced to the user.
#[derive(Debug, PartialEq)]
pub(crate) enum Warning {
    // Remove this on 2021-07-01.
    #[allow(dead_code)]
    DotenvLoad,
}
impl Warning {
    /// Token to underline when printing this warning, or `None` when
    /// the warning has no source location.
    fn context(&self) -> Option<&Token> {
        match self {
            Self::DotenvLoad => None,
        }
    }
}
impl Display for Warning {
    /// Prints the warning with color styling, followed by the source
    /// context (if `context()` provides a token).
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let warning = Color::fmt(f).warning();
        let message = Color::fmt(f).message();
        write!(f, "{} {}", warning.paint("warning:"), message.prefix())?;
        match self {
            Self::DotenvLoad => {
                // The message text below is user-facing output and must
                // not be re-wrapped or re-indented.
                #[rustfmt::skip]
                write!(f, "\
A `.env` file was found and loaded, but this behavior will change in the future.
To silence this warning and continue loading `.env` files, add:
set dotenv-load := true
To silence this warning and stop loading `.env` files, add:
set dotenv-load := false
See https://github.com/casey/just/issues/469 for more details.")?;
            },
        }
        write!(f, "{}", message.suffix())?;
        if let Some(token) = self.context() {
            writeln!(f)?;
            token.write_context(f, Color::fmt(f).warning())?;
        }
        Ok(())
    }
}
|
use crate::error::{Error, Result};
use std::env;
/// Update mode, selected via the `TRYBUILD` environment variable.
#[derive(PartialEq, Debug)]
pub enum Update {
    // Selected by `TRYBUILD=wip`; also the default when the variable is
    // unset (see `Update::env`).
    Wip,
    // Selected by `TRYBUILD=overwrite`.
    Overwrite,
}
impl Default for Update {
    fn default() -> Self {
        Update::Wip
    }
}
impl Update {
    /// Reads the update mode from the `TRYBUILD` environment variable.
    /// An unset variable selects the default (`Wip`); any value other
    /// than `wip` or `overwrite` (including non-UTF-8) is an error.
    pub fn env() -> Result<Self> {
        match env::var_os("TRYBUILD") {
            None => Ok(Self::default()),
            Some(var) => match var.to_str() {
                Some("wip") => Ok(Update::Wip),
                Some("overwrite") => Ok(Update::Overwrite),
                _ => Err(Error::UpdateVar(var)),
            },
        }
    }
}
|
// Public submodules of this crate.
pub mod user;
pub mod pool_handler;
|
#![deny(warnings, rust_2018_idioms)]
use linkerd2_stack::NewService;
use parking_lot::RwLock;
use std::{
collections::{hash_map::Entry, HashMap},
hash::Hash,
sync::{Arc, Weak},
};
use tracing::{debug, trace};
pub mod layer;
pub use self::layer::CacheLayer;
/// Caches services produced by an inner `NewService`, keyed by target.
///
/// Each cached service is stored beside a `Weak` tracker; once every
/// clone of the service (and its `Handle`) has been dropped, the entry
/// is considered defunct and eligible for eviction.
#[derive(Clone)]
pub struct Cache<T, N>
where
    T: Eq + Hash,
    N: NewService<(T, Handle)>,
{
    new_service: N,
    services: Services<T, N::Service>,
}
/// A tracker inserted into each inner service that, when dropped, indicates the service may be
/// removed from the cache.
#[derive(Clone, Debug)]
pub struct Handle(Arc<()>);
/// Shared map from target to its cached service and the `Weak` side of
/// that service's liveness `Handle`.
type Services<T, S> = Arc<RwLock<HashMap<T, (S, Weak<()>)>>>;
// === impl Cache ===
impl<T, N> Cache<T, N>
where
    T: Eq + Hash,
    N: NewService<(T, Handle)>,
{
    /// Creates an empty cache around the given service factory.
    pub fn new(new_service: N) -> Self {
        Self {
            new_service,
            services: Services::default(),
        }
    }
    /// Builds a service for `target` along with the `Weak` liveness
    /// tracker stored beside it. The strong `Arc` is owned by the
    /// service via `Handle`, so the `Weak` upgrades only while the
    /// service is still alive.
    fn new_entry(new: &mut N, target: T) -> (N::Service, Weak<()>) {
        let handle = Arc::new(());
        let weak = Arc::downgrade(&handle);
        let svc = new.new_service((target, Handle(handle)));
        (svc, weak)
    }
}
impl<T, N> NewService<T> for Cache<T, N>
where
    T: Clone + Eq + Hash,
    N: NewService<(T, Handle)>,
    N::Service: Clone,
{
    type Service = N::Service;
    /// Returns the cached service for `target`, creating (and caching)
    /// one if no live entry exists. Also sweeps out entries whose
    /// services have all been dropped.
    fn new_service(&mut self, target: T) -> N::Service {
        // We expect the item to be available in most cases, so initially obtain only a read lock.
        if let Some((service, weak)) = self.services.read().get(&target) {
            if weak.upgrade().is_some() {
                trace!("Using cached service");
                return service.clone();
            }
        }
        // Otherwise, obtain a write lock to insert a new service.
        let mut services = self.services.write();
        let service = match services.entry(target.clone()) {
            Entry::Occupied(mut entry) => {
                // Another thread raced us to create a service for this target. Use it.
                let (svc, weak) = entry.get();
                if weak.upgrade().is_some() {
                    trace!("Using cached service");
                    svc.clone()
                } else {
                    // The raced entry is already dead; replace it.
                    debug!("Caching new service");
                    let (svc, weak) = Self::new_entry(&mut self.new_service, target);
                    entry.insert((svc.clone(), weak));
                    svc
                }
            }
            Entry::Vacant(entry) => {
                // Make a new service for the target.
                debug!("Caching new service");
                let (svc, weak) = Self::new_entry(&mut self.new_service, target);
                entry.insert((svc.clone(), weak));
                svc
            }
        };
        // Sweep defunct services now that the live (possibly new) entry is
        // in place. The service created above still holds its Handle, so
        // its weak count is non-zero and it survives this purge.
        let n = services.len();
        services.retain(|_, (_, weak)| {
            if weak.strong_count() > 0 {
                true
            } else {
                trace!("Dropping defunct service");
                false
            }
        });
        debug!(services = services.len(), dropped = n - services.len());
        service
    }
}
|
mod join_handle;
mod stream;
use std::{future::Future, time::Duration};
use futures::future::{self, Either};
use futures_timer::Delay;
pub(crate) use self::{join_handle::AsyncJoinHandle, stream::AsyncStream};
use crate::error::{ErrorKind, Result};
/// An abstract handle to the async runtime.
///
/// Which variants exist depends on the enabled cargo features; at least
/// one of `tokio-runtime` / `async-std-runtime` must be active for this
/// enum to be inhabited.
#[derive(Clone, Copy, Debug)]
pub(crate) enum AsyncRuntime {
    /// Represents the `tokio` runtime.
    #[cfg(feature = "tokio-runtime")]
    Tokio,
    /// Represents the `async-std` runtime.
    #[cfg(feature = "async-std-runtime")]
    AsyncStd,
}
impl AsyncRuntime {
    /// Spawn a task in the background to run a future.
    ///
    /// If the runtime is still running, this will return a handle to the background task.
    /// Otherwise, it will return `None`.
    pub(crate) fn spawn<F, O>(self, fut: F) -> Option<AsyncJoinHandle<O>>
    where
        F: Future<Output = O> + Send + 'static,
        O: Send + 'static,
    {
        match self {
            #[cfg(feature = "tokio-runtime")]
            Self::Tokio => {
                use tokio::runtime::Handle;
                // `try_current` fails outside a tokio runtime context;
                // `ok()?` turns that failure into returning `None`.
                let handle = Handle::try_current().ok()?;
                Some(AsyncJoinHandle::Tokio(handle.spawn(fut)))
            }
            #[cfg(feature = "async-std-runtime")]
            Self::AsyncStd => Some(AsyncJoinHandle::AsyncStd(async_std::task::spawn(fut))),
        }
    }
    /// Spawn a task in the background to run a future.
    ///
    /// Fire-and-forget variant of `spawn`: the join handle (if any) is
    /// discarded, so the future's output cannot be observed.
    pub(crate) fn execute<F, O>(self, fut: F)
    where
        F: Future<Output = O> + Send + 'static,
        O: Send + 'static,
    {
        self.spawn(fut);
    }
    /// Run a future in the foreground, blocking on it completing.
    ///
    /// Note: when both runtime features are enabled, only the async-std
    /// branch is compiled in (the tokio branch requires
    /// `not(feature = "async-std-runtime")`).
    pub(crate) fn block_on<F, T>(self, fut: F) -> T
    where
        F: Future<Output = T> + Send,
        T: Send,
    {
        #[cfg(all(feature = "tokio-runtime", not(feature = "async-std-runtime")))]
        {
            use tokio::runtime::Handle;
            Handle::current().enter(|| futures::executor::block_on(fut))
        }
        #[cfg(feature = "async-std-runtime")]
        {
            async_std::task::block_on(fut)
        }
    }
    /// Await on a future for a maximum amount of time before returning an error.
    pub(crate) async fn await_with_timeout<F>(
        self,
        future: F,
        timeout: Duration,
    ) -> Result<F::Output>
    where
        F: Future + Send + Unpin,
    {
        // Race the future against a timer; whichever finishes first wins.
        match future::select(future, Delay::new(timeout)).await {
            Either::Left((result, _)) => Ok(result),
            Either::Right(_) => Err(ErrorKind::InternalError {
                message: "Timed out waiting on future".to_string(),
            }
            .into()),
        }
    }
}
|
/// Validate a single non-`cid` passport field (AoC 2020 day 4, part 2 rules).
///
/// Returns `true` when `value` satisfies the constraint for field `id`;
/// unknown ids are invalid. Unlike the previous inline version this never
/// panics on short or empty values ("hgt:1" used to underflow
/// `value.len() - 2`, and "hcl:" used to unwrap an empty iterator).
fn field_is_valid(id: &str, value: &str) -> bool {
    // Shared helper: parse a year and check it lies in [lo, hi].
    fn year_in(value: &str, lo: usize, hi: usize) -> bool {
        value.parse::<usize>().map_or(false, |y| y >= lo && y <= hi)
    }
    match id {
        "byr" => year_in(value, 1920, 2002),
        "iyr" => year_in(value, 2010, 2020),
        "eyr" => year_in(value, 2020, 2030),
        "hgt" => {
            // Height must be "<number>cm" or "<number>in" within range.
            if let Some(num) = value.strip_suffix("cm") {
                num.parse::<usize>().map_or(false, |v| v >= 150 && v <= 193)
            } else if let Some(num) = value.strip_suffix("in") {
                num.parse::<usize>().map_or(false, |v| v >= 59 && v <= 76)
            } else {
                false
            }
        }
        // '#' followed by hex digits. Length is intentionally not checked,
        // matching the original behavior (the puzzle spec says exactly six).
        "hcl" => value.starts_with('#') && value.chars().skip(1).all(|c| c.is_digit(16)),
        "ecl" => matches!(value, "amb" | "blu" | "brn" | "gry" | "grn" | "hzl" | "oth"),
        "pid" => value.len() == 9 && value.chars().all(|c| c.is_digit(10)),
        _ => false,
    }
}
fn main() {
    let data = std::fs::read_to_string("../input.txt").unwrap();
    // Passports are separated by blank lines, fields by arbitrary whitespace.
    let count = data
        .split("\n\n")
        .filter(|passport| {
            let mut valid_count = 0;
            let mut has_cid = false;
            for field in passport.split_whitespace() {
                let mut parts = field.splitn(2, ':');
                let id = parts.next().expect("splitn always yields one part");
                let value = parts.next().expect("field must look like id:value");
                if id == "cid" {
                    // cid is optional; remember we saw it so the 7-field
                    // rule below only applies when cid is absent.
                    has_cid = true;
                    valid_count += 1;
                } else if field_is_valid(id, value) {
                    valid_count += 1;
                }
            }
            // All 8 fields valid, or all but the optional cid.
            valid_count == 8 || (valid_count == 7 && !has_cid)
        })
        .count();
    println!("{}", count);
}
|
use std::collections::VecDeque;
use std::iter::FromIterator;
use aoc2019::io::slurp_stdin;
extern crate regex;
/// One shuffle instruction from the puzzle input.
enum Technique {
    // "deal into new stack": reverse the deck.
    DealIntoNew,
    // "cut N": rotate N cards from the top to the bottom (negative N cuts
    // from the bottom).
    Cut(i64),
    // "deal with increment N": lay cards out every N positions, wrapping.
    DealWithIncrement(i64),
}
/// A deck of cards; the front of the deque is the top of the deck.
struct Deck {
    cards: VecDeque<i64>,
}
impl Deck {
    /// Build a factory-ordered deck holding cards `0..n`, top card first.
    fn new(n: i64) -> Self {
        Deck {
            cards: (0..n).collect(),
        }
    }
    /// "Deal into new stack": reverse the entire deck.
    fn deal_into_new(&mut self) {
        let reversed: VecDeque<i64> = self.cards.iter().rev().cloned().collect();
        self.cards = reversed;
    }
    /// "Cut N": move the top `n` cards to the bottom; a negative `n` moves
    /// `len + n` cards, i.e. cuts from the bottom.
    fn cut(&mut self, n: i64) {
        let rotations = if n >= 0 {
            n
        } else {
            self.cards.len() as i64 + n
        };
        for _ in 0..rotations {
            let top = self.cards.pop_front().expect("deck must not be empty");
            self.cards.push_back(top);
        }
    }
    /// "Deal with increment N": place consecutive cards every `n` table
    /// positions, wrapping around.
    fn deal_with_increment(&mut self, n: i64) {
        // -1 marks an empty slot; the assert guards against collisions.
        let mut table = vec![-1i64; self.cards.len()];
        let mut ix: i64 = 0;
        for &card in self.cards.iter() {
            assert_eq!(table[ix as usize], -1);
            table[ix as usize] = card;
            ix = (ix + n) % table.len() as i64;
        }
        self.cards = table.into_iter().collect();
    }
    /// Snapshot of the deck as a `Vec`, top card first.
    fn cards(&self) -> Vec<i64> {
        self.cards.iter().cloned().collect()
    }
}
/// Parse the puzzle input: one shuffle instruction per line.
///
/// # Panics
/// Panics (via `assert!`) when a non-empty line matches none of the three
/// known instruction forms, or when a matched number fails to parse.
fn parse_input(s: &str) -> Vec<Technique> {
    let deal_into_new = regex::Regex::new(r"deal into new").unwrap();
    let cut = regex::Regex::new(r"cut (-?[0-9]+)").unwrap();
    let deal_with_inc = regex::Regex::new(r"deal with increment ([0-9]+)").unwrap();
    s.lines()
        .filter_map(|line| {
            if deal_into_new.is_match(line) {
                Some(Technique::DealIntoNew)
            } else if let Some(c) = cut.captures(line) {
                Some(Technique::Cut(c[1].parse().unwrap()))
            } else if let Some(c) = deal_with_inc.captures(line) {
                Some(Technique::DealWithIncrement(c[1].parse().unwrap()))
            } else {
                // Anything else must be a blank line.
                assert!(line.is_empty());
                None
            }
        })
        .collect()
}
// A transform is defined as k*x + m (mod cards)
/// Compose the whole shuffle sequence into one affine position map: a card
/// currently at position `x` ends up at position `k*x + m (mod cards)`.
///
/// Returns `(k, m)` normalized into `[0, cards)`.
///
/// NOTE(review): the products below assume `n * k` fits in i64 — true for
/// real puzzle inputs where increments/cuts are small, but not for
/// arbitrary `Technique` values. Confirm before reusing elsewhere.
fn find_transform(techniques: &[Technique], cards: i64) -> (i64, i64) {
    let mut k = 1;
    let mut m = 0;
    for technique in techniques {
        match *technique {
            Technique::DealIntoNew => {
                // x -> C-1 - x (mod C); composing with k*x + m gives
                // -(k)*x + (C-1-m).
                k = -k;
                m = (cards - 1 - m) % cards
            },
            Technique::Cut(n) => {
                // x -> x - n (mod C); only the offset changes.
                m = (m - n) % cards
            },
            Technique::DealWithIncrement(n) => {
                // x -> x * n (mod C); scales both coefficients.
                k = (n * k) % cards;
                m = (n * m) % cards;
            },
        }
    }
    // `%` above may leave negative representatives; |k|,|m| < cards, so a
    // single shift lands them in [0, cards).
    k = (k + cards) % cards;
    m = (m + cards) % cards;
    assert!(0 <= k && k < cards);
    assert!(0 <= m && m < cards);
    (k, m)
}
// base^expt (mod n)
/// Modular exponentiation by squaring, with i128 intermediates so the
/// multiplications cannot overflow for the 48-bit moduli used here.
///
/// # Panics
/// Panics when `expt` is negative.
fn exp_modn(base: i64, expt: i64, n: i64) -> i64 {
    assert!(expt >= 0);
    let modulus = n as i128;
    let mut square = base as i128;
    let mut remaining = expt;
    let mut result = 1i128;
    // Invariant: answer = result * square^remaining (mod modulus)
    while remaining > 0 {
        if remaining & 1 == 1 {
            result = result * square % modulus;
        }
        square = square * square % modulus;
        remaining >>= 1;
    }
    result as i64
}
/// Modular inverse of `x` modulo `n` via the extended Euclidean algorithm.
///
/// Returns `Some(x^-1 mod n)` in `[0, n)` when `gcd(x, n) == 1`, otherwise
/// `None`. `x` may be negative or `>= n`; it is normalized first.
///
/// # Panics
/// Panics if `n <= 0`, or if the internal Bézout invariant is violated
/// (which would indicate a bug in this function).
fn inverse_mod(x: i64, n: i64) -> Option<i64> {
    assert!(n > 0);
    // Normalize into [0, n); the Euclidean loop and the Bézout check both
    // operate on this normalized value.
    let x0 = ((x % n) + n) % n;
    let mut a = x0;
    let mut a1 = n;
    // Bézout coefficients: s*x0 + t*n == a holds at every step.
    let mut s = 1;
    let mut s1 = 0;
    let mut t = 0;
    let mut t1 = 1;
    while a1 > 0 {
        let q = a / a1;
        let r = a % a1;
        a = a1;
        a1 = r;
        let ss = s - q * s1;
        s = s1;
        s1 = ss;
        let tt = t - q * t1;
        t = t1;
        t1 = tt;
    }
    // The identity must be checked against the *normalized* input. (The
    // previous version checked against the raw `x`, which differs from `x0`
    // by a multiple of `n`, so the assert fired spuriously for negative or
    // out-of-range inputs such as inverse_mod(-1, 5).)
    assert_eq!((s as i128) * (x0 as i128) + (t as i128) * (n as i128), a as i128);
    if a == 1 {
        Some(((s % n) + n) % n)
    } else {
        None
    }
}
/// Multiply `x * y (mod n)` through a widening i128 product so the
/// intermediate cannot overflow. The result keeps the sign convention of
/// the i128 `%` operator (it may be negative when exactly one operand is).
fn mul_modn(x: i64, y: i64, n: i64) -> i64 {
    let wide = i128::from(x) * i128::from(y);
    (wide % i128::from(n)) as i64
}
// If f(x) = k*x + m (mod n), compute f^(expt)(x) (mod n) = k'*x + m' (mod n)
/// Raise the affine transform `f(x) = k*x + m (mod n)` to the `expt`-th
/// functional power, returning the coefficients `(k', m')` of `f^expt`.
///
/// Uses the closed form `m' = m * (k^expt - 1) * (k - 1)^-1` from the
/// geometric series `1 + k + ... + k^(expt-1)`.
fn exp_transform_modn(k: i64, m: i64, expt: i64, n: i64) -> (i64, i64) {
    let mul = |x, y| mul_modn(x, y, n);
    if k == 1 {
        // Degenerate case: f(x) = x + m, so f^expt(x) = x + expt*m. The
        // geometric series is just `expt`; the general path below would
        // try to invert k - 1 == 0 and panic on the unwrap.
        return (1, mul(expt % n, m));
    }
    // k^expt
    let k_expt = exp_modn(k, expt, n);
    // (k-1)^-1 — requires gcd(k-1, n) == 1, which holds for the prime
    // deck sizes this program uses.
    let km1_inv = inverse_mod(k - 1, n).unwrap();
    // 1 + k + k² + k³ + ... + k^(expt-1) = (k^expt - 1) / (k - 1)
    let k_series = mul(k_expt - 1, km1_inv);
    let mm = mul(k_series, m);
    (k_expt, mm)
}
/// Part 1: simulate the shuffle on a 10007-card deck and report where card
/// 2019 lands. Part 2: treat the shuffle as an affine map on positions,
/// raise it to a huge power, and invert it to find which card ends at 2020.
fn main() {
    let techniques = parse_input(&slurp_stdin());
    let mut deck = Deck::new(10007);
    for technique in &techniques {
        match *technique {
            Technique::DealIntoNew => deck.deal_into_new(),
            Technique::Cut(n) => deck.cut(n),
            Technique::DealWithIncrement(n) => deck.deal_with_increment(n),
        }
    }
    for (pos, card) in deck.cards().into_iter().enumerate() {
        if card == 2019 {
            println!("{}", pos);
            break;
        }
    }
    const CARDS: i64 = 119315717514047;
    const TIMES: i64 = 101741582076661;
    // Express the full transform as a transform on the indices: f(i) = k*i + m (mod CARDS)
    let (k, m) = find_transform(&techniques, CARDS);
    // Raise that transform to the power TIMES: f^TIMES(i) = kt*i + mt (mod CARDS)
    let (kt, mt) = exp_transform_modn(k, m, TIMES, CARDS);
    // Compute ans such that kt*ans + mt = 2020 (mod CARDS)
    // ans = (2020 - mt) * kt^-1 (mod CARDS)
    let kt_inv = inverse_mod(kt, CARDS).unwrap();
    let ans = mul_modn(2020 - mt, kt_inv, CARDS);
    // `ans` (and hence the product below) may be a negative representative;
    // normalize before comparing, otherwise this sanity check could fail
    // even though the answer is correct modulo CARDS.
    assert_eq!(
        ((mul_modn(kt, ans, CARDS) + mt) % CARDS + CARDS) % CARDS,
        2020
    );
    println!("{}", (ans + CARDS) % CARDS);
}
#[cfg(test)]
mod tests {
    use super::*;
    // The examples below are the worked 10-card decks from the puzzle text.
    #[test]
    fn test_deal() {
        let mut deck = Deck::new(10);
        deck.deal_into_new();
        assert_eq!(deck.cards, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }
    #[test]
    fn test_cut() {
        let mut deck = Deck::new(10);
        deck.cut(3);
        assert_eq!(deck.cards, vec![3, 4, 5, 6, 7, 8, 9, 0, 1, 2]);
        // A negative cut takes cards from the bottom instead of the top.
        let mut deck = Deck::new(10);
        deck.cut(-4);
        assert_eq!(deck.cards, vec![6, 7, 8, 9, 0, 1, 2, 3, 4, 5]);
    }
    #[test]
    fn test_deal_with_increment() {
        let mut deck = Deck::new(10);
        deck.deal_with_increment(3);
        assert_eq!(deck.cards, vec![0, 7, 4, 1, 8, 5, 2, 9, 6, 3]);
    }
}
|
extern crate cairo;
extern crate pango;
extern crate pangocairo;
extern crate gtk;
extern crate gdk;
extern crate gdk_sys;
use config::{Color, Config};
use gtk::prelude::*;
use status::StatusItem;
use std::cell::{Cell, RefCell};
use std::rc::Rc;
use std::sync::mpsc;
use std::thread;
/// One item in the status bar: a drawing area plus the state used to
/// render it.
pub struct StatusComponent {
    pub widget: gtk::DrawingArea,
    // Text drawn to the right of the icon.
    text: String,
    // Icon glyph (rendered with the "Material Icons" font); empty = no icon.
    icon: String,
    // Foreground color used for both icon and text.
    color: Color,
    // Pending width request, consumed by the next draw() call.
    size_request: Cell<SizeRequest>
}
/// A change pushed from a status item's update thread to the UI thread.
pub enum StatusChange {
    Text(String),
    Icon(String),
    Color(Color),
    Size(SizeRequest)
}
/// How the widget's requested width should be updated on the next draw.
#[derive(Copy, Clone)]
pub enum SizeRequest {
    // Grow the size request only if the content no longer fits.
    Expand,
    // Unconditionally set the size request to the content width.
    Set,
    // Leave the current size request untouched.
    Keep
}
impl StatusComponent {
    /// Build a new drawing-area-backed status widget for `status_item`.
    ///
    /// Spawns the item's update function on a background thread and polls
    /// its channel every 50 ms from the GTK main loop, applying any changes
    /// and queueing a redraw.
    pub fn new(status_item: &Box<StatusItem>, config: &'static Config) -> Rc<RefCell<Self>>
    {
        let widget = gtk::DrawingArea::new();
        widget.set_size_request(10, -1);
        widget.set_vexpand(true);
        let status_component = Rc::new(RefCell::new(StatusComponent {
            widget: widget,
            text: String::new(),
            icon: String::new(),
            color: Color::default(),
            size_request: Cell::new(SizeRequest::Keep)
        }));
        {
            let ref mut widget = status_component.borrow_mut().widget;
            // widget.connect_button_release_event(clone!(status_component => move |widget, event| {
            //     status_component.borrow().button_release(widget, event)
            // }));
            widget.connect_draw(clone!(status_component => move |widget, cx| {
                status_component.borrow().draw(widget, cx)
            }));
        }
        // Channel pair: the update thread sends batched StatusChange lists,
        // and the timeout below drains them on the UI thread.
        let (sx, rx) = mpsc::channel::<Vec<StatusChange>>();
        let update_fn = status_item.get_update_fun();
        thread::spawn(move || update_fn(sx, config));
        gtk::timeout_add(50, clone!(status_component => move || {
            if let Ok(changes) = rx.try_recv() {
                let mut comp = status_component.borrow_mut();
                for change in changes {
                    match change {
                        StatusChange::Text(text) => comp.text = text,
                        StatusChange::Icon(icon) => comp.icon = icon,
                        StatusChange::Color(color) => comp.color = color,
                        StatusChange::Size(req) => comp.size_request.set(req),
                    }
                }
                comp.widget.queue_draw();
            }
            Continue(true)
        }));
        status_component
    }
    /// Render the icon and text, then apply any pending size request.
    fn draw(&self, widget: &gtk::DrawingArea, context: &cairo::Context) -> gtk::Inhibit {
        let width = widget.get_allocated_width() as f64;
        let height = widget.get_allocated_height() as f64;
        let margin = 12.0; // around widget
        let padding = 10.0; // between icon and text
        let icon = &self.icon;
        let text = &self.text;
        let mut used_width = margin;
        if icon != "" {
            let font = pango::FontDescription::from_string("Material Icons 12");
            let layout = pangocairo::create_layout(context);
            layout.set_text(icon, icon.len() as i32);
            layout.set_font_description(Some(&font));
            // Extents come back in pango units; divide by SCALE for pixels.
            let extents = layout.get_extents().0;
            let (icon_x, icon_y) = (extents.x as f64 / pango::SCALE as f64,
                                    extents.y as f64 / pango::SCALE as f64);
            let (icon_width, icon_height) = (extents.width as f64 / pango::SCALE as f64,
                                             extents.height as f64 / pango::SCALE as f64);
            // Left-align the icon at the margin, vertically centred.
            let x = -icon_x as f64 + margin;
            let y = -icon_y as f64 + height / 2.0 - icon_height / 2.0;
            let Color(r, g, b, a) = self.color;
            context.set_source_rgba(r, g, b, a);
            context.move_to(x, y);
            pangocairo::show_layout(&context, &layout);
            used_width += icon_width + padding;
        }
        context.set_font_size(12.0);
        context.select_font_face("Droid Sans Mono",
                                 cairo::enums::FontSlant::Normal,
                                 cairo::enums::FontWeight::Normal);
        // Centre the text within the space remaining after the icon.
        let available_space = width - used_width - margin;
        let extents = context.text_extents(text);
        let x = used_width + available_space / 2.0 - extents.width / 2.0 - extents.x_bearing;
        let y = height / 2.0 - extents.height / 2.0 - extents.y_bearing;
        let Color(r, g, b, a) = self.color;
        context.set_source_rgba(r, g, b, a);
        context.move_to(x, y);
        context.show_text(text);
        used_width += extents.x_advance + margin;
        // Apply a one-shot size request, then fall back to Keep.
        match self.size_request.get() {
            SizeRequest::Expand => {
                if used_width > width {
                    widget.set_size_request(used_width as i32, -1);
                    self.size_request.set(SizeRequest::Keep);
                }
            },
            SizeRequest::Set => {
                widget.set_size_request(used_width as i32, -1);
                self.size_request.set(SizeRequest::Keep);
            },
            SizeRequest::Keep => {}
        }
        Inhibit(false)
    }
}
|
use std::{
collections::HashMap,
fmt::{self, Display},
io::Read,
num::NonZeroU32,
path::PathBuf,
str::FromStr,
};
use clap::{ArgGroup, Parser, Subcommand};
use lading::{
blackhole,
captures::CaptureManager,
config::{Config, Telemetry},
generator::{
self,
process_tree::{self},
},
inspector, observer,
signals::Shutdown,
target::{self, Behavior, Output},
target_metrics,
};
use metrics_exporter_prometheus::PrometheusBuilder;
use rand::{rngs::StdRng, SeedableRng};
use tokio::{
runtime::Builder,
signal,
sync::broadcast,
time::{sleep, Duration},
};
use tracing::{debug, error, info, warn};
/// Default on-disk location of the lading configuration file.
fn default_config_path() -> String {
    String::from("/etc/lading/lading.yaml")
}
/// Default behavior for the target's stdout/stderr: discard the output.
fn default_target_behavior() -> Behavior {
    Behavior::Quiet
}
/// Comma-separated `KEY=VAL` pairs taken from the command line,
/// e.g. `first=one,second=two`.
#[derive(Default, Clone)]
struct CliKeyValues {
    // Parsed pairs. HashMap iteration order is unspecified, so the
    // Display output order is unspecified too.
    inner: HashMap<String, String>,
}
impl Display for CliKeyValues {
    /// Renders each pair as `KEY=VAL,` — note the trailing comma.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        for (k, v) in self.inner.iter() {
            write!(f, "{}={},", k, v)?;
        }
        Ok(())
    }
}
impl FromStr for CliKeyValues {
    type Err = String;
    /// Parses `KEY=VAL,KEY2=VAL2`. Empty segments (e.g. from a trailing
    /// comma) are skipped. Splits on the *first* `=` only, so values may
    /// themselves contain `=` — the previous version silently truncated
    /// such values at the second `=`.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        let mut labels = HashMap::new();
        for kv in input.split(',') {
            if kv.is_empty() {
                continue;
            }
            let (key, value) = kv
                .split_once('=')
                .ok_or_else(|| String::from("pairs must be separated by '='"))?;
            labels.insert(key.into(), value.into());
        }
        Ok(Self { inner: labels })
    }
}
// Top-level CLI options. The ArgGroup below makes the three target flags
// mutually exclusive and requires exactly one of them. (The `///` lines are
// clap help text and are left untouched.)
#[derive(Parser)]
#[clap(version, about, long_about = None)]
#[clap(group(
    ArgGroup::new("target")
        .required(true)
        .args(&["target-path", "target-pid", "no-target"]),
))]
struct Opts {
    /// path on disk to the configuration file
    #[clap(long, default_value_t = default_config_path())]
    config_path: String,
    /// additional labels to apply to all captures, format KEY=VAL,KEY2=VAL
    #[clap(long)]
    global_labels: Option<CliKeyValues>,
    /// measure an externally-launched process by PID
    #[clap(long)]
    target_pid: Option<NonZeroU32>,
    /// disable target measurement
    #[clap(long)]
    no_target: bool,
    // The options below (group "binary-target") only make sense when lading
    // launches the target binary itself.
    /// the path of the target executable
    #[clap(long, group = "binary-target")]
    target_path: Option<PathBuf>,
    /// inherit the target environment variables from lading's environment
    #[clap(long, requires = "binary-target", action)]
    target_inherit_environment: bool,
    /// additional environment variables to apply to the target, format
    /// KEY=VAL,KEY2=VAL
    #[clap(long, requires = "binary-target")]
    target_environment_variables: Option<CliKeyValues>,
    /// arguments for the target executable
    #[clap(requires = "binary-target")]
    target_arguments: Vec<String>,
    /// the path to write target's stdout
    #[clap(long, default_value_t = default_target_behavior(), requires = "binary-target")]
    target_stdout_path: Behavior,
    /// the path to write target's stderr
    #[clap(long, default_value_t = default_target_behavior(), requires = "binary-target")]
    target_stderr_path: Behavior,
    /// the maximum amount of RSS bytes the target may consume before lading backs off load
    #[clap(long)]
    target_rss_bytes_limit: Option<byte_unit::Byte>,
    /// path on disk to write captures, will override prometheus-addr if both
    /// are set
    #[clap(long)]
    capture_path: Option<String>,
    /// address to bind prometheus exporter to, will be overridden by
    /// capture-path if both are set
    #[clap(long)]
    prometheus_addr: Option<String>,
    /// the maximum time to wait, in seconds, for controlled shutdown
    #[clap(long, default_value_t = 30)]
    max_shutdown_delay: u16,
    /// the time, in seconds, to run the target and collect samples about it
    #[clap(long, default_value_t = 120)]
    experiment_duration_seconds: u32,
    /// the time, in seconds, to allow the target to run without collecting
    /// samples
    #[clap(long, default_value_t = 30)]
    warmup_duration_seconds: u32,
    /// whether to ignore inspector configuration, if present, and not run the inspector
    #[clap(long)]
    disable_inspector: bool,
    /// Extra sub commands
    #[clap(subcommand)]
    extracmds: Option<ExtraCommands>,
}
// Hidden subcommands (excluded from --help); used for internal tooling.
#[derive(Subcommand, Debug)]
#[clap(hide = true)]
enum ExtraCommands {
    // Generate and spawn a process tree; see `run_process_tree`.
    ProcessTreeGen(ProcessTreeGen),
}
// Arguments for the hidden process-tree generator subcommand. The clap
// group enforces that exactly one of the two config sources is given.
#[derive(Parser, Debug)]
#[clap(group(
    ArgGroup::new("config")
        .required(true)
        .args(&["config-path", "config-content"]),
))]
struct ProcessTreeGen {
    /// path on disk to the configuration file
    #[clap(long)]
    config_path: Option<PathBuf>,
    /// string representing the configuration
    #[clap(long)]
    config_content: Option<String>,
}
/// Load the YAML config named by `--config-path` and fold the CLI overrides
/// (target selection, RSS limit, telemetry sink, global labels) into it.
///
/// # Panics
/// Panics when the file cannot be opened/read/parsed or an address fails to
/// parse — this runs during startup, before any servers exist.
fn get_config(ops: &Opts) -> Config {
    debug!(
        "Attempting to open configuration file at: {}",
        ops.config_path
    );
    let mut file: std::fs::File = std::fs::OpenOptions::new()
        .read(true)
        .open(&ops.config_path)
        .unwrap_or_else(|_| panic!("Could not open configuration file at: {}", &ops.config_path));
    let mut contents = String::new();
    file.read_to_string(&mut contents).unwrap();
    let mut config: Config = serde_yaml::from_str(&contents).unwrap();
    // The RSS limit is global process state, not part of `Config`.
    if let Some(rss_bytes_limit) = ops.target_rss_bytes_limit {
        target::Meta::set_rss_bytes_limit(rss_bytes_limit).unwrap();
    }
    // Exactly one of the three target flags is set (enforced by clap).
    let target = if ops.no_target {
        None
    } else if let Some(pid) = ops.target_pid {
        Some(target::Config::Pid(target::PidConfig { pid }))
    } else if let Some(path) = &ops.target_path {
        Some(target::Config::Binary(target::BinaryConfig {
            command: path.clone(),
            arguments: ops.target_arguments.clone(),
            inherit_environment: ops.target_inherit_environment,
            environment_variables: ops
                .target_environment_variables
                .clone()
                .unwrap_or_default()
                .inner,
            output: Output {
                stderr: ops.target_stderr_path.clone(),
                stdout: ops.target_stdout_path.clone(),
            },
        }))
    } else {
        unreachable!("clap ensures that exactly one target option is selected");
    };
    config.target = target;
    let options_global_labels = ops.global_labels.clone().unwrap_or_default();
    // Telemetry precedence: --capture-path beats --prometheus-addr, which
    // beats the file's own telemetry section. (The help text for both flags
    // documents capture-path as the winner; the previous code checked
    // prometheus-addr first, contradicting it — fixed here.) CLI labels
    // replace file labels in the first two branches but are merged into
    // them when the file's telemetry section is kept.
    if let Some(ref capture_path) = ops.capture_path {
        config.telemetry = Telemetry::Log {
            path: capture_path.parse().unwrap(),
            global_labels: options_global_labels.inner,
        };
    } else if let Some(ref prom_addr) = ops.prometheus_addr {
        config.telemetry = Telemetry::Prometheus {
            prometheus_addr: prom_addr.parse().unwrap(),
            global_labels: options_global_labels.inner,
        };
    } else {
        match config.telemetry {
            Telemetry::Prometheus {
                ref mut global_labels,
                ..
            } => {
                for (k, v) in options_global_labels.inner {
                    global_labels.insert(k, v);
                }
            }
            Telemetry::Log {
                ref mut global_labels,
                ..
            } => {
                for (k, v) in options_global_labels.inner {
                    global_labels.insert(k, v);
                }
            }
        }
    }
    config
}
/// Run one experiment to completion.
///
/// Installs the configured telemetry sink, spawns every configured server
/// (generators, inspector, blackholes, target-metrics, observer, target),
/// then blocks until ctrl-c, the experiment clock expiring, or the target
/// exiting — whichever comes first — and finally waits up to
/// `max_shutdown_delay` for tasks to drain.
async fn inner_main(
    experiment_duration: Duration,
    warmup_duration: Duration,
    max_shutdown_delay: Duration,
    disable_inspector: bool,
    config: Config,
) {
    let shutdown = Shutdown::new();
    // Set up the telemetry sub-system.
    //
    // We support two methods to exflitrate telemetry about the target from rig:
    // a passive prometheus export and an active log file. Only one can be
    // active at a time.
    match config.telemetry {
        Telemetry::Prometheus {
            prometheus_addr,
            global_labels,
        } => {
            let mut builder = PrometheusBuilder::new().with_http_listener(prometheus_addr);
            for (k, v) in global_labels {
                builder = builder.add_global_label(k, v);
            }
            builder.install().unwrap();
        }
        Telemetry::Log {
            path,
            global_labels,
        } => {
            let mut capture_manager = CaptureManager::new(path, shutdown.clone()).await;
            capture_manager.install();
            for (k, v) in global_labels {
                capture_manager.add_global_label(k, v);
            }
            let _capmgr = tokio::spawn(capture_manager.run());
        }
    }
    // Set up the application servers. These are, depending on configuration:
    //
    // * the "generator" which pushes load into
    // * the "target" which is the measured system and might push load into
    // * the "blackhole" which may or may not exist.
    //
    // There is also, maybe:
    //
    // * the "inspector" which is a sub-process that users can rig to inspect
    //   the target.
    // * the "observer" which reads procfs on Linux and reports relevant process
    //   detail to the capture log
    //
    // The broadcast channel fans the running-target notification out to
    // every server that subscribes before the target starts.
    let (tgt_snd, _) = broadcast::channel(1);
    //
    // GENERATOR
    //
    for cfg in config.generator {
        let tgt_rcv = tgt_snd.subscribe();
        let generator_server = generator::Server::new(cfg, shutdown.clone()).unwrap();
        let _gsrv = tokio::spawn(generator_server.run(tgt_rcv));
    }
    //
    // INSPECTOR
    //
    if let Some(inspector_conf) = config.inspector {
        if !disable_inspector {
            let tgt_rcv = tgt_snd.subscribe();
            let inspector_server =
                inspector::Server::new(inspector_conf, shutdown.clone()).unwrap();
            let _isrv = tokio::spawn(inspector_server.run(tgt_rcv));
        }
    }
    //
    // BLACKHOLE
    //
    if let Some(cfgs) = config.blackhole {
        for cfg in cfgs {
            let blackhole_server = blackhole::Server::new(cfg, shutdown.clone()).unwrap();
            let _bsrv = tokio::spawn(async {
                match blackhole_server.run().await {
                    Ok(()) => debug!("blackhole shut down successfully"),
                    Err(err) => warn!("blackhole failed with {:?}", err),
                }
            });
        }
    }
    //
    // TARGET METRICS
    //
    if let Some(cfgs) = config.target_metrics {
        for cfg in cfgs {
            let metrics_server = target_metrics::Server::new(cfg, shutdown.clone());
            tokio::spawn(async {
                match metrics_server.run().await {
                    Ok(()) => debug!("target_metrics shut down successfully"),
                    Err(err) => warn!("target_metrics failed with {:?}", err),
                }
            });
        }
    }
    //
    // OBSERVER
    //
    // Observer is not used when there is no target.
    let tsrv = if let Some(target) = config.target {
        let obs_rcv = tgt_snd.subscribe();
        let observer_server = observer::Server::new(config.observer, shutdown.clone()).unwrap();
        let _osrv = tokio::spawn(observer_server.run(obs_rcv));
        //
        // TARGET
        //
        let target_server = target::Server::new(target, shutdown.clone());
        let tsrv = tokio::spawn(target_server.run(tgt_snd));
        futures::future::Either::Left(tsrv)
    } else {
        // No target: this branch of the select below never completes.
        futures::future::Either::Right(futures::future::pending())
    };
    let experiment_sleep = async move {
        info!("target is running, now sleeping for warmup");
        sleep(warmup_duration).await;
        info!("warmup completed, collecting samples");
        sleep(experiment_duration).await;
    };
    tokio::select! {
        _ = signal::ctrl_c() => {
            info!("received ctrl-c");
            shutdown.signal().unwrap();
        },
        _ = experiment_sleep => {
            info!("experiment duration exceeded");
            shutdown.signal().unwrap();
        }
        res = tsrv => {
            match res {
                Ok(Err(e)) => {
                    error!("target shut down unexpectedly: {e}");
                    std::process::exit(1);
                }
                Ok(Ok(())) | Err(_) => {
                    // JoinError or a shutdown signal arrived
                    shutdown.signal().unwrap();
                }
            }
        }
    }
    info!(
        "Waiting for {} seconds for tasks to shutdown.",
        max_shutdown_delay.as_secs(),
    );
    shutdown.wait(max_shutdown_delay).await;
}
/// Load a process-tree configuration (from a file or an inline string),
/// generate the tree and spawn it.
///
/// # Panics
/// Panics when the configuration cannot be opened, read, or parsed.
fn run_process_tree(opts: ProcessTreeGen) {
    let mut contents = String::new();
    if let Some(path) = opts.config_path {
        debug!(
            "Attempting to open configuration file at: {}",
            path.display()
        );
        let mut file: std::fs::File = std::fs::OpenOptions::new()
            .read(true)
            .open(&path)
            .unwrap_or_else(|_| panic!("Could not open configuration file at: {}", path.display()));
        file.read_to_string(&mut contents).unwrap();
    } else if let Some(str) = &opts.config_content {
        contents = str.to_string()
    } else {
        unreachable!("clap ensures that exactly one target option is selected");
    };
    match process_tree::get_config(&contents) {
        Ok(config) => {
            info!("Generating a process tree.");
            let mut rng = StdRng::from_seed(config.seed);
            let nodes = process_tree::generate_tree(&mut rng, &config);
            process_tree::spawn_tree(&nodes, config.process_sleep_ns.get());
            info!("Bye. :)");
        }
        // Fixed typo in the user-facing message ("invalide" -> "invalid").
        Err(e) => panic!("invalid configuration: {}", e),
    }
}
/// Dispatch a hidden subcommand to its handler.
fn run_extra_cmds(cmds: ExtraCommands) {
    match cmds {
        // This command will call fork and the process must be kept fork-safe up to this point.
        ExtraCommands::ProcessTreeGen(opts) => run_process_tree(opts),
    }
}
/// Entry point: parse CLI options, run hidden subcommands (if any), then
/// build a tokio runtime and drive `inner_main` to completion.
fn main() {
    tracing_subscriber::fmt::init();
    info!("Starting lading run.");
    let opts: Opts = Opts::parse();
    // handle extra commands
    //
    // Hidden subcommands run before any runtime setup: `run_process_tree`
    // forks, so the process must still be fork-safe at this point.
    if let Some(cmds) = opts.extracmds {
        run_extra_cmds(cmds);
        return;
    }
    let config = get_config(&opts);
    let experiment_duration = Duration::from_secs(opts.experiment_duration_seconds.into());
    let warmup_duration = Duration::from_secs(opts.warmup_duration_seconds.into());
    // The maximum shutdown delay is shared between `inner_main` and this
    // function, hence the divide by two.
    let max_shutdown_delay = Duration::from_secs(opts.max_shutdown_delay.into()) / 2;
    let disable_inspector = opts.disable_inspector;
    // Build the runtime by hand so we control its shutdown below.
    let runtime = Builder::new_multi_thread()
        .enable_io()
        .enable_time()
        .build()
        .unwrap();
    runtime.block_on(inner_main(
        experiment_duration,
        warmup_duration,
        max_shutdown_delay,
        disable_inspector,
        config,
    ));
    // The splunk_hec generator spawns long running tasks that are not plugged
    // into the shutdown mechanism we have here. This is a bug and needs to be
    // addressed. However as a workaround we explicitly shutdown the
    // runtime. Even when the splunk_hec issue is addressed we'll continue this
    // practice as it's a reasonable safeguard.
    info!(
        "Shutting down runtime with a {} second delay. May leave orphaned tasks.",
        max_shutdown_delay.as_secs(),
    );
    runtime.shutdown_timeout(max_shutdown_delay);
    info!("Bye. :)");
}
#[cfg(test)]
mod tests {
    use super::*;
    // An empty string parses to an empty label set and renders back to "".
    #[test]
    fn cli_key_values_deserializes_empty_string_to_empty_set() {
        let val = "";
        let deser = CliKeyValues::from_str(val);
        let deser = deser.unwrap().to_string();
        assert_eq!(deser, "");
    }
    #[test]
    fn cli_key_values_deserializes_kv_list() {
        let val = "first=one,second=two";
        let deser = CliKeyValues::from_str(val);
        let deser = deser.unwrap().to_string();
        // CliKeyValues does not preserve order. That's okay! It's just less
        // convenient to assert against.
        assert!(deser == "first=one,second=two," || deser == "second=two,first=one,");
    }
    // A trailing comma is tolerated: empty segments are skipped.
    #[test]
    fn cli_key_values_deserializes_kv_list_trailing_comma() {
        let val = "first=one,";
        let deser = CliKeyValues::from_str(val);
        let deser = deser.unwrap().to_string();
        assert_eq!(deser, "first=one,");
    }
}
|
extern crate itertools;
use itertools::Itertools;
use std::collections::HashMap;
/// A palindromic product together with every factor pair that produces it.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Palindrome {
    factors_tuple: Vec<(u64, u64)>,
    value: u64,
}
impl Palindrome {
    /// Create a palindrome from its first factor pair `(a, b)`; the stored
    /// value is `a * b`.
    pub fn new(a: u64, b: u64) -> Palindrome {
        Palindrome {
            factors_tuple: vec![(a, b)],
            value: a * b,
        }
    }
    /// The palindromic product itself.
    pub fn value(&self) -> u64 {
        self.value
    }
    /// Record an additional factor pair producing the same value.
    pub fn insert(&mut self, a: u64, b: u64) {
        self.factors_tuple.push((a, b));
    }
}
/// Check whether `number` reads the same forwards and backwards in base 10,
/// without allocating a string.
///
/// Peels digits off the right into `rebmun` until it holds the reversed
/// right half, then compares halves (dropping the middle digit when the
/// length is odd).
fn is_palindrome(mut number: u64) -> bool {
    // 0 is a single-digit palindrome; the multiple-of-10 shortcut below
    // would otherwise misclassify it.
    if number == 0 {
        return true;
    }
    // a positive multiple of 10 cannot be a palindrome (it would need a
    // leading zero)
    if number % 10 == 0 {
        return false;
    }
    // assemble the reverse of the right half of number
    let mut rebmun: u64 = 0;
    while rebmun < number {
        // push the rightmost digit onto the reverse
        rebmun = 10 * rebmun + number % 10;
        // pop the rightmost digit from the number
        number /= 10;
    }
    // match the left half with the reverse of the right
    // accounting for the possibility of an odd-length number
    number == rebmun || number == rebmun / 10
}
/// Find the smallest and largest palindromic products of two factors drawn
/// from `min..=max` (factors may repeat), each with all of its factor
/// pairs. Returns `None` when no product in the range is palindromic.
pub fn palindrome_products(min: u64, max: u64) -> Option<(Palindrome, Palindrome)> {
    let mut found: HashMap<u64, Palindrome> = HashMap::new();
    // Enumerate unordered factor pairs (a, b) with a <= b — the same pairs,
    // in the same order, as combinations_with_replacement(2) produced.
    for a in min..=max {
        for b in a..=max {
            let product = a * b;
            if !is_palindrome(product) {
                continue;
            }
            // First sighting creates the entry; repeats add a factor pair.
            found
                .entry(product)
                .and_modify(|p| p.insert(a, b))
                .or_insert_with(|| Palindrome::new(a, b));
        }
    }
    let smallest = found.keys().min().cloned()?;
    let largest = found.keys().max().cloned()?;
    Some((found[&smallest].clone(), found[&largest].clone()))
}
|
#![allow(dead_code)]
use std::mem;
use std::result::Result;
use crate::errors::*;
const DOS_HEADER_FIELD_LEN_RES1: usize = 4 * 2;
const DOS_HEADER_FIELD_LEN_RES2: usize = 10 * 2;
/// Parsed MS-DOS executable (MZ) header.
///
/// Field names and order mirror the canonical IMAGE_DOS_HEADER layout; the
/// two reserved regions are kept as raw byte arrays.
#[derive(Debug, Default)]
pub struct DosHeader {
    e_magic: u16,       /* magic number */
    e_cblp: u16,        /* num of bytes on last page */
    e_cp: u16,          /* num of pages in file */
    e_crlc: u16,        /* number of relocations */
    e_cparhdr: u16,     /* size of header in pages */
    e_minalloc: u16,    /* min extra paragraphs */
    e_maxalloc: u16,    /* max extra paragraphs */
    e_ss: u16,          /* init (relative) SS value */
    e_sp: u16,          /* init SP value */
    e_csum: u16,        /* checksum */
    e_ip: u16,          /* init IP value */
    e_cs: u16,          /* init (relative) CS value */
    e_lfarlc: u16,      /* addr relocation table */
    e_ovno: u16,        /* overlay number */
    e_res: [u8; DOS_HEADER_FIELD_LEN_RES1], /* (reserved) */
    e_oemid: u16,       /* OEM identifier */
    e_oeminfo: u16,     /* OEM information */
    e_res2: [u8; DOS_HEADER_FIELD_LEN_RES2], /* (reserved) */
    e_lfanew: u32       /* file address of PE header */
}
impl DosHeader {
    /* Accessors: one get/set pair per raw header field. */
    pub fn get_magic(&self) -> u16 { self.e_magic }
    pub fn set_magic(&mut self, magic: u16) { self.e_magic = magic; }
    pub fn get_cblp(&self) -> u16 { self.e_cblp }
    pub fn set_cblp(&mut self, cblp: u16) { self.e_cblp = cblp; }
    pub fn get_cp(&self) -> u16 { self.e_cp }
    pub fn set_cp(&mut self, cp: u16) { self.e_cp = cp; }
    pub fn get_crlc(&self) -> u16 { self.e_crlc }
    pub fn set_crlc(&mut self, crlc: u16) { self.e_crlc = crlc; }
    pub fn get_cparhdr(&self) -> u16 { self.e_cparhdr }
    pub fn set_cparhdr(&mut self, cparhdr: u16) { self.e_cparhdr = cparhdr; }
    pub fn get_minalloc(&self) -> u16 { self.e_minalloc }
    pub fn set_minalloc(&mut self, minalloc: u16) { self.e_minalloc = minalloc; }
    pub fn get_maxalloc(&self) -> u16 { self.e_maxalloc }
    pub fn set_maxalloc(&mut self, maxalloc: u16) { self.e_maxalloc = maxalloc; }
    pub fn get_ss(&self) -> u16 { self.e_ss }
    pub fn set_ss(&mut self, ss: u16) { self.e_ss = ss; }
    pub fn get_sp(&self) -> u16 { self.e_sp }
    pub fn set_sp(&mut self, sp: u16) { self.e_sp = sp; }
    pub fn get_csum(&self) -> u16 { self.e_csum }
    pub fn set_csum(&mut self, csum: u16) { self.e_csum = csum; }
    pub fn get_ip(&self) -> u16 { self.e_ip }
    pub fn set_ip(&mut self, ip: u16) { self.e_ip = ip; }
    pub fn get_cs(&self) -> u16 { self.e_cs }
    pub fn set_cs(&mut self, cs: u16) { self.e_cs = cs; }
    pub fn get_lfarlc(&self) -> u16 { self.e_lfarlc }
    pub fn set_lfarlc(&mut self, lfarlc: u16) { self.e_lfarlc = lfarlc; }
    pub fn get_ovno(&self) -> u16 { self.e_ovno }
    pub fn set_ovno(&mut self, ovno: u16) { self.e_ovno = ovno; }
    pub fn get_res(&self) -> [u8; DOS_HEADER_FIELD_LEN_RES1] { self.e_res }
    pub fn set_res(&mut self, res: [u8; DOS_HEADER_FIELD_LEN_RES1]) { self.e_res = res; }
    pub fn get_oemid(&self) -> u16 { self.e_oemid }
    pub fn set_oemid(&mut self, oemid: u16) { self.e_oemid = oemid; }
    pub fn get_oeminfo(&self) -> u16 { self.e_oeminfo }
    pub fn set_oeminfo(&mut self, oeminfo: u16) { self.e_oeminfo = oeminfo; }
    pub fn get_res2(&self) -> [u8; DOS_HEADER_FIELD_LEN_RES2] { self.e_res2 }
    pub fn set_res2(&mut self, res2: [u8; DOS_HEADER_FIELD_LEN_RES2]) { self.e_res2 = res2; }
    pub fn get_lfanew(&self) -> u32 { self.e_lfanew }
    pub fn set_lfanew(&mut self, lfanew: u32) { self.e_lfanew = lfanew; }
    /// Read the big-endian u16 at byte offset `off`.
    fn u16_be(bytes: &[u8], off: usize) -> u16 {
        ((bytes[off] as u16) << 8) | bytes[off + 1] as u16
    }
    /// Read the little-endian u16 at byte offset `off`.
    fn u16_le(bytes: &[u8], off: usize) -> u16 {
        ((bytes[off + 1] as u16) << 8) | bytes[off] as u16
    }
    /// Parse a DOS header from big-endian `bytes`.
    ///
    /// # Errors
    /// Returns `ButylError::InsufficientDataError` when `bytes` is shorter
    /// than the header.
    pub fn from_be_bytes(bytes: &[u8]) -> Result<DosHeader, ButylError> {
        if bytes.len() < mem::size_of::<DosHeader>() { /* bounds check */
            return Err(ButylError::InsufficientDataError);
        }
        let mut dos_header: DosHeader = DosHeader::default();
        dos_header.e_magic = Self::u16_be(bytes, 0);
        dos_header.e_cblp = Self::u16_be(bytes, 2);
        dos_header.e_cp = Self::u16_be(bytes, 4);
        dos_header.e_crlc = Self::u16_be(bytes, 6);
        dos_header.e_cparhdr = Self::u16_be(bytes, 8);
        dos_header.e_minalloc = Self::u16_be(bytes, 10);
        dos_header.e_maxalloc = Self::u16_be(bytes, 12);
        dos_header.e_ss = Self::u16_be(bytes, 14);
        dos_header.e_sp = Self::u16_be(bytes, 16);
        dos_header.e_csum = Self::u16_be(bytes, 18);
        dos_header.e_ip = Self::u16_be(bytes, 20);
        dos_header.e_cs = Self::u16_be(bytes, 22);
        dos_header.e_lfarlc = Self::u16_be(bytes, 24);
        dos_header.e_ovno = Self::u16_be(bytes, 26);
        // Reserved regions are opaque byte arrays: copy them verbatim.
        dos_header.e_res.copy_from_slice(&bytes[28..36]);
        dos_header.e_oemid = Self::u16_be(bytes, 36);
        dos_header.e_oeminfo = Self::u16_be(bytes, 38);
        dos_header.e_res2.copy_from_slice(&bytes[40..60]);
        dos_header.e_lfanew = ((bytes[60] as u32) << 24)
            | ((bytes[61] as u32) << 16)
            | ((bytes[62] as u32) << 8)
            | bytes[63] as u32;
        Ok(dos_header)
    }
    /// Parse a DOS header from little-endian `bytes` (the on-disk format).
    ///
    /// # Errors
    /// Returns `ButylError::InsufficientDataError` when `bytes` is shorter
    /// than the header.
    pub fn from_le_bytes(bytes: &[u8]) -> Result<DosHeader, ButylError> {
        if bytes.len() < mem::size_of::<DosHeader>() { /* bounds check */
            return Err(ButylError::InsufficientDataError);
        }
        let mut dos_header: DosHeader = DosHeader::default();
        dos_header.e_magic = Self::u16_le(bytes, 0);
        dos_header.e_cblp = Self::u16_le(bytes, 2);
        dos_header.e_cp = Self::u16_le(bytes, 4);
        dos_header.e_crlc = Self::u16_le(bytes, 6);
        dos_header.e_cparhdr = Self::u16_le(bytes, 8);
        dos_header.e_minalloc = Self::u16_le(bytes, 10);
        dos_header.e_maxalloc = Self::u16_le(bytes, 12);
        dos_header.e_ss = Self::u16_le(bytes, 14);
        dos_header.e_sp = Self::u16_le(bytes, 16);
        dos_header.e_csum = Self::u16_le(bytes, 18);
        dos_header.e_ip = Self::u16_le(bytes, 20);
        dos_header.e_cs = Self::u16_le(bytes, 22);
        dos_header.e_lfarlc = Self::u16_le(bytes, 24);
        dos_header.e_ovno = Self::u16_le(bytes, 26);
        // BUG FIX: the reserved regions were previously copied in *reversed*
        // byte order here. They are plain byte arrays with no endianness,
        // so they must be copied verbatim — exactly as in `from_be_bytes`.
        dos_header.e_res.copy_from_slice(&bytes[28..36]);
        dos_header.e_oemid = Self::u16_le(bytes, 36);
        dos_header.e_oeminfo = Self::u16_le(bytes, 38);
        dos_header.e_res2.copy_from_slice(&bytes[40..60]);
        dos_header.e_lfanew = ((bytes[63] as u32) << 24)
            | ((bytes[62] as u32) << 16)
            | ((bytes[61] as u32) << 8)
            | bytes[60] as u32;
        Ok(dos_header)
    }
}
/// A DOS executable: the parsed header plus a borrow of the raw file bytes.
#[derive(Debug)]
pub struct DosFile<'a> {
    // Parsed DOS header (first 64 bytes of `data`).
    header: DosHeader,
    // The full, unmodified input buffer the header was parsed from.
    data: &'a[u8]
}
impl<'a> DosFile<'a> {
    /// Parses a DOS executable from big-endian encoded bytes, keeping a
    /// borrow of the raw buffer alongside the parsed header.
    pub fn from_be_bytes(data: &[u8]) -> Result<DosFile, ButylError> {
        let header = DosHeader::from_be_bytes(data)?;
        Ok(DosFile { header, data })
    }

    /// Parses a DOS executable from little-endian encoded bytes.
    pub fn from_le_bytes(data: &[u8]) -> Result<DosFile, ButylError> {
        let header = DosHeader::from_le_bytes(data)?;
        Ok(DosFile { header, data })
    }
}
|
// NOTE: svd2rust-generated accessor code for the SPI/I2S TXDR register.
// Do not hand-edit the generated items; they are normally regenerated from
// the device's SVD description.
#[doc = "Register `TXDR` writer"]
pub type W = crate::W<TXDR_SPEC>;
#[doc = "Field `TXDR` writer - transmit data register The register serves as an interface with TxFIFO. A write to it accesses TxFIFO. Note: In SPI mode, data is always right-aligned. Alignment of data at I2S mode depends on DATLEN and DATFMT setting. Unused bits are ignored when writing to the register, and read as zero when the register is read. Note: DR can be accessed byte-wise (8-bit access): in this case only one data-byte is written by single access. halfword-wise (16 bit access) in this case 2 data-bytes or 1 halfword-data can be written by single access. word-wise (32 bit access). In this case 4 data-bytes or 2 halfword-data or word-data can be written by single access. Write access of this register less than the configured data size is forbidden."]
pub type TXDR_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl W {
    #[doc = "Bits 0:31 - transmit data register The register serves as an interface with TxFIFO. A write to it accesses TxFIFO. Note: In SPI mode, data is always right-aligned. Alignment of data at I2S mode depends on DATLEN and DATFMT setting. Unused bits are ignored when writing to the register, and read as zero when the register is read. Note: DR can be accessed byte-wise (8-bit access): in this case only one data-byte is written by single access. halfword-wise (16 bit access) in this case 2 data-bytes or 1 halfword-data can be written by single access. word-wise (32 bit access). In this case 4 data-bytes or 2 halfword-data or word-data can be written by single access. Write access of this register less than the configured data size is forbidden."]
    #[inline(always)]
    #[must_use]
    pub fn txdr(&mut self) -> TXDR_W<TXDR_SPEC, 0> {
        TXDR_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SPI/I2S transmit data register\n\nYou can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`txdr::W`](W). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct TXDR_SPEC;
impl crate::RegisterSpec for TXDR_SPEC {
    type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [`txdr::W`](W) writer structure"]
impl crate::Writable for TXDR_SPEC {
    // No bits need a fixed 0/1 written when modifying other fields.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets TXDR to value 0"]
impl crate::Resettable for TXDR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
//Kata: https://www.codewars.com/kata/5511b2f550906349a70004e1/train/rust
/// Returns the last decimal digit of `str1` raised to the power `str2`,
/// where both numbers are given as (possibly huge) decimal strings.
///
/// The kata defines `0^0 == 1`.
///
/// # Panics
/// Panics if `str1` is empty or either argument contains non-digit
/// characters (the original contract — inputs are assumed well-formed).
pub fn last_digit(str1: &str, str2: &str) -> i32 {
    // Anything to the power of zero is 1 (including 0^0 per the kata).
    if str2 == "0" {
        return 1;
    }
    // Only the last digit of the base matters for the last digit of a power.
    let base_digit: u32 = str1[str1.len() - 1..].parse().unwrap();
    // For every decimal digit, the last digits of its powers repeat with a
    // period dividing 4, so only the exponent modulo 4 matters. The last two
    // decimal digits of the exponent determine its value mod 4.
    let tail_start = str2.len().saturating_sub(2);
    let exp_mod = str2[tail_start..].parse::<u32>().unwrap() % 4;
    // A residue of 0 with a nonzero exponent means a full cycle, i.e. an
    // effective exponent of 4. (Bug fix: the previous code used pow(0) == 1
    // here, so e.g. last_digit("2", "4") wrongly returned 1 instead of 6.)
    let exp = if exp_mod == 0 { 4 } else { exp_mod };
    // base_digit <= 9 and exp <= 4, so pow cannot overflow u32.
    (base_digit.pow(exp) % 10) as i32
}
|
extern crate libc;
/// A game participant, identified by a numeric id.
pub struct Player {
    id: i32
}

impl Player {
    /// Creates a player with the default id of zero.
    pub fn new() -> Player {
        Self { id: 0 }
    }
}
//! # Firestore document access and Firebase Auth
//!
//! This crate allows you to easily access Google Firestore documents
//! and handles all the finicky authentication details for you.
extern crate regex;
extern crate ring;
extern crate untrusted;
#[cfg(feature = "faststart")]
extern crate bincode;
pub mod credentials;
pub mod errors;
pub mod sessions;
pub mod documents;
mod dto;
pub mod rocket;
pub mod users;
/// Use the firebase documents API with this auth bearer
pub trait FirebaseAuthBearer<'a> {
    /// The Google Cloud / Firebase project id this session belongs to.
    fn projectid(&'a self) -> &'a str;
    /// Returns a bearer token for authenticating API requests.
    /// NOTE(review): takes `&mut self`, presumably so implementations can
    /// refresh and cache an expired token — confirm against implementors.
    fn bearer(&'a mut self) -> &'a str;
}
// Integration tests: these require a real `firebase-service-account.json`
// credentials file and network access to Firestore / Firebase Auth, so they
// are not hermetic and will fail in an offline environment.
#[cfg(test)]
mod tests {
    use super::*;
    use serde::{Deserialize, Serialize};

    /// Round-trips a document through Firestore using a service-account
    /// session and verifies the bearer token is cached between calls.
    #[test]
    fn service_account_session() -> errors::Result<()> {
        let cred = credentials::Credentials::from_file("firebase-service-account.json")
            .expect("Read credentials file");
        assert!(cred.public_key(&cred.private_key_id).is_some());
        let mut session = sessions::service_account::Session::new(cred).unwrap();
        let b = session.bearer().to_owned();
        // Check if cached value is used
        assert_eq!(session.bearer(), &b);
        let obj = DemoDTO {
            a_string: "abcd".to_owned(),
            an_int: 14,
            a_timestamp: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Nanos, true),
        };
        // Write, then read the same document back and verify the fields.
        documents::write(&mut session, "tests", Some("service_test"), &obj)?;
        let read: DemoDTO = documents::read(&mut session, "tests", "service_test")?;
        assert_eq!(read.a_string, "abcd");
        assert_eq!(read.an_int, 14);
        Ok(())
    }

    /// Simple serializable payload used by both tests in this module.
    #[derive(Debug, Serialize, Deserialize)]
    struct DemoDTO {
        a_string: String,
        an_int: u32,
        a_timestamp: String,
    }

    /// Exercises the user-session flow end to end: refresh-token handling,
    /// access-token re-authentication, document write/read/query/list/delete.
    #[test]
    fn user_account_session() -> errors::Result<()> {
        let test_user_id = "Io2cPph06rUWM3ABcIHguR3CIw6v1";
        let cred = credentials::Credentials::from_file("firebase-service-account.json")
            .expect("Read credentials file");
        // Read refresh token from file if possible instead of generating a new refresh token each time
        let refresh_token: String = match std::fs::read_to_string("refresh-token-for-tests.txt") {
            Ok(v) => v,
            Err(e) => {
                // Only a missing file is acceptable; any other I/O error aborts.
                if e.kind() != std::io::ErrorKind::NotFound {
                    return Err(errors::FirebaseError::IO(e));
                }
                String::new()
            }
        };
        // Generate a new refresh token if necessary
        let user_session: sessions::user::Session = if refresh_token.is_empty() {
            let session = sessions::user::Session::by_user_id(&cred, test_user_id)?;
            // Persist the token so subsequent test runs can reuse it.
            std::fs::write(
                "refresh-token-for-tests.txt",
                &session.refresh_token.as_ref().unwrap(),
            )?;
            session
        } else {
            sessions::user::Session::by_refresh_token(&cred, &refresh_token)?
        };
        assert_eq!(user_session.userid, test_user_id);
        assert_eq!(user_session.projectid, cred.project_id);
        // Re-authenticate with the raw access token to cover that code path.
        let mut user_session =
            sessions::user::Session::by_access_token(&cred, &user_session.bearer)?;
        assert_eq!(user_session.userid, test_user_id);
        let obj = DemoDTO {
            a_string: "abc".to_owned(),
            an_int: 12,
            a_timestamp: chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Nanos, true),
        };
        // Test writing
        let result = documents::write(&mut user_session, "tests", Some("test"), &obj)?;
        assert_eq!(result.document_id, "test");
        // Server-reported update time should be recent (within a minute).
        let duration = chrono::Utc::now().signed_duration_since(result.update_time.unwrap());
        assert!(
            duration.num_seconds() < 60,
            "now = {}, updated: {}, created: {}",
            chrono::Utc::now(),
            result.update_time.unwrap(),
            result.create_time.unwrap()
        );
        // Test reading
        let read: DemoDTO = documents::read(&mut user_session, "tests", "test")?;
        assert_eq!(read.a_string, "abc");
        assert_eq!(read.an_int, 12);
        let user_info_container = users::userinfo(&user_session)?;
        assert_eq!(
            user_info_container.users[0].localId.as_ref().unwrap(),
            test_user_id
        );
        // Query for all documents with field "a_string" and value "abc"
        let results: Vec<DemoDTO> = documents::query(
            &mut user_session,
            "tests",
            "abc",
            dto::FieldOperator::EQUAL,
            "a_string",
        )?;
        assert_eq!(results.len(), 1);
        assert_eq!(results[0].a_string, "abc");
        let mut count = 0;
        let list_it: documents::List<DemoDTO, _> = documents::list(&mut user_session, "tests");
        for doc in list_it {
            count += 1;
        }
        // NOTE(review): assumes exactly "service_test" and "test" exist in the
        // "tests" collection at this point — depends on test ordering/state.
        assert_eq!(count, 2);
        documents::delete(&mut user_session, "tests/test")?;
        // Check if document is indeed removed
        let results: Vec<DemoDTO> = documents::query(
            &mut user_session,
            "tests",
            "abc",
            dto::FieldOperator::EQUAL,
            "a_string",
        )?;
        assert_eq!(results.len(), 0);
        Ok(())
    }
}
|
#![warn(clippy::all)]
//! Core data structures for working with point cloud data
//!
//! Pasture provides data structures for reading, writing and in-memory handling of arbitrary point cloud data.
//! The best way to get started with Pasture is to look at the [example code](https://github.com/Mortano/pasture/tree/main/pasture-core/examples).
//! For understanding Pasture, it is best to look at the [PointLayout](crate::layout::PointLayout) type and the [containers](crate::containers) module.
pub extern crate nalgebra;
extern crate self as pasture_core;
pub mod containers;
/// Defines attributes and data layout of point cloud data
pub mod layout;
/// Useful mathematical tools when working with point cloud data
pub mod math;
/// Data structures for handling point cloud metadata
pub mod meta;
/// Utilities
pub mod util;
|
use std::{cell::UnsafeCell, ffi::CString, marker::PhantomData, ops::Deref};
use anyhow::Result;
use necsim_core::{
cogs::{
CoalescenceSampler, DispersalSampler, EmigrationExit, Habitat, ImmigrationEntry,
LineageReference, LineageStore, MinSpeciationTrackingEventSampler, PrimeableRng,
SingularActiveLineageSampler, SpeciationProbability, TurnoverRate,
},
reporter::boolean::Boolean,
};
use rust_cuda::{
rustacuda::{function::Function, module::Module},
rustacuda_core::DeviceCopy,
};
use rust_cuda::{common::RustToCuda, host::CudaDropWrapper};
use rustcoalescence_algorithms_cuda_kernel_ptx_jit::host::compiler::PtxJITCompiler;
use super::{specialiser, SimulationKernel};
impl<
    'k,
    H: Habitat + RustToCuda,
    G: PrimeableRng + RustToCuda,
    R: LineageReference<H> + DeviceCopy,
    S: LineageStore<H, R> + RustToCuda,
    X: EmigrationExit<H, G, R, S> + RustToCuda,
    D: DispersalSampler<H, G> + RustToCuda,
    C: CoalescenceSampler<H, R, S> + RustToCuda,
    T: TurnoverRate<H> + RustToCuda,
    N: SpeciationProbability<H> + RustToCuda,
    E: MinSpeciationTrackingEventSampler<H, G, R, S, X, D, C, T, N> + RustToCuda,
    I: ImmigrationEntry + RustToCuda,
    A: SingularActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I> + RustToCuda,
    ReportSpeciation: Boolean,
    ReportDispersal: Boolean,
> SimulationKernel<'k, H, G, R, S, X, D, C, T, N, E, I, A, ReportSpeciation, ReportDispersal>
{
    /// Loads the CUDA kernel specialised for this set of simulation component
    /// types, then hands a short-lived `SimulationKernel` borrowing the
    /// compiler/module/entry-point state to `inner` and returns its result.
    ///
    /// `ptx_jit` selects whether PTX JIT recompilation is enabled for the
    /// kernel held by the constructed `SimulationKernel`.
    pub fn with_kernel<Q, F>(ptx_jit: bool, inner: F) -> Result<Q>
    where
        for<'s> F: FnOnce(
            &'s mut SimulationKernel<
                's,
                H,
                G,
                R,
                S,
                X,
                D,
                C,
                T,
                N,
                E,
                I,
                A,
                ReportSpeciation,
                ReportDispersal,
            >,
        ) -> Result<Q>,
    {
        // Load the module PTX &CStr containing the kernel function
        let ptx_cstr = specialiser::get_ptx_cstr::<
            H,
            G,
            R,
            S,
            X,
            D,
            C,
            T,
            N,
            E,
            I,
            A,
            ReportSpeciation,
            ReportDispersal,
        >();
        // Initialise the PTX JIT compiler with the original PTX source string
        let mut compiler = PtxJITCompiler::new(ptx_cstr);
        // Compile the CUDA module
        #[allow(unused_mut)]
        let mut module =
            UnsafeCell::new(CudaDropWrapper::from(Module::load_from_string(ptx_cstr)?));
        // Load the kernel function from the module
        let mut entry_point =
            unsafe { &*module.get() }.get_function(&CString::new("simulate").unwrap())?;
        // Safety: the mut `module` is only safe because:
        // - `entry_point` is always dropped before `module` replaced
        // - neither are mutably changed internally, only replaced
        let mut kernel = SimulationKernel {
            compiler: &mut compiler,
            ptx_jit,
            module: unsafe { &mut *module.get() },
            entry_point: &mut entry_point,
            // PhantomData pins the component type parameters without storing them.
            marker: PhantomData::<(
                H,
                G,
                R,
                S,
                X,
                D,
                C,
                T,
                N,
                E,
                I,
                A,
                ReportSpeciation,
                ReportDispersal,
            )>,
        };
        // All borrowed state (compiler, module, entry point) outlives this call.
        inner(&mut kernel)
    }

    /// Returns the loaded CUDA kernel entry point ("simulate").
    pub fn function(&self) -> &Function {
        self.entry_point
    }
}
/// A `SimulationKernel` dereferences to the CUDA `Module` its kernel was
/// loaded from, exposing the module API directly.
impl<
    'k,
    H: Habitat + RustToCuda,
    G: PrimeableRng + RustToCuda,
    R: LineageReference<H> + DeviceCopy,
    S: LineageStore<H, R> + RustToCuda,
    X: EmigrationExit<H, G, R, S> + RustToCuda,
    D: DispersalSampler<H, G> + RustToCuda,
    C: CoalescenceSampler<H, R, S> + RustToCuda,
    T: TurnoverRate<H> + RustToCuda,
    N: SpeciationProbability<H> + RustToCuda,
    E: MinSpeciationTrackingEventSampler<H, G, R, S, X, D, C, T, N> + RustToCuda,
    I: ImmigrationEntry + RustToCuda,
    A: SingularActiveLineageSampler<H, G, R, S, X, D, C, T, N, E, I> + RustToCuda,
    ReportSpeciation: Boolean,
    ReportDispersal: Boolean,
> Deref
    for SimulationKernel<'k, H, G, R, S, X, D, C, T, N, E, I, A, ReportSpeciation, ReportDispersal>
{
    type Target = Module;
    fn deref(&self) -> &Self::Target {
        self.module
    }
}
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
use std::fmt;
use std::ops::Range;
/// Error type for resolving a target URI against a base URI.
///
/// Emitted by [`AnyUriRef::write_resolved`], [`AnyUriRef::resolved`],
/// and a few others.
///
/// [`AnyUriRef::write_resolved`]: async-coap-uri::AnyUriRef::write_resolved
/// [`AnyUriRef::resolved`]: async-coap-uri::AnyUriRef::resolved
#[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)]
pub enum ResolveError {
    /// The URI-reference being given as a base cannot be used as a base for the given
    /// target URI-reference.
    CannotBeABase,
    /// Unable to write to the given [`core::fmt::Write`] instance.
    WriteFailure,
}

impl fmt::Display for ResolveError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Map each variant to its static description, then emit it once.
        let msg = match self {
            Self::CannotBeABase => {
                "given uri-ref cannot be used as a base for the target uri-ref"
            }
            Self::WriteFailure => "unable to write to the given `fmt::Write` instance",
        };
        f.write_str(msg)
    }
}

#[cfg(feature = "std")]
impl ::std::error::Error for ResolveError {}

/// Transparent conversions from [`core::fmt::Error`] to [`ResolveError`].
impl From<::core::fmt::Error> for ResolveError {
    fn from(_: ::core::fmt::Error) -> Self {
        // Any formatting failure is reported as a write failure.
        ResolveError::WriteFailure
    }
}
/// URI parse error type.
///
/// This type indicates the details of an error that occurs while parsing a URI.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct ParseError {
    /// The kind of parse failure (constructed from a static description).
    desc: ParseErrorKind,
    /// Byte range of the offending part of the input, when known.
    span: Option<Range<usize>>,
}
impl ParseError {
/// Constructor for URI parse errors.
pub fn new(desc: &'static str, span: Option<Range<usize>>) -> ParseError {
ParseError {
desc: desc.into(),
span,
}
}
/// The location in the input string of the error. Optional.
pub fn span(&self) -> Option<Range<usize>> {
self.span.clone()
}
/// A debugging description of the error.
pub fn desc(&self) -> &'static str {
self.desc.as_str()
}
}
impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the kind, which includes encoding details when present.
        fmt::Display::fmt(&self.desc, f)
    }
}

#[cfg(feature = "std")]
impl ::std::error::Error for ParseError {}
/// Converts an unescaping failure into a parse error, pointing the span at
/// the single offending byte.
impl From<crate::escape::UnescapeError> for ParseError {
    fn from(error: crate::escape::UnescapeError) -> Self {
        Self {
            // Highlight just the byte at `index`.
            span: Some(error.index..error.index + 1),
            desc: ParseErrorKind::EncodingError(error),
        }
    }
}
/// Internal, structured description of a `ParseError`.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum ParseErrorKind {
    /// Bad percent encoding or illegal characters
    EncodingError(crate::escape::UnescapeError),
    /// Missing scheme or authority
    MissingSchemeOrAuthority,
    /// Cannot find URI components
    MissingUriComponents,
    /// Invalid URI scheme
    InvalidUriScheme,
    /// Not a URI
    InvalidUri,
    /// Catch-all for descriptions not matched by `From<&'static str>`.
    #[allow(dead_code)]
    Custom { desc: &'static str },
}
// NOTE: the string literals matched here must stay in sync with
// `ParseErrorKind::as_str` so that descriptions round-trip losslessly.
impl From<&'static str> for ParseErrorKind {
    fn from(desc: &'static str) -> Self {
        match desc {
            "Missing scheme or authority" => Self::MissingSchemeOrAuthority,
            "Cannot find URI components" => Self::MissingUriComponents,
            "Invalid URI scheme" => Self::InvalidUriScheme,
            "Not a URI" => Self::InvalidUri,
            _ => Self::Custom { desc },
        }
    }
}
impl ParseErrorKind {
    /// Returns the canonical static description for this error kind.
    /// Must stay in sync with the `From<&'static str>` conversion.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::EncodingError(_) => "Bad percent encoding or illegal characters",
            Self::MissingSchemeOrAuthority => "Missing scheme or authority",
            Self::MissingUriComponents => "Cannot find URI components",
            Self::InvalidUriScheme => "Invalid URI scheme",
            Self::InvalidUri => "Not a URI",
            Self::Custom { desc } => desc,
        }
    }
}

impl fmt::Display for ParseErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Encoding errors carry extra detail worth surfacing in full.
            Self::EncodingError(e) => write!(f, "{}", e),
            other => f.write_str(other.as_str()),
        }
    }
}
|
use crate::instructions;
use crate::instructions::{ InstrThumb16 };
use crate::memory::{ Register, RegisterBank, Memory };
use crate::loader::ProgramImage;
/// ARMv7-M virtual processor
///
/// Registers:
/// [ R0 ]: General purpose Thumb16 addressable
/// [ R1 ]: General purpose Thumb16 addressable
/// [ R2 ]: General purpose Thumb16 addressable
/// [ R3 ]: General purpose Thumb16 addressable
/// [ R4 ]: General purpose Thumb16 addressable
/// [ R5 ]: General purpose Thumb16 addressable
/// [ R6 ]: General purpose Thumb16 addressable
/// [ R7 ]: General purpose Thumb16 addressable
/// [ R8 ]: General purpose
/// [ R9 ]: General purpose
/// [ R10 ]: General purpose
/// [ R11 ]: General purpose
/// [ R12 ]: General purpose
/// [ R13 ]: Stack Pointer
/// [ R14 ]: Link Register
/// [ R15 ]: Program Counter
pub struct Processor {
    // Decode table: maps every 16-bit instruction word directly to its
    // decoded InstrThumb16 form (built by `init`).
    dct: [InstrThumb16; instructions::NUM_TH16_INSTRUCTIONS],
    // Register bank R0-R15 (see table above).
    reg: RegisterBank,
    // Program/data memory image (loaded by `load`).
    mem: Memory,
    // Entry-point address captured from the loaded program image.
    reset: usize,
}
impl Processor {
    /// Creates a processor with an empty decode table, fresh registers and
    /// no program memory. Call `init` and `load` before `run`.
    pub fn new() -> Processor {
        Processor {
            dct: [InstrThumb16::Undefined; instructions::NUM_TH16_INSTRUCTIONS],
            reg: RegisterBank::new(),
            mem: Memory::alloc(0),
            reset: 0,
        }
    }

    /// Builds the Thumb16 decode table and dumps it to `decode_table_new.rs`
    /// in the working directory for offline inspection.
    ///
    /// # Panics
    /// Panics if the dump file cannot be created or written.
    pub fn init(&mut self) {
        self.dct = InstrThumb16::generate_decode_table();
        use std::io::prelude::*;
        let mut file = std::fs::File::create("decode_table_new.rs").unwrap();
        for (idx, item) in self.dct.iter().enumerate() {
            writeln!(file, "{:#06x} // {:?} {:#018b}", idx, item, idx).unwrap();
        }
    }

    /// Loads a program image, recording its entry point for `reset`.
    pub fn load(&mut self, image: ProgramImage) {
        self.reset = image.entry();
        self.mem = image.into_raw_image();
    }

    /// Points the PC at the loaded program's entry.
    /// NOTE(review): the `- 1` presumably strips the Thumb bit from the
    /// entry address; it underflows if `reset` is 0 — confirm intent.
    pub fn reset(&mut self) {
        self.reg[Register::PC] = self.reset as u32 - 1;
    }

    /// Runs the fetch-decode-execute loop.
    pub fn run(&mut self) {
        self.fde_loop()
    }

    /// Fetches the 16-bit instruction word at the current PC.
    fn fetch(&self) -> u16 {
        let at = self.reg[Register::PC] as usize;
        self.mem.read_u16(at)
    }

    /// Decodes an instruction word via a direct decode-table lookup.
    fn decode(&self, instruction: u16) -> InstrThumb16 {
        self.dct[instruction as usize]
    }

    /// Steps to execute an instruction
    ///
    /// -> Fetch 16 bit instruction from program memory, pointed to by R15
    /// -> Decode 16 bit instruction via DCT lookup
    /// -> Match instruction to appropriate execution branch
    /// -> IF Thumb2 extended instruction, fetch second part of instruction
    /// -> Decode 32 bit instruction via tree search
    /// -> Execute 32 bit instruction
    /// -> IF standard Thumb16 instruction, execute instruction
    ///
    fn fde_loop(&mut self) {
        let mut cycles = 0;
        // Development guard: stop after a handful of instructions.
        let debug_cycle_limit = 10;
        println!("Beginning execution...");
        // Core execution loop
        loop {
            print!("[PC: {:06X}] ", self.reg[Register::PC]);
            let fetched = self.fetch();
            let decoded = self.decode(fetched);
            print!("{:016b} {:04X} ", fetched, fetched);
            match decoded {
                InstrThumb16::BranchE1 { cond, imm } => {
                    // Sign-extend the 8-bit immediate to get the branch offset.
                    let target = (imm as i8) as i32;
                    print!("exec branch e1: [cond, target] = [{:04X}, {:#06X}] ({}:{})", cond, target, cond, imm);
                    self.reg[Register::PC] = ((self.reg[Register::PC] as i32) + target) as u32;
                }
                u => {
                    print!("unhandled instruction: {:?}", u)
                }
            }
            println!();
            cycles += 1;
            if cycles >= debug_cycle_limit {
                break;
            }
        }
    }
}
|
//! Sky properties.
use crate::input::Sky;
use arctk::{err::Error, file::Build, img::GradientBuilder, math::Pos3};
use arctk_attr::input;
use std::path::Path;
/// Sky properties.
#[input]
pub struct SkyBuilder {
    /// Sky brightness fraction.
    brightness: f64,
    /// Sun position when calculating sun shadows [m].
    sun_pos: Pos3,
    /// Sun angular radius when calculating soft shadows [deg].
    sun_rad: f64,
    /// Sky colour gradient.
    grad: GradientBuilder,
}
impl Build for SkyBuilder {
    type Inst = Sky;

    /// Consumes the builder, resolving the gradient relative to `in_dir`
    /// and converting the sun radius from degrees to radians.
    #[inline]
    fn build(self, in_dir: &Path) -> Result<Self::Inst, Error> {
        let grad = self.grad.build(in_dir)?;
        Ok(Self::Inst::new(
            self.brightness,
            self.sun_pos,
            self.sun_rad.to_radians(),
            grad,
        ))
    }
}
|
use std::fmt;
use std::net::TcpStream;
use std::io::{Read, Write};
/// The request line of an HTTP request: method, path and protocol version.
pub struct HttpHeader {
    pub method: String,
    pub path: String,
    pub version: String,
}

impl HttpHeader {
    /// Reads from the stream until the blank line terminating the HTTP
    /// headers (`\r\n\r\n`) or EOF, then parses the request line.
    ///
    /// Returns `None` if the stream yields no data, a read error occurs, or
    /// the request line does not contain method, path and version tokens.
    pub fn new(stream: &mut TcpStream) -> Option<HttpHeader> {
        let mut st = String::new();
        loop {
            let mut temp = [0; 256];
            match stream.read(&mut temp) {
                // EOF: parse whatever we have so far.
                Ok(0) => break,
                Ok(m) => {
                    st.push_str(&String::from_utf8_lossy(&temp[0..m]));
                    // Stop once the header terminator has arrived.
                    if st.contains("\r\n\r\n") {
                        break;
                    }
                }
                Err(e) => {
                    // Bug fix: bail out instead of retrying forever — a
                    // persistent error (e.g. WouldBlock) previously made
                    // this loop spin and spam the log indefinitely.
                    println!("{:?}", e);
                    return None;
                }
            }
        }
        // Parse "METHOD PATH VERSION". Bug fix: use `?` on each token so a
        // malformed request line yields None instead of panicking on an
        // out-of-bounds index (the input is untrusted network data).
        let first_line = st.lines().next()?;
        let mut parts = first_line.split_whitespace();
        let method = parts.next()?.to_string();
        let path = parts.next()?.to_string();
        let version = parts.next()?.to_string();
        Some(HttpHeader { method, path, version })
    }
}

impl fmt::Display for HttpHeader {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "Method: {} \r\nPath: {}\r\nVersion {}", self.method, self.path, self.version)
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.