text stringlengths 8 4.13M |
|---|
/*!
Miscellaneous utility code.
*/
use std::ffi::CString;
use std::fs;
use std::io;
use std::mem::size_of;
use std::path::{Path, PathBuf};
/// Small convenience extension for `bool`.
pub trait BoolUtil {
    /// Selects `if_true` when `self` is `true`, otherwise `if_false`.
    ///
    /// Note: both arguments are evaluated by the caller before the call,
    /// so this is *not* a lazy conditional.
    fn as_either<T>(&self, if_true: T, if_false: T) -> T;
}

impl BoolUtil for bool {
    fn as_either<T>(&self, if_true: T, if_false: T) -> T {
        match *self {
            true => if_true,
            false => if_false,
        }
    }
}
/// Checked numeric conversion that panics (with a descriptive message)
/// when the value does not fit in the target type.
pub trait CheckedFrom<T> {
    /// Converts `v` to `Self`, panicking on under- or overflow.
    fn checked_from(v: T) -> Self;
}

impl CheckedFrom<usize> for u32 {
    fn checked_from(v: usize) -> u32 {
        // `usize` is unsigned, so underflow is impossible; only the upper
        // bound needs checking.
        if v > ::std::u32::MAX as usize {
            panic!("overflow on conversion from usize {} to u32", v);
        }
        v as u32
    }
}

impl CheckedFrom<i64> for usize {
    fn checked_from(v: i64) -> usize {
        if v < 0 {
            panic!("underflow on conversion from i64 {} to usize", v);
        }
        // Overflow is only possible on targets where usize is narrower
        // than i64 (the short-circuit keeps the comparison meaningful:
        // on 64-bit targets `usize::MAX as i64` would be -1).
        if size_of::<usize>() < size_of::<i64>() && v > ::std::usize::MAX as i64 {
            panic!("overflow on conversion from i64 {} to usize", v);
        }
        v as usize
    }
}

impl CheckedFrom<usize> for i32 {
    fn checked_from(v: usize) -> i32 {
        // No underflow check needed: usize is unsigned.
        if v > ::std::i32::MAX as usize {
            panic!("overflow on conversion from usize {} to i32", v);
        }
        v as i32
    }
}
pub trait CheckedInto<T> {
fn checked_into(self) -> T;
}
impl<T, U> CheckedInto<U> for T where U: CheckedFrom<T> {
fn checked_into(self) -> U {
CheckedFrom::checked_from(self)
}
}
/// In-place path tweaks for `PathBuf`.
pub trait PathBufExt {
    /// Rewrites every `/` separator in the path to `\`.
    fn normalize_path_sep(&mut self);
}

impl PathBufExt for PathBuf {
    fn normalize_path_sep(&mut self) {
        // TODO: also resolve to some sort of absolute path.
        // Take the path out of `self` (leaving an empty PathBuf) so we can
        // consume it; non-UTF-8 paths are lossily converted, as before.
        let s = ::std::mem::replace(self, PathBuf::new())
            .into_os_string()
            .into_string()
            .unwrap_or_else(|oss| oss.to_string_lossy().into_owned());
        // '/' is a single ASCII byte, so a char-wise replace is exactly
        // equivalent to the old byte-wise rewrite but cannot produce
        // invalid UTF-8 (no `from_utf8(..).unwrap()` round-trip needed).
        *self = s.replace('/', "\\").into();
    }
}
/**
This is because I am too lazy to write my own damn monad type.
This makes a `Result<Option<T>, E>` behave exactly like `Option<T>`, except that errors will always short-circuit.
*/
pub trait ResultOptionExt<T, E> {
    fn ro_or_else<F>(self, f: F) -> Result<Option<T>, E>
    where F: FnOnce() -> Result<Option<T>, E>;
    fn ro_and_then<T1, F>(self, f: F) -> Result<Option<T1>, E>
    where F: FnOnce(T) -> Result<Option<T1>, E>;
    fn ro_map<T1, F>(self, f: F) -> Result<Option<T1>, E>
    where F: FnOnce(T) -> T1;
}

impl<T, E> ResultOptionExt<T, E> for Result<Option<T>, E> {
    // Fall back to `f` only when the value is absent; `?` propagates errors.
    fn ro_or_else<F>(self, f: F) -> Result<Option<T>, E>
    where F: FnOnce() -> Result<Option<T>, E> {
        match self? {
            Some(v) => Ok(Some(v)),
            None => f(),
        }
    }
    // Chain `f` over a present value; absence and errors pass through.
    fn ro_and_then<T1, F>(self, f: F) -> Result<Option<T1>, E>
    where F: FnOnce(T) -> Result<Option<T1>, E> {
        match self? {
            Some(v) => f(v),
            None => Ok(None),
        }
    }
    // `ro_map` is just `ro_and_then` with an infallible, always-Some `f`.
    fn ro_map<T1, F>(self, f: F) -> Result<Option<T1>, E>
    where F: FnOnce(T) -> T1 {
        self.ro_and_then(|v| Ok(Some(f(v))))
    }
}
/// Conversion of Rust strings into NUL-terminated C strings.
pub trait ToCStr {
    /// Returns the string as a `CString`.
    ///
    /// # Panics
    /// Panics if the string contains an interior NUL byte (same behavior
    /// as the original `unwrap`).
    fn to_c_str(&self) -> CString;
}

impl<'a> ToCStr for &'a str {
    fn to_c_str(&self) -> CString {
        CString::new(*self).unwrap()
    }
}

impl ToCStr for String {
    fn to_c_str(&self) -> CString {
        // Delegate to the &str impl instead of cloning the whole String
        // first; `CString::new` copies the bytes either way.
        self.as_str().to_c_str()
    }
}
/// Fallible conversion returning `None` when the value cannot be
/// represented. (Predates — and is shadowed-by-name from — the std
/// `TryFrom`/`TryInto` traits.)
///
/// The `Sized` supertrait is required because `try_from` returns
/// `Option<Self>` by value, matching std's `TryFrom: Sized` design.
pub trait TryFrom<T>: Sized {
    /// Attempts the conversion, returning `None` on failure.
    fn try_from(v: T) -> Option<Self>;
}

/// Receiver-position counterpart of `TryFrom`.
pub trait TryInto<U> {
    fn try_into(self) -> Option<U>;
}

// Blanket impl: implementing `TryFrom` yields `TryInto` for free.
impl<T, U> TryInto<U> for T where U: TryFrom<T> {
    fn try_into(self) -> Option<U> {
        TryFrom::try_from(self)
    }
}
/// Opens `path` and returns a buffered iterator over its lines.
///
/// # Panics
/// Panics (with the offending path in the message) if the file cannot be
/// opened. I/O errors *while reading* surface as `Err` items from the
/// returned iterator instead.
pub fn read_lines<P: AsRef<Path>>(path: P) -> io::Lines<io::BufReader<fs::File>> {
    use std::io::BufRead;
    // Include the path in the panic message so failures are diagnosable;
    // a bare `unwrap()` would only show the raw io::Error.
    let file = fs::File::open(&path)
        .unwrap_or_else(|e| panic!("failed to open {}: {}", path.as_ref().display(), e));
    io::BufReader::new(file).lines()
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/// Identifier for the Proof-of-Authority consensus engine.
pub const CONSENSUS_POA: &str = "poa";
/// Identifier for the Raft consensus engine.
pub const CONSENSUS_RAFT: &str = "raft";
/// Identifier for the HotStuff consensus engine.
pub const CONSENSUS_HOTSTUFF: &str = "hotstuff";
/// All supported consensus identifiers, e.g. for validating configuration.
pub const CONSENSUS_LIST: [&str; 3] = [CONSENSUS_POA, CONSENSUS_RAFT, CONSENSUS_HOTSTUFF];
|
// Auto-generated (svd2rust-style) reader types for the CEC ESR (error
// status) register. Each field below is a single read-only bit.
#[doc = "Register `ESR` reader"]
pub type R = crate::R<ESR_SPEC>;
#[doc = "Field `BTE` reader - Bit timing error"]
pub type BTE_R = crate::BitReader;
#[doc = "Field `BPE` reader - Bit period error"]
pub type BPE_R = crate::BitReader;
#[doc = "Field `RBTFE` reader - Rx block transfer finished error"]
pub type RBTFE_R = crate::BitReader;
#[doc = "Field `SBE` reader - Start bit error"]
pub type SBE_R = crate::BitReader;
#[doc = "Field `ACKE` reader - Block acknowledge error"]
pub type ACKE_R = crate::BitReader;
#[doc = "Field `LINE` reader - Line error"]
pub type LINE_R = crate::BitReader;
#[doc = "Field `TBTFE` reader - Tx block transfer finished error"]
pub type TBTFE_R = crate::BitReader;
// One accessor per error flag; each extracts a single bit (bit N for the
// Nth accessor) from the raw register value. Generated code — do not
// hand-edit; regenerate from the SVD instead.
impl R {
    #[doc = "Bit 0 - Bit timing error"]
    #[inline(always)]
    pub fn bte(&self) -> BTE_R {
        BTE_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Bit period error"]
    #[inline(always)]
    pub fn bpe(&self) -> BPE_R {
        BPE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Rx block transfer finished error"]
    #[inline(always)]
    pub fn rbtfe(&self) -> RBTFE_R {
        RBTFE_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Start bit error"]
    #[inline(always)]
    pub fn sbe(&self) -> SBE_R {
        SBE_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Block acknowledge error"]
    #[inline(always)]
    pub fn acke(&self) -> ACKE_R {
        ACKE_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - Line error"]
    #[inline(always)]
    pub fn line(&self) -> LINE_R {
        LINE_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - Tx block transfer finished error"]
    #[inline(always)]
    pub fn tbtfe(&self) -> TBTFE_R {
        TBTFE_R::new(((self.bits >> 6) & 1) != 0)
    }
}
#[doc = "CEC error status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`esr::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type describing the ESR register (32-bit, read-only, resets to 0).
pub struct ESR_SPEC;
impl crate::RegisterSpec for ESR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`esr::R`](R) reader structure"]
impl crate::Readable for ESR_SPEC {}
#[doc = "`reset()` method sets ESR to value 0"]
impl crate::Resettable for ESR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::sync::Once;
use gristmill::init_logging;
use gristmill::asset::{Asset, AssetExt, AssetResult, AssetError, category, Resources, resource::AssetList};
// -------------------------------------------------------------------------------------------------
// Font objects more or less need a static lifetime. To layout text we use PositionedGlyph, which borrows the font, which is
// a huge headache if the font has any lifetime other than static. (Note that this is NOT the lifetime parameter for the Font type).
// Global font storage: written exactly once by `load_fonts` (guarded by
// FONTS_INIT) and only read afterwards through `fonts()`.
// NOTE(review): `static mut` is only sound here if no thread calls
// `fonts()` concurrently with the initial `load_fonts()`; consider
// migrating to `std::sync::OnceLock` to make this safe by construction.
static mut FONTS: Option<FontStore> = None;
static FONTS_INIT: Once = Once::new();
/// Returns the process-wide font store.
///
/// # Panics
/// Panics if `load_fonts` has not completed yet.
pub fn fonts() -> &'static FontStore {
    if !FONTS_INIT.is_completed() {
        panic!("no fonts have been loaded");
    }
    // Safe only after the Once has completed: FONTS is then initialized
    // and never written again.
    unsafe {
        FONTS.as_ref().unwrap()
    }
}
fn read_fonts() -> (FontStore, FontList) {
let mut font_store = FontStore { fonts: Vec::new() };
let mut font_list = FontList { names: Vec::new() };
let asset_list = match AssetList::read("fonts") {
Ok(value) => value,
Err(error) => {
log::error!("Failed to load font list: {}", error);
return (font_store, font_list);
}
};
if asset_list.loader() != "font" {
log::error!("Invalid loader for font list (expected \"font\", got \"{}\")", asset_list.loader());
return (font_store, font_list);
}
for item in asset_list {
if item.asset_type != "font" {
log::warn!("Invalid asset type in font list (expected \"font\", got \"{}\")", item.asset_type);
continue;
}
match FontAsset::read(&item.asset_path) {
Ok(font) => {
font_store.fonts.push(font);
font_list.names.push(item.name);
}
Err(error) => log::error!("Failed to load font {}: {}", item.asset_path, error)
}
}
(font_store, font_list)
}
/// Loads all fonts (once per process) and registers the font-name list.
///
/// NOTE(review): the `FontList` is inserted into whichever `Resources`
/// is passed on the *first* call only; subsequent calls with a different
/// `Resources` receive nothing — confirm this is the intended contract.
pub fn load_fonts(resources: &mut Resources) {
    init_logging();
    FONTS_INIT.call_once(|| {
        let (font_store, font_list) = read_fonts();
        resources.insert("fonts", font_list);
        // Sole write to FONTS; `fonts()` only reads after the Once completes.
        unsafe {
            FONTS = Some(font_store);
        }
    });
}
// -------------------------------------------------------------------------------------------------
// Wrapper around a rusttype font parsed from a `.ttf` asset file.
struct FontAsset(rusttype::Font<'static>);
impl Asset for FontAsset {
    type Category = category::Data;
    /// Reads `<asset_path>.ttf` and parses it with rusttype.
    ///
    /// Errors: I/O failures propagate via `?`; bytes that rusttype rejects
    /// become `AssetError::InvalidData`.
    fn read(asset_path: &str) -> AssetResult<Self> {
        let file_path = Self::get_file(asset_path, "ttf");
        log::trace!("Opening file {}", file_path.to_string_lossy());
        let font = match rusttype::Font::try_from_vec(std::fs::read(&file_path)?) {
            Some(f) => f,
            None => return Err(AssetError::InvalidData),
        };
        Ok(FontAsset(font))
    }
}
/// Lightweight index into the global `FontStore`; `usize::MAX` is the
/// "no font" sentinel.
#[derive(Copy, Clone, Default, Eq, PartialEq, Debug)]
pub struct Font(usize);

impl Font {
    /// Returns the sentinel handle that refers to no font.
    pub fn null() -> Font {
        Font(usize::MAX)
    }

    /// True when this handle is the `null()` sentinel.
    pub fn is_null(&self) -> bool {
        matches!(self.0, usize::MAX)
    }
}
/// Owns every loaded font; looked up by `Font` handles.
pub struct FontStore {
    fonts: Vec<FontAsset>,
}

impl FontStore {
    /// Returns the rusttype font for `index`, or `None` for out-of-range
    /// handles (including `Font::null()`).
    pub fn get(&self, index: Font) -> Option<&rusttype::Font<'static>> {
        match self.fonts.get(index.0) {
            Some(asset) => Some(&asset.0),
            None => None,
        }
    }
}
pub struct FontList {
names: Vec<String>,
}
impl FontList {
pub fn find(&self, name: &str) -> Font {
let index = self.names.iter().position(|n| n == name);
index.map(Font).unwrap_or_else(|| {
log::warn!("Unknown font {}", name);
Font::null()
})
}
}
|
extern crate dmbc;
extern crate exonum;
extern crate exonum_testkit;
extern crate hyper;
extern crate iron;
extern crate iron_test;
extern crate mount;
extern crate serde_json;
pub mod dmbc_testkit;
use dmbc_testkit::{DmbcTestApiBuilder, DmbcTestKitApi};
use exonum::crypto;
use exonum::messages::Message;
use hyper::status::StatusCode;
use dmbc::currency::api::transaction::TransactionResponse;
use dmbc::currency::assets::{AssetBundle, AssetInfo};
use dmbc::currency::configuration::{Configuration, TransactionFees, TransactionPermissions};
use dmbc::currency::error::Error;
use dmbc::currency::transactions::builders::transaction;
use dmbc::currency::wallet::Wallet;
// Deleting part of an asset bundle: the remaining units stay in the wallet,
// the fee is charged, and the on-chain AssetInfo is decreased accordingly.
#[test]
fn delete_assets_one_from_bundle() {
    let meta_data = "asset";
    let units = 5;
    let units_to_remove = 1;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    // Seed a wallet that owns the full bundle plus enough balance for the fee.
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset.clone(), info.clone()))
        .create();
    let api = testkit.api();
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset(meta_data, units_to_remove)
        .seed(5)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Ok(())));
    // The wallet keeps the bundle minus the removed units; fee is deducted.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets: Vec<AssetBundle> = wallet_assets.iter().map(|a| a.into()).collect();
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert_eq!(
        assets,
        vec![AssetBundle::new(
            asset.clone().id(),
            units - units_to_remove,
        )]
    );
    // The global asset info must shrink by the same amount.
    let assets_infos: Vec<AssetInfo> = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect();
    assert_eq!(assets_infos[0], info.decrease(units_to_remove).unwrap());
}
// Deleting every unit of a bundle removes the asset from the wallet entirely.
#[test]
fn delete_assets_all_from_bundle() {
    let meta_data = "asset";
    let units = 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset.clone(), info))
        .create();
    let api = testkit.api();
    // Delete exactly as many units as the wallet owns.
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset(meta_data, units)
        .seed(5)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Ok(())));
    // Fee charged; no assets remain.
    let wallet = api.get_wallet(&public_key);
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert!(wallet.assets_count == 0);
}
// Deleting an asset the wallet never owned fails with AssetNotFound, but the
// transaction fee is still charged. (NOTE(review): "doent" in the name is a
// typo for "doesnt" — kept to avoid churning test identifiers.)
#[test]
fn delete_assets_that_doent_exist() {
    let meta_data = "asset";
    let units = 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    // Wallet with funds but no assets at all.
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .create();
    let api = testkit.api();
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset(meta_data, units)
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Err(Error::AssetNotFound)));
    // Fee is charged even though the deletion itself failed.
    let wallet = api.get_wallet(&public_key);
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
}
// Same as delete_assets_that_doent_exist, but the wallet owns a *different*
// asset: that unrelated asset must survive untouched while the deletion
// fails with AssetNotFound and the fee is charged.
#[test]
fn delete_assets_that_doent_exist2() {
    let meta_data = "asset";
    let meta_data2 = "another_asset";
    let units = 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (another_asset, another_info) = dmbc_testkit::create_asset(
        meta_data2,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (another_asset.clone(), another_info.clone()))
        .create();
    let api = testkit.api();
    // Target `meta_data`, which the wallet does not own.
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset(meta_data, units)
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Err(Error::AssetNotFound)));
    // The unrelated asset and its info are unchanged; only the fee moved.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets = wallet_assets
        .iter()
        .map(|a| a.into())
        .collect::<Vec<AssetBundle>>();
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert_eq!(assets, vec![another_asset.clone()]);
    let assets_infos = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect::<Vec<AssetInfo>>();
    assert_eq!(assets_infos[0], another_info);
}
// Requesting more units than the wallet holds fails with InsufficientAssets;
// the wallet's asset and its info are left unchanged, but the fee is charged.
#[test]
fn delete_assets_amount_more_than_wallet_have() {
    let meta_data = "asset";
    let units = 5;
    let units_to_delete = units + 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset.clone(), info.clone()))
        .create();
    let api = testkit.api();
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset(meta_data, units_to_delete)
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Err(Error::InsufficientAssets)));
    // Nothing was deleted: the original bundle and info are intact.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets = wallet_assets
        .iter()
        .map(|a| a.into())
        .collect::<Vec<AssetBundle>>();
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert_eq!(assets, vec![asset.clone()]);
    let assets_infos = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect::<Vec<AssetInfo>>();
    assert_eq!(assets_infos[0], info);
}
// When the wallet cannot pay the transaction fee, the delete fails with
// InsufficientFunds and *nothing* changes — balance and assets included.
#[test]
fn delete_assets_insufficient_funds() {
    let meta_data = "asset";
    let units = 5;
    let transaction_fee = 100;
    // Balance deliberately below the fee.
    let balance = 5;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset.clone(), info.clone()))
        .create();
    let api = testkit.api();
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset(meta_data, units)
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Err(Error::InsufficientFunds)));
    // No fee was taken and the asset is untouched.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets = wallet_assets
        .iter()
        .map(|a| a.into())
        .collect::<Vec<AssetBundle>>();
    assert_eq!(wallet.balance, balance);
    assert_eq!(assets, vec![asset.clone()]);
    let assets_infos = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect::<Vec<AssetInfo>>();
    assert_eq!(assets_infos[0], info);
}
// The asset was created by a different key than the wallet performing the
// delete; the transaction still succeeds (deleting by asset value) and the
// wallet ends up with no assets.
#[test]
fn delete_assets_with_different_creator() {
    let meta_data = "asset";
    let units = 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    // Creator and owner are distinct keypairs.
    let (creator_key, _) = crypto::gen_keypair();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset, info) = dmbc_testkit::create_asset(
        meta_data,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &creator_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset.clone(), info.clone()))
        .create();
    let api = testkit.api();
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset_value(asset.clone())
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Ok(())));
    // Fee charged; wallet holds no assets or infos afterwards.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets = wallet_assets
        .iter()
        .map(|a| a.into())
        .collect::<Vec<AssetBundle>>();
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert_eq!(assets, vec![]);
    let assets_infos = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect::<Vec<AssetInfo>>();
    assert_eq!(assets_infos, vec![]);
}
// A delete touching two bundles must be atomic: if one bundle has too few
// units, the whole transaction fails (InsufficientAssets) and *both* bundles
// are left exactly as they were. Only the fee is charged.
#[test]
fn delete_assets_two_assets_where_one_asset_doesnt_have_enough_items() {
    let meta_data1 = "asset1";
    let meta_data2 = "asset2";
    let units = 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset1, info1) = dmbc_testkit::create_asset(
        meta_data1,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let (asset2, info2) = dmbc_testkit::create_asset(
        meta_data2,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset1.clone(), info1.clone()))
        .add_asset_to_wallet(&public_key, (asset2.clone(), info2.clone()))
        .create();
    let api = testkit.api();
    // 3 of 5 for asset1 is fine; 7 of 5 for asset2 must sink the whole tx.
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset_value(AssetBundle::new(asset1.id(), 3))
        .add_asset_value(AssetBundle::new(asset2.id(), 7))
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Err(Error::InsufficientAssets)));
    // Both bundles and infos unchanged; fee still charged.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets = wallet_assets
        .iter()
        .map(|a| a.into())
        .collect::<Vec<AssetBundle>>();
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert_eq!(assets, vec![asset1.clone(), asset2.clone()]);
    let assets_infos = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect::<Vec<AssetInfo>>();
    assert_eq!(assets_infos, vec![info1, info2]);
}
// Deleting two full bundles where one was created by a foreign key: both
// deletions succeed and the wallet ends up empty of assets.
#[test]
fn delete_assets_two_assets_where_one_have_another_creator() {
    let meta_data1 = "asset1";
    let meta_data2 = "asset2";
    let units = 5;
    let transaction_fee = 100;
    let balance = 100_000;
    let fixed = 10;
    let config_fees = TransactionFees::with_default_key(0, 0, transaction_fee, 0, 0, 0);
    let permissions = TransactionPermissions::default();
    // asset1 is created by a key other than the deleting wallet's key.
    let (creator_key, _) = crypto::gen_keypair();
    let (public_key, secret_key) = crypto::gen_keypair();
    let (asset1, info1) = dmbc_testkit::create_asset(
        meta_data1,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &creator_key,
    );
    let (asset2, info2) = dmbc_testkit::create_asset(
        meta_data2,
        units,
        dmbc_testkit::asset_fees(fixed, "0.0".parse().unwrap()),
        &public_key,
    );
    let mut testkit = DmbcTestApiBuilder::new()
        .with_configuration(Configuration::new(config_fees, permissions))
        .add_wallet_value(&public_key, Wallet::new(balance, vec![]))
        .add_asset_to_wallet(&public_key, (asset1.clone(), info1.clone()))
        .add_asset_to_wallet(&public_key, (asset2.clone(), info2.clone()))
        .create();
    let api = testkit.api();
    let tx_delete_assets = transaction::Builder::new()
        .keypair(public_key, secret_key)
        .tx_del_assets()
        .add_asset_value(asset1.clone())
        .add_asset_value(asset2.clone())
        .seed(1)
        .build();
    let tx_hash = tx_delete_assets.hash();
    let (status, response) = api.post_tx(&tx_delete_assets);
    testkit.create_block();
    // check post response
    assert_eq!(status, StatusCode::Created);
    assert_eq!(response, Ok(Ok(TransactionResponse { tx_hash })));
    let (_, tx_status) = api.get_tx_status(&tx_delete_assets);
    assert_eq!(tx_status, Ok(Ok(())));
    // Fee charged; both bundles and both infos are gone from the wallet.
    let wallet = api.get_wallet(&public_key);
    let wallet_assets = api.get_wallet_assets(&public_key);
    let assets = wallet_assets
        .iter()
        .map(|a| a.into())
        .collect::<Vec<AssetBundle>>();
    let expected_balance = balance - transaction_fee;
    assert_eq!(wallet.balance, expected_balance);
    assert_eq!(assets, vec![]);
    let assets_infos = wallet_assets
        .iter()
        .map(|a| a.clone().meta_data.unwrap())
        .collect::<Vec<AssetInfo>>();
    assert_eq!(assets_infos, vec![]);
}
// Selects the C++ standard flag at compile time. The default is C++11 and
// the c++14/c++17/c++20 cargo features bump it (the *last* matching cfg
// wins because the assignments run in order). Each flag comes in a GCC/
// Clang spelling (-std=...) and an MSVC spelling (/std:...); the MSVC one
// is picked when targeting the msvc environment.
// The parenthesised statements exist so that the cfg attributes can be
// attached to plain assignment expressions inside this const block —
// do not "simplify" them away.
#[allow(unused_assignments, unused_mut, unused_variables)]
pub const STD: &str = {
    let mut flags = ["-std=c++11", "/std:c++11"];
    #[cfg(feature = "c++14")]
    (flags = ["-std=c++14", "/std:c++14"]);
    #[cfg(feature = "c++17")]
    (flags = ["-std=c++17", "/std:c++17"]);
    #[cfg(feature = "c++20")]
    (flags = ["-std=c++20", "/std:c++20"]);
    let [mut flag, msvc_flag] = flags;
    #[cfg(target_env = "msvc")]
    (flag = msvc_flag);
    flag
};
|
use super::helpers::{
allocations,
edits::invert_edit,
edits::ReadRecorder,
fixtures::{get_language, get_test_grammar, get_test_language},
};
use crate::{
generate::generate_parser_for_grammar,
parse::{perform_edit, Edit},
};
use std::{
sync::atomic::{AtomicUsize, Ordering},
thread, time,
};
use tree_sitter::{IncludedRangesError, InputEdit, LogType, Parser, Point, Range};
use tree_sitter_proc_macro::retry;
// Smoke test: parsing a small Rust snippet yields the expected root kind,
// s-expression, and first child.
#[test]
fn test_parsing_simple_string() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let tree = parser
        .parse(
            "
struct Stuff {}
fn main() {}
",
            None,
        )
        .unwrap();
    let root_node = tree.root_node();
    assert_eq!(root_node.kind(), "source_file");
    assert_eq!(
        root_node.to_sexp(),
        concat!(
            "(source_file ",
            "(struct_item name: (type_identifier) body: (field_declaration_list)) ",
            "(function_item name: (identifier) parameters: (parameters) body: (block)))"
        )
    );
    let struct_node = root_node.child(0).unwrap();
    assert_eq!(struct_node.kind(), "struct_item");
}
// Verifies the logger callback receives both parse-stage and lex-stage
// messages, and that rows in log output are zero-indexed.
#[test]
fn test_parsing_with_logging() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let mut messages = Vec::new();
    parser.set_logger(Some(Box::new(|log_type, message| {
        messages.push((log_type, message.to_string()));
    })));
    parser
        .parse(
            "
struct Stuff {}
fn main() {}
",
            None,
        )
        .unwrap();
    // One representative message from each log stage must be present.
    assert!(messages.contains(&(
        LogType::Parse,
        "reduce sym:struct_item, child_count:3".to_string()
    )));
    assert!(messages.contains(&(LogType::Lex, "skip character:' '".to_string())));
    // At least one logged position must reference row 0 (rows are 0-based).
    let mut row_starts_from_0 = false;
    for (_, m) in &messages {
        if m.contains("row:0") {
            row_starts_from_0 = true;
            break;
        }
    }
    assert!(row_starts_from_0);
}
// Writes the dot-graph debug log to a temp file and checks that no line
// reports a zero-indexed row position (graph output is 1-indexed).
#[test]
#[cfg(unix)]
fn test_parsing_with_debug_graph_enabled() {
    use std::io::{BufRead, BufReader, Seek};
    let has_zero_indexed_row = |s: &str| s.contains("position: 0,");
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let mut debug_graph_file = tempfile::tempfile().unwrap();
    parser.print_dot_graphs(&debug_graph_file);
    parser.parse("const zero = 0", None).unwrap();
    // Rewind to read back everything the parser wrote.
    debug_graph_file.seek(std::io::SeekFrom::Start(0)).unwrap();
    let log_reader = BufReader::new(debug_graph_file)
        .lines()
        .map(|l| l.expect("Failed to read line from graph log"));
    for line in log_reader {
        assert!(
            !has_zero_indexed_row(&line),
            "Graph log output includes zero-indexed row: {}",
            line
        );
    }
}
// Feeds the parser via a position-based callback over a line array instead
// of a contiguous buffer, returning "\n" at line ends and "" past EOF.
#[test]
fn test_parsing_with_custom_utf8_input() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let lines = &["pub fn foo() {", "  1", "}"];
    let tree = parser
        .parse_with(
            &mut |_, position| {
                let row = position.row;
                let column = position.column;
                if row < lines.len() {
                    if column < lines[row].as_bytes().len() {
                        // Remainder of the current line from `column` on.
                        &lines[row].as_bytes()[column..]
                    } else {
                        // Synthesize the newline between stored lines.
                        "\n".as_bytes()
                    }
                } else {
                    // Empty slice signals end of input.
                    &[]
                }
            },
            None,
        )
        .unwrap();
    let root = tree.root_node();
    assert_eq!(
        root.to_sexp(),
        concat!(
            "(source_file ",
            "(function_item ",
            "(visibility_modifier) ",
            "name: (identifier) ",
            "parameters: (parameters) ",
            "body: (block (integer_literal))))"
        )
    );
    assert_eq!(root.kind(), "source_file");
    assert_eq!(root.has_error(), false);
    assert_eq!(root.child(0).unwrap().kind(), "function_item");
}
// UTF-16 variant of the callback-input test: the source is provided as
// u16 code-unit slices, with `[10]` (LF) synthesized at line ends.
#[test]
fn test_parsing_with_custom_utf16_input() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let lines: Vec<Vec<u16>> = ["pub fn foo() {", "  1", "}"]
        .iter()
        .map(|s| s.encode_utf16().collect())
        .collect();
    let tree = parser
        .parse_utf16_with(
            &mut |_, position| {
                let row = position.row;
                let column = position.column;
                if row < lines.len() {
                    if column < lines[row].len() {
                        &lines[row][column..]
                    } else {
                        // 10 == '\n' as a UTF-16 code unit.
                        &[10]
                    }
                } else {
                    &[]
                }
            },
            None,
        )
        .unwrap();
    let root = tree.root_node();
    assert_eq!(
        root.to_sexp(),
        "(source_file (function_item (visibility_modifier) name: (identifier) parameters: (parameters) body: (block (integer_literal))))"
    );
    assert_eq!(root.kind(), "source_file");
    assert_eq!(root.has_error(), false);
    assert_eq!(root.child(0).unwrap().kind(), "function_item");
}
// The input callback may return owned Strings (not just slices); the parser
// must accept any AsRef<[u8]> return type.
#[test]
fn test_parsing_with_callback_returning_owned_strings() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let text = b"pub fn foo() { 1 }";
    let tree = parser
        .parse_with(
            &mut |i, _| String::from_utf8(text[i..].to_vec()).unwrap(),
            None,
        )
        .unwrap();
    let root = tree.root_node();
    assert_eq!(
        root.to_sexp(),
        "(source_file (function_item (visibility_modifier) name: (identifier) parameters: (parameters) body: (block (integer_literal))))"
    );
}
// BOM handling: a leading BOM is skipped (in UTF-16 and UTF-8), becomes an
// ERROR node when edited into a non-leading position, and is skipped again
// once restored to the start — all through incremental re-parses.
#[test]
fn test_parsing_text_with_byte_order_mark() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    // Parse UTF16 text with a BOM
    let tree = parser
        .parse_utf16(
            &"\u{FEFF}fn a() {}".encode_utf16().collect::<Vec<_>>(),
            None,
        )
        .unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(source_file (function_item name: (identifier) parameters: (parameters) body: (block)))"
    );
    // BOM is one UTF-16 code unit → content starts at byte 2.
    assert_eq!(tree.root_node().start_byte(), 2);
    // Parse UTF8 text with a BOM
    let mut tree = parser.parse("\u{FEFF}fn a() {}", None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(source_file (function_item name: (identifier) parameters: (parameters) body: (block)))"
    );
    // BOM is three UTF-8 bytes → content starts at byte 3.
    assert_eq!(tree.root_node().start_byte(), 3);
    // Edit the text, inserting a character before the BOM. The BOM is now an error.
    tree.edit(&InputEdit {
        start_byte: 0,
        old_end_byte: 0,
        new_end_byte: 1,
        start_position: Point::new(0, 0),
        old_end_position: Point::new(0, 0),
        new_end_position: Point::new(0, 1),
    });
    let mut tree = parser.parse(" \u{FEFF}fn a() {}", Some(&tree)).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(source_file (ERROR (UNEXPECTED 65279)) (function_item name: (identifier) parameters: (parameters) body: (block)))"
    );
    assert_eq!(tree.root_node().start_byte(), 1);
    // Edit the text again, putting the BOM back at the beginning.
    tree.edit(&InputEdit {
        start_byte: 0,
        old_end_byte: 1,
        new_end_byte: 0,
        start_position: Point::new(0, 0),
        old_end_position: Point::new(0, 1),
        new_end_position: Point::new(0, 0),
    });
    let tree = parser.parse("\u{FEFF}fn a() {}", Some(&tree)).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(source_file (function_item name: (identifier) parameters: (parameters) body: (block)))"
    );
    assert_eq!(tree.root_node().start_byte(), 3);
}
// An invalid UTF-8 byte at the very end of the input must surface as an
// `UNEXPECTED INVALID` error node instead of crashing the lexer.
#[test]
fn test_parsing_invalid_chars_at_eof() {
    let mut parser = Parser::new();
    parser.set_language(get_language("json")).unwrap();
    // 0xdf starts a two-byte UTF-8 sequence but has no continuation byte.
    let tree = parser.parse(b"\xdf", None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(document (ERROR (UNEXPECTED INVALID)))"
    );
}
// A NUL byte inside the source must be reported as an error node; it must
// not be treated as an end-of-input terminator.
#[test]
fn test_parsing_unexpected_null_characters_within_source() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let tree = parser.parse(b"var \0 something;", None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(program (variable_declaration (ERROR (UNEXPECTED '\\0')) (variable_declarator name: (identifier))))"
    );
}
// An empty slice from the read callback signals end-of-input: parsing must
// stop at that offset even though more source text exists after it.
#[test]
fn test_parsing_ends_when_input_callback_returns_empty() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let mut i = 0;
    let source = b"abcdefghijklmnoqrs";
    let tree = parser
        .parse_with(
            &mut |offset, _| {
                i += 1;
                if offset >= 6 {
                    // Empty chunk = EOF as far as the parser is concerned.
                    b""
                } else {
                    // Otherwise serve the input in 3-byte chunks.
                    &source[offset..usize::min(source.len(), offset + 3)]
                }
            },
            None,
        )
        .unwrap();
    assert_eq!(tree.root_node().end_byte(), 6);
}
// Incremental parsing
// After inserting " || 5" near the start of the expression, an incremental
// re-parse should only re-read the edited prefix (verified via ReadRecorder).
#[test]
fn test_parsing_after_editing_beginning_of_code() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let mut code = b"123 + 456 * (10 + x);".to_vec();
    let mut tree = parser.parse(&code, None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(program (expression_statement (binary_expression ",
            "left: (number) ",
            "right: (binary_expression left: (number) right: (parenthesized_expression ",
            "(binary_expression left: (number) right: (identifier)))))))",
        )
    );
    // Insert " || 5" right after "123".
    perform_edit(
        &mut tree,
        &mut code,
        &Edit {
            position: 3,
            deleted_length: 0,
            inserted_text: b" || 5".to_vec(),
        },
    );
    let mut recorder = ReadRecorder::new(&code);
    let tree = parser
        .parse_with(&mut |i, _| recorder.read(i), Some(&tree))
        .unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(program (expression_statement (binary_expression ",
            "left: (number) ",
            "right: (binary_expression ",
            "left: (number) ",
            "right: (binary_expression ",
            "left: (number) ",
            "right: (parenthesized_expression (binary_expression left: (number) right: (identifier))))))))",
        )
    );
    // Only the edited beginning of the source should have been re-read.
    assert_eq!(recorder.strings_read(), vec!["123 || 5 "]);
}
// After appending ".d" near the end of the expression, the incremental
// re-parse should only re-read the text around the edit.
#[test]
fn test_parsing_after_editing_end_of_code() {
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let mut code = b"x * (100 + abc);".to_vec();
    let mut tree = parser.parse(&code, None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(program (expression_statement (binary_expression ",
            "left: (identifier) ",
            "right: (parenthesized_expression (binary_expression left: (number) right: (identifier))))))",
        )
    );
    // Insert ".d" just before the closing ");" so `abc` becomes `abc.d`.
    let position = code.len() - 2;
    perform_edit(
        &mut tree,
        &mut code,
        &Edit {
            position,
            deleted_length: 0,
            inserted_text: b".d".to_vec(),
        },
    );
    let mut recorder = ReadRecorder::new(&code);
    let tree = parser
        .parse_with(&mut |i, _| recorder.read(i), Some(&tree))
        .unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(program (expression_statement (binary_expression ",
            "left: (identifier) ",
            "right: (parenthesized_expression (binary_expression ",
            "left: (number) ",
            "right: (member_expression ",
            "object: (identifier) ",
            "property: (property_identifier)))))))"
        )
    );
    // Only two small regions were re-read, not the whole source.
    assert_eq!(recorder.strings_read(), vec![" * ", "abc.d)",]);
}
// Regression check: re-using an old tree for empty or whitespace-only input
// must not crash.
#[test]
fn test_parsing_empty_file_with_reused_tree() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let tree = parser.parse("", None);
    parser.parse("", tree.as_ref());
    let tree = parser.parse("\n ", None);
    parser.parse("\n ", tree.as_ref());
}
// A grammar that consults the current column: editing the line that starts a
// `do` block shifts column positions, so nodes whose shape depended on those
// columns must be re-parsed rather than blindly reused.
// NOTE(review): the embedded code snippets appear to have had their leading
// indentation stripped during extraction; the expected `strings_read` value
// below still contains leading spaces — confirm against the upstream fixture
// before reformatting these literals.
#[test]
fn test_parsing_after_editing_tree_that_depends_on_column_values() {
    let (grammar, path) = get_test_grammar("uses_current_column");
    let (grammar_name, parser_code) = generate_parser_for_grammar(&grammar).unwrap();
    let mut parser = Parser::new();
    parser
        .set_language(get_test_language(
            &grammar_name,
            &parser_code,
            path.as_ref().map(AsRef::as_ref),
        ))
        .unwrap();
    let mut code = b"
a = b
c = do d
e + f
g
h + i
"
    .to_vec();
    let mut tree = parser.parse(&code, None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(block ",
            "(binary_expression (identifier) (identifier)) ",
            "(binary_expression (identifier) (do_expression (block (identifier) (binary_expression (identifier) (identifier)) (identifier)))) ",
            "(binary_expression (identifier) (identifier)))",
        )
    );
    // Insert "1234" after the `c`, widening the `c = do d` line.
    perform_edit(
        &mut tree,
        &mut code,
        &Edit {
            position: 8,
            deleted_length: 0,
            inserted_text: b"1234".to_vec(),
        },
    );
    assert_eq!(
        code,
        b"
a = b
c1234 = do d
e + f
g
h + i
"
    );
    let mut recorder = ReadRecorder::new(&code);
    let tree = parser
        .parse_with(&mut |i, _| recorder.read(i), Some(&tree))
        .unwrap();
    // The `do` block now captures fewer statements because the column of
    // its contents no longer lines up.
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(block ",
            "(binary_expression (identifier) (identifier)) ",
            "(binary_expression (identifier) (do_expression (block (identifier)))) ",
            "(binary_expression (identifier) (identifier)) ",
            "(identifier) ",
            "(binary_expression (identifier) (identifier)))",
        )
    );
    assert_eq!(
        recorder.strings_read(),
        vec!["\nc1234 = do d\n e + f\n g\n"]
    );
}
// Undoing an edit that truncated the source in the middle of a string token
// must recover a tree identical to the original parse.
#[test]
fn test_parsing_after_detecting_error_in_the_middle_of_a_string_token() {
    let mut parser = Parser::new();
    parser.set_language(get_language("python")).unwrap();
    let mut source = b"a = b, 'c, d'".to_vec();
    let tree = parser.parse(&source, None).unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        "(module (expression_statement (assignment left: (identifier) right: (expression_list (identifier) (string (string_start) (string_content) (string_end))))))"
    );
    // Delete a suffix of the source code, starting in the middle of the string
    // literal, after some whitespace. With this deletion, the remaining string
    // content: "c, " looks like two valid python tokens: an identifier and a comma.
    // When this edit is undone, in order to correctly recover the original tree, the
    // parser needs to remember that before matching the `c` as an identifier, it
    // looked ahead several bytes, trying to find the closing quotation mark in
    // order to match the "string content" node.
    let edit_ix = std::str::from_utf8(&source).unwrap().find("d'").unwrap();
    let edit = Edit {
        position: edit_ix,
        deleted_length: source.len() - edit_ix,
        inserted_text: Vec::new(),
    };
    let undo = invert_edit(&source, &edit);
    let mut tree2 = tree.clone();
    perform_edit(&mut tree2, &mut source, &edit);
    tree2 = parser.parse(&source, Some(&tree2)).unwrap();
    assert!(tree2.root_node().has_error());
    // Apply the inverse edit and confirm the original tree is recovered.
    let mut tree3 = tree2.clone();
    perform_edit(&mut tree3, &mut source, &undo);
    tree3 = parser.parse(&source, Some(&tree3)).unwrap();
    assert_eq!(tree3.root_node().to_sexp(), tree.root_node().to_sexp(),);
}
// Thread safety
// Trees can be cloned and sent across threads: each thread edits its clone
// by prepending a different number of declarations, re-parses, and the
// resulting root child counts differ from the original accordingly.
#[test]
fn test_parsing_on_multiple_threads() {
    // Parse this source file so that each thread has a non-trivial amount of
    // work to do.
    let this_file_source = include_str!("parser_test.rs");
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let tree = parser.parse(this_file_source, None).unwrap();
    let mut parse_threads = Vec::new();
    for thread_id in 1..5 {
        let mut tree_clone = tree.clone();
        parse_threads.push(thread::spawn(move || {
            // For each thread, prepend a different number of declarations to the
            // source code.
            let mut prepend_line_count = 0;
            let mut prepended_source = String::new();
            for _ in 0..thread_id {
                // Each prepended declaration spans two lines (decl + blank).
                prepend_line_count += 2;
                prepended_source += "struct X {}\n\n";
            }
            tree_clone.edit(&InputEdit {
                start_byte: 0,
                old_end_byte: 0,
                new_end_byte: prepended_source.len(),
                start_position: Point::new(0, 0),
                old_end_position: Point::new(0, 0),
                new_end_position: Point::new(prepend_line_count, 0),
            });
            prepended_source += this_file_source;
            // Reparse using the old tree as a starting point.
            let mut parser = Parser::new();
            parser.set_language(get_language("rust")).unwrap();
            parser.parse(&prepended_source, Some(&tree_clone)).unwrap()
        }));
    }
    // Check that the trees have the expected relationship to one another.
    let trees = parse_threads
        .into_iter()
        .map(|thread| thread.join().unwrap());
    let child_count_differences = trees
        .map(|t| t.root_node().child_count() - tree.root_node().child_count())
        .collect::<Vec<_>>();
    assert_eq!(child_count_differences, &[1, 2, 3, 4]);
}
// A parse in progress can be aborted from another thread by setting the
// shared cancellation flag; `parse_with` then returns None.
#[test]
fn test_parsing_cancelled_by_another_thread() {
    let cancellation_flag = std::sync::Arc::new(AtomicUsize::new(0));
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    // NOTE(review): unsafe because the parser retains a pointer to the flag;
    // presumably it must outlive all parsing — confirm against the API docs.
    unsafe { parser.set_cancellation_flag(Some(&cancellation_flag)) };
    // Long input - parsing succeeds
    let tree = parser.parse_with(
        &mut |offset, _| {
            if offset == 0 {
                " [".as_bytes()
            } else if offset >= 20000 {
                "".as_bytes()
            } else {
                "0,".as_bytes()
            }
        },
        None,
    );
    assert!(tree.is_some());
    // Set the flag from a second thread after a short delay.
    let flag = cancellation_flag.clone();
    let cancel_thread = thread::spawn(move || {
        thread::sleep(time::Duration::from_millis(100));
        flag.store(1, Ordering::SeqCst);
    });
    // Infinite input
    let tree = parser.parse_with(
        &mut |offset, _| {
            thread::yield_now();
            thread::sleep(time::Duration::from_millis(10));
            if offset == 0 {
                b" ["
            } else {
                b"0,"
            }
        },
        None,
    );
    // Parsing returns None because it was cancelled.
    cancel_thread.join().unwrap();
    assert!(tree.is_none());
}
// Timeouts
// With a timeout set, `parse_with` returns None once the budget is exceeded;
// clearing the timeout lets the same parse resume and finish.
#[test]
#[retry(10)]
fn test_parsing_with_a_timeout() {
    let mut parser = Parser::new();
    parser.set_language(get_language("json")).unwrap();
    // Parse an infinitely-long array, but pause after 1ms of processing.
    parser.set_timeout_micros(1000);
    let start_time = time::Instant::now();
    let tree = parser.parse_with(
        &mut |offset, _| {
            if offset == 0 {
                b" ["
            } else {
                b",0"
            }
        },
        None,
    );
    assert!(tree.is_none());
    // Allow more slack on sparc64, where the timing is less precise.
    #[cfg(not(target_arch = "sparc64"))]
    assert!(start_time.elapsed().as_micros() < 2000);
    #[cfg(target_arch = "sparc64")]
    assert!(start_time.elapsed().as_micros() < 8000);
    // Continue parsing, but pause after 5 ms of processing.
    parser.set_timeout_micros(5000);
    let start_time = time::Instant::now();
    let tree = parser.parse_with(
        &mut |offset, _| {
            if offset == 0 {
                b" ["
            } else {
                b",0"
            }
        },
        None,
    );
    assert!(tree.is_none());
    assert!(start_time.elapsed().as_micros() > 100);
    assert!(start_time.elapsed().as_micros() < 10000);
    // Finish parsing
    parser.set_timeout_micros(0);
    let tree = parser
        .parse_with(
            &mut |offset, _| {
                if offset > 5000 {
                    "".as_bytes()
                } else if offset == 5000 {
                    "]".as_bytes()
                } else {
                    ",0".as_bytes()
                }
            },
            None,
        )
        .unwrap();
    assert_eq!(tree.root_node().child(0).unwrap().kind(), "array");
}
// After a timeout aborts a parse, the parser resumes from where it stopped
// unless `reset` is called; `reset` forces a fresh parse of the new text.
#[test]
#[retry(10)]
fn test_parsing_with_a_timeout_and_a_reset() {
    let mut parser = Parser::new();
    parser.set_language(get_language("json")).unwrap();
    parser.set_timeout_micros(5);
    let tree = parser.parse(
        "[\"ok\", 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
        None,
    );
    assert!(tree.is_none());
    // Without calling reset, the parser continues from where it left off, so
    // it does not see the changes to the beginning of the source code.
    parser.set_timeout_micros(0);
    let tree = parser.parse(
        "[null, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
        None,
    ).unwrap();
    // Still sees the original "ok" string, not the new `null`.
    assert_eq!(
        tree.root_node()
            .named_child(0)
            .unwrap()
            .named_child(0)
            .unwrap()
            .kind(),
        "string"
    );
    parser.set_timeout_micros(5);
    let tree = parser.parse(
        "[\"ok\", 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
        None,
    );
    assert!(tree.is_none());
    // By calling reset, we force the parser to start over from scratch so
    // that it sees the changes to the beginning of the source code.
    parser.set_timeout_micros(0);
    parser.reset();
    let tree = parser.parse(
        "[null, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
        None,
    ).unwrap();
    assert_eq!(
        tree.root_node()
            .named_child(0)
            .unwrap()
            .named_child(0)
            .unwrap()
            .kind(),
        "null"
    );
}
// Changing the parser's language implicitly resets a timed-out parse, so the
// next parse starts from scratch. Run under allocation recording to catch leaks.
#[test]
#[retry(10)]
fn test_parsing_with_a_timeout_and_implicit_reset() {
    allocations::record(|| {
        let mut parser = Parser::new();
        parser.set_language(get_language("javascript")).unwrap();
        parser.set_timeout_micros(5);
        let tree = parser.parse(
            "[\"ok\", 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
            None,
        );
        assert!(tree.is_none());
        // Changing the parser's language implicitly resets, discarding
        // the previous partial parse.
        parser.set_language(get_language("json")).unwrap();
        parser.set_timeout_micros(0);
        let tree = parser.parse(
            "[null, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
            None,
        ).unwrap();
        assert_eq!(
            tree.root_node()
                .named_child(0)
                .unwrap()
                .named_child(0)
                .unwrap()
                .kind(),
            "null"
        );
    });
}
// Dropping a parser that still holds an unfinished (timed-out) parse must
// not leak memory; `allocations::record` verifies this.
#[test]
#[retry(10)]
fn test_parsing_with_timeout_and_no_completion() {
    allocations::record(|| {
        let mut parser = Parser::new();
        parser.set_language(get_language("javascript")).unwrap();
        parser.set_timeout_micros(5);
        let tree = parser.parse(
            "[\"ok\", 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",
            None,
        );
        assert!(tree.is_none());
        // drop the parser when it has an unfinished parse
    });
}
// Included Ranges
// Parse an HTML document, then re-parse only the <script> element's raw text
// as JavaScript by restricting the parser to a single included range.
#[test]
fn test_parsing_with_one_included_range() {
    let source_code = "<span>hi</span><script>console.log('sup');</script>";
    let mut parser = Parser::new();
    parser.set_language(get_language("html")).unwrap();
    let html_tree = parser.parse(source_code, None).unwrap();
    let script_content_node = html_tree.root_node().child(1).unwrap().child(1).unwrap();
    assert_eq!(script_content_node.kind(), "raw_text");
    parser
        .set_included_ranges(&[script_content_node.range()])
        .unwrap();
    parser.set_language(get_language("javascript")).unwrap();
    let js_tree = parser.parse(source_code, None).unwrap();
    assert_eq!(
        js_tree.root_node().to_sexp(),
        concat!(
            "(program (expression_statement (call_expression ",
            "function: (member_expression object: (identifier) property: (property_identifier)) ",
            "arguments: (arguments (string)))))",
        )
    );
    // The JS tree's coordinates are relative to the whole document.
    assert_eq!(
        js_tree.root_node().start_position(),
        Point::new(0, source_code.find("console").unwrap())
    );
    assert_eq!(js_tree.included_ranges(), &[script_content_node.range()]);
}
// Parse the HTML fragments of a JS template string by passing the gaps
// between interpolations as multiple included ranges; node positions must
// line up with the original document.
#[test]
fn test_parsing_with_multiple_included_ranges() {
    let source_code = "html `<div>Hello, ${name.toUpperCase()}, it's <b>${now()}</b>.</div>`";
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    let js_tree = parser.parse(source_code, None).unwrap();
    let template_string_node = js_tree
        .root_node()
        .descendant_for_byte_range(
            source_code.find("<div>").unwrap(),
            source_code.find("Hello").unwrap(),
        )
        .unwrap();
    assert_eq!(template_string_node.kind(), "template_string");
    let open_quote_node = template_string_node.child(0).unwrap();
    let interpolation_node1 = template_string_node.child(1).unwrap();
    let interpolation_node2 = template_string_node.child(2).unwrap();
    let close_quote_node = template_string_node.child(3).unwrap();
    parser.set_language(get_language("html")).unwrap();
    // The three spans of literal HTML between the backticks/interpolations.
    let html_ranges = &[
        Range {
            start_byte: open_quote_node.end_byte(),
            start_point: open_quote_node.end_position(),
            end_byte: interpolation_node1.start_byte(),
            end_point: interpolation_node1.start_position(),
        },
        Range {
            start_byte: interpolation_node1.end_byte(),
            start_point: interpolation_node1.end_position(),
            end_byte: interpolation_node2.start_byte(),
            end_point: interpolation_node2.start_position(),
        },
        Range {
            start_byte: interpolation_node2.end_byte(),
            start_point: interpolation_node2.end_position(),
            end_byte: close_quote_node.start_byte(),
            end_point: close_quote_node.start_position(),
        },
    ];
    parser.set_included_ranges(html_ranges).unwrap();
    let html_tree = parser.parse(source_code, None).unwrap();
    assert_eq!(
        html_tree.root_node().to_sexp(),
        concat!(
            "(fragment (element",
            " (start_tag (tag_name))",
            " (text)",
            " (element (start_tag (tag_name)) (end_tag (tag_name)))",
            " (text)",
            " (end_tag (tag_name))))",
        )
    );
    assert_eq!(html_tree.included_ranges(), html_ranges);
    // All node byte offsets refer to the full source, not the sub-ranges.
    let div_element_node = html_tree.root_node().child(0).unwrap();
    let hello_text_node = div_element_node.child(1).unwrap();
    let b_element_node = div_element_node.child(2).unwrap();
    let b_start_tag_node = b_element_node.child(0).unwrap();
    let b_end_tag_node = b_element_node.child(1).unwrap();
    assert_eq!(hello_text_node.kind(), "text");
    assert_eq!(
        hello_text_node.start_byte(),
        source_code.find("Hello").unwrap()
    );
    assert_eq!(
        hello_text_node.end_byte(),
        source_code.find(" <b>").unwrap()
    );
    assert_eq!(b_start_tag_node.kind(), "start_tag");
    assert_eq!(
        b_start_tag_node.start_byte(),
        source_code.find("<b>").unwrap()
    );
    assert_eq!(
        b_start_tag_node.end_byte(),
        source_code.find("${now()}").unwrap()
    );
    assert_eq!(b_end_tag_node.kind(), "end_tag");
    assert_eq!(
        b_end_tag_node.start_byte(),
        source_code.find("</b>").unwrap()
    );
    assert_eq!(
        b_end_tag_node.end_byte(),
        source_code.find(".</div>").unwrap()
    );
}
// The included range's row/column points deliberately disagree with its byte
// offsets; the resulting tree should adopt the supplied range verbatim.
#[test]
fn test_parsing_with_included_range_containing_mismatched_positions() {
    let source_code = "<div>test</div>{_ignore_this_part_}";
    let mut parser = Parser::new();
    parser.set_language(get_language("html")).unwrap();
    let end_byte = source_code.find("{_ignore_this_part_").unwrap();
    // Points claim row 10 even though the bytes are on row 0.
    let range_to_parse = Range {
        start_byte: 0,
        start_point: Point {
            row: 10,
            column: 12,
        },
        end_byte,
        end_point: Point {
            row: 10,
            column: 12 + end_byte,
        },
    };
    parser.set_included_ranges(&[range_to_parse]).unwrap();
    let html_tree = parser
        .parse_with(&mut chunked_input(source_code, 3), None)
        .unwrap();
    assert_eq!(html_tree.root_node().range(), range_to_parse);
    assert_eq!(
        html_tree.root_node().to_sexp(),
        "(fragment (element (start_tag (tag_name)) (text) (end_tag (tag_name))))"
    );
}
// Invalid included ranges are rejected with `IncludedRangesError(i)`, where
// `i` is the index of the first offending range.
#[test]
fn test_parsing_error_in_invalid_included_ranges() {
    let mut parser = Parser::new();
    // Ranges are not ordered
    let error = parser
        .set_included_ranges(&[
            Range {
                start_byte: 23,
                end_byte: 29,
                start_point: Point::new(0, 23),
                end_point: Point::new(0, 29),
            },
            Range {
                start_byte: 0,
                end_byte: 5,
                start_point: Point::new(0, 0),
                end_point: Point::new(0, 5),
            },
            Range {
                start_byte: 50,
                end_byte: 60,
                start_point: Point::new(0, 50),
                end_point: Point::new(0, 60),
            },
        ])
        .unwrap_err();
    // Index 1 is the first range that breaks the ordering.
    assert_eq!(error, IncludedRangesError(1));
    // Range ends before it starts
    let error = parser
        .set_included_ranges(&[Range {
            start_byte: 10,
            end_byte: 5,
            start_point: Point::new(0, 10),
            end_point: Point::new(0, 5),
        }])
        .unwrap_err();
    assert_eq!(error, IncludedRangesError(0));
}
// UTF-16 parsing with an included range that ends in the middle of an
// incomplete expression: the error should be confined to the range.
#[test]
fn test_parsing_utf16_code_with_errors_at_the_end_of_an_included_range() {
    let source_code = "<script>a.</script>";
    // Widen each ASCII byte into a UTF-16 code unit.
    let utf16_source_code: Vec<u16> = source_code.as_bytes().iter().map(|c| *c as u16).collect();
    // Byte offsets are doubled because each code unit is two bytes.
    let start_byte = 2 * source_code.find("a.").unwrap();
    let end_byte = 2 * source_code.find("</script>").unwrap();
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    parser
        .set_included_ranges(&[Range {
            start_byte,
            end_byte,
            start_point: Point::new(0, start_byte),
            end_point: Point::new(0, end_byte),
        }])
        .unwrap();
    let tree = parser.parse_utf16(&utf16_source_code, None).unwrap();
    assert_eq!(tree.root_node().to_sexp(), "(program (ERROR (identifier)))");
}
// Two JS snippets embedded in template markers (`<%= ... %>`) are parsed via
// two included ranges; statement boundaries must match the ranges.
#[test]
fn test_parsing_with_external_scanner_that_uses_included_range_boundaries() {
    let source_code = "a <%= b() %> c <% d() %>";
    let range1_start_byte = source_code.find(" b() ").unwrap();
    let range1_end_byte = range1_start_byte + " b() ".len();
    let range2_start_byte = source_code.find(" d() ").unwrap();
    let range2_end_byte = range2_start_byte + " d() ".len();
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    parser
        .set_included_ranges(&[
            Range {
                start_byte: range1_start_byte,
                end_byte: range1_end_byte,
                start_point: Point::new(0, range1_start_byte),
                end_point: Point::new(0, range1_end_byte),
            },
            Range {
                start_byte: range2_start_byte,
                end_byte: range2_end_byte,
                start_point: Point::new(0, range2_start_byte),
                end_point: Point::new(0, range2_end_byte),
            },
        ])
        .unwrap();
    let tree = parser.parse(source_code, None).unwrap();
    let root = tree.root_node();
    let statement1 = root.child(0).unwrap();
    let statement2 = root.child(1).unwrap();
    assert_eq!(
        root.to_sexp(),
        concat!(
            "(program",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments)))",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments))))"
        )
    );
    assert_eq!(statement1.start_byte(), source_code.find("b()").unwrap());
    assert_eq!(statement1.end_byte(), source_code.find(" %> c").unwrap());
    assert_eq!(statement2.start_byte(), source_code.find("d()").unwrap());
    assert_eq!(statement2.end_byte(), source_code.len() - " %>".len());
}
// Re-parsing with a region newly excluded from the included ranges: the
// changed-ranges report must cover both the inserted prefix and the
// now-excluded template directive.
#[test]
fn test_parsing_with_a_newly_excluded_range() {
    let mut source_code = String::from("<div><span><%= something %></span></div>");
    // Parse HTML including the template directive, which will cause an error
    let mut parser = Parser::new();
    parser.set_language(get_language("html")).unwrap();
    let mut first_tree = parser
        .parse_with(&mut chunked_input(&source_code, 3), None)
        .unwrap();
    // Insert code at the beginning of the document.
    let prefix = "a very very long line of plain text. ";
    first_tree.edit(&InputEdit {
        start_byte: 0,
        old_end_byte: 0,
        new_end_byte: prefix.len(),
        start_position: Point::new(0, 0),
        old_end_position: Point::new(0, 0),
        new_end_position: Point::new(0, prefix.len()),
    });
    source_code.insert_str(0, prefix);
    // Parse the HTML again, this time *excluding* the template directive
    // (which has moved since the previous parse).
    let directive_start = source_code.find("<%=").unwrap();
    let directive_end = source_code.find("</span>").unwrap();
    let source_code_end = source_code.len();
    parser
        .set_included_ranges(&[
            Range {
                start_byte: 0,
                end_byte: directive_start,
                start_point: Point::new(0, 0),
                end_point: Point::new(0, directive_start),
            },
            Range {
                start_byte: directive_end,
                end_byte: source_code_end,
                start_point: Point::new(0, directive_end),
                end_point: Point::new(0, source_code_end),
            },
        ])
        .unwrap();
    let tree = parser
        .parse_with(&mut chunked_input(&source_code, 3), Some(&first_tree))
        .unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(fragment (text) (element",
            " (start_tag (tag_name))",
            " (element (start_tag (tag_name)) (end_tag (tag_name)))",
            " (end_tag (tag_name))))"
        )
    );
    assert_eq!(
        tree.changed_ranges(&first_tree).collect::<Vec<_>>(),
        vec![
            // The first range that has changed syntax is the range of the newly-inserted text.
            Range {
                start_byte: 0,
                end_byte: prefix.len(),
                start_point: Point::new(0, 0),
                end_point: Point::new(0, prefix.len()),
            },
            // Even though no edits were applied to the outer `div` element,
            // its contents have changed syntax because a range of text that
            // was previously included is now excluded.
            Range {
                start_byte: directive_start,
                end_byte: directive_end,
                start_point: Point::new(0, directive_start),
                end_point: Point::new(0, directive_end),
            },
        ]
    );
}
// Growing the set of included ranges across successive parses: newly
// included directives are parsed, and `changed_ranges` reports only the
// regions whose syntax actually changed.
#[test]
fn test_parsing_with_a_newly_included_range() {
    let source_code = "<div><%= foo() %></div><span><%= bar() %></span><%= baz() %>";
    let range1_start = source_code.find(" foo").unwrap();
    let range2_start = source_code.find(" bar").unwrap();
    let range3_start = source_code.find(" baz").unwrap();
    // Each directive body (" foo() ", etc.) is 7 bytes long.
    let range1_end = range1_start + 7;
    let range2_end = range2_start + 7;
    let range3_end = range3_start + 7;
    // Parse only the first code directive as JavaScript
    let mut parser = Parser::new();
    parser.set_language(get_language("javascript")).unwrap();
    parser
        .set_included_ranges(&[simple_range(range1_start, range1_end)])
        .unwrap();
    let tree = parser
        .parse_with(&mut chunked_input(&source_code, 3), None)
        .unwrap();
    assert_eq!(
        tree.root_node().to_sexp(),
        concat!(
            "(program",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments))))",
        )
    );
    // Parse both the first and third code directives as JavaScript, using the old tree as a
    // reference.
    parser
        .set_included_ranges(&[
            simple_range(range1_start, range1_end),
            simple_range(range3_start, range3_end),
        ])
        .unwrap();
    let tree2 = parser
        .parse_with(&mut chunked_input(&source_code, 3), Some(&tree))
        .unwrap();
    assert_eq!(
        tree2.root_node().to_sexp(),
        concat!(
            "(program",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments)))",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments))))",
        )
    );
    // Everything between the end of range 1 and the end of range 3 changed.
    assert_eq!(
        tree2.changed_ranges(&tree).collect::<Vec<_>>(),
        &[simple_range(range1_end, range3_end)]
    );
    // Parse all three code directives as JavaScript, using the old tree as a
    // reference.
    parser
        .set_included_ranges(&[
            simple_range(range1_start, range1_end),
            simple_range(range2_start, range2_end),
            simple_range(range3_start, range3_end),
        ])
        .unwrap();
    let tree3 = parser.parse(&source_code, Some(&tree)).unwrap();
    assert_eq!(
        tree3.root_node().to_sexp(),
        concat!(
            "(program",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments)))",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments)))",
            " (expression_statement (call_expression function: (identifier) arguments: (arguments))))",
        )
    );
    // Only the interior of the newly-added middle range changed.
    assert_eq!(
        tree3.changed_ranges(&tree2).collect::<Vec<_>>(),
        &[simple_range(range2_start + 1, range2_end - 1)]
    );
}
// A MISSING token inserted by error recovery must be positioned at the start
// of the first included range, not at the document origin {0, 0}.
#[test]
fn test_parsing_with_included_ranges_and_missing_tokens() {
    let (parser_name, parser_code) = generate_parser_for_grammar(
        r#"{
"name": "test_leading_missing_token",
"rules": {
"program": {
"type": "SEQ",
"members": [
{"type": "SYMBOL", "name": "A"},
{"type": "SYMBOL", "name": "b"},
{"type": "SYMBOL", "name": "c"},
{"type": "SYMBOL", "name": "A"},
{"type": "SYMBOL", "name": "b"},
{"type": "SYMBOL", "name": "c"}
]
},
"A": {"type": "SYMBOL", "name": "a"},
"a": {"type": "STRING", "value": "a"},
"b": {"type": "STRING", "value": "b"},
"c": {"type": "STRING", "value": "c"}
}
}"#,
    )
    .unwrap();
    let mut parser = Parser::new();
    parser
        .set_language(get_test_language(&parser_name, &parser_code, None))
        .unwrap();
    // There's a missing `a` token at the beginning of the code. It must be inserted
    // at the beginning of the first included range, not at {0, 0}.
    let source_code = "__bc__bc__";
    parser
        .set_included_ranges(&[
            Range {
                start_byte: 2,
                end_byte: 4,
                start_point: Point::new(0, 2),
                end_point: Point::new(0, 4),
            },
            Range {
                start_byte: 6,
                end_byte: 8,
                start_point: Point::new(0, 6),
                end_point: Point::new(0, 8),
            },
        ])
        .unwrap();
    let tree = parser.parse(source_code, None).unwrap();
    let root = tree.root_node();
    assert_eq!(
        root.to_sexp(),
        "(program (A (MISSING a)) (b) (c) (A (MISSING a)) (b) (c))"
    );
    assert_eq!(root.start_byte(), 2);
    assert_eq!(root.child(3).unwrap().start_byte(), 4);
}
// Grammars whose token patterns can match the NUL byte (used internally to
// represent EOF) must not loop forever when the input ends mid-rule. Three
// regex variants are checked: [\x00]*, [\x00-\x01]*, and [\x00-\x7F]*.
#[test]
fn test_grammars_that_can_hang_on_eof() {
    let (parser_name, parser_code) = generate_parser_for_grammar(
        r#"
{
"name": "test_single_null_char_regex",
"rules": {
"source_file": {
"type": "SEQ",
"members": [
{ "type": "STRING", "value": "\"" },
{ "type": "PATTERN", "value": "[\\x00]*" },
{ "type": "STRING", "value": "\"" }
]
}
},
"extras": [ { "type": "PATTERN", "value": "\\s" } ]
}
"#,
    )
    .unwrap();
    let mut parser = Parser::new();
    parser
        .set_language(get_test_language(&parser_name, &parser_code, None))
        .unwrap();
    // An unterminated string: the parse must finish (with errors), not hang.
    parser.parse("\"", None).unwrap();
    let (parser_name, parser_code) = generate_parser_for_grammar(
        r#"
{
"name": "test_null_char_with_next_char_regex",
"rules": {
"source_file": {
"type": "SEQ",
"members": [
{ "type": "STRING", "value": "\"" },
{ "type": "PATTERN", "value": "[\\x00-\\x01]*" },
{ "type": "STRING", "value": "\"" }
]
}
},
"extras": [ { "type": "PATTERN", "value": "\\s" } ]
}
"#,
    )
    .unwrap();
    parser
        .set_language(get_test_language(&parser_name, &parser_code, None))
        .unwrap();
    parser.parse("\"", None).unwrap();
    let (parser_name, parser_code) = generate_parser_for_grammar(
        r#"
{
"name": "test_null_char_with_range_regex",
"rules": {
"source_file": {
"type": "SEQ",
"members": [
{ "type": "STRING", "value": "\"" },
{ "type": "PATTERN", "value": "[\\x00-\\x7F]*" },
{ "type": "STRING", "value": "\"" }
]
}
},
"extras": [ { "type": "PATTERN", "value": "\\s" } ]
}
"#,
    )
    .unwrap();
    parser
        .set_language(get_test_language(&parser_name, &parser_code, None))
        .unwrap();
    parser.parse("\"", None).unwrap();
}
/// Builds a single-row `Range` from byte offsets: both points sit on row 0
/// with the byte offsets reused as column numbers.
fn simple_range(start: usize, end: usize) -> Range {
    let (start_point, end_point) = (Point::new(0, start), Point::new(0, end));
    Range {
        start_byte: start,
        end_byte: end,
        start_point,
        end_point,
    }
}
/// Returns a `parse_with`-compatible read callback that serves `text` in
/// chunks of at most `size` bytes, starting at the requested offset.
fn chunked_input<'a>(text: &'a str, size: usize) -> impl FnMut(usize, Point) -> &'a [u8] {
    move |start, _| {
        // Clamp the chunk end to the end of the text.
        let end = text.len().min(start + size);
        text[start..end].as_bytes()
    }
}
|
extern crate dialoguer;
use dialoguer::{theme::ColorfulTheme, Checkboxes, Confirmation, Editor, Input};
use std::fs::File;
use std::io::prelude::*;
use std::{env, fmt};
/// A markdown link: display text plus target URL (used here for the
/// README's screenshot image).
#[derive(Debug)]
struct Link {
    /// Alt text rendered for the link.
    text: String,
    /// Destination URL.
    url: String,
}
/// The shell commands documented in the generated README's
/// "Development" section; empty strings mean "not provided".
#[derive(Debug)]
struct Commands {
    /// Command that installs the project's dependencies.
    deps: String,
    /// Command that builds the project.
    build: String,
    /// Command that runs the unit tests.
    test: String,
    /// Command that installs the project.
    install: String,
}
impl Commands {
    /// True when none of the four command strings has been filled in.
    fn is_empty(&self) -> bool {
        [&self.deps, &self.build, &self.test, &self.install]
            .iter()
            .all(|command| command.is_empty())
    }
}
/// In-memory model of the README to generate; populated interactively by
/// `survey` and rendered to `README.md` by `save`.
#[derive(Debug)]
pub struct Readme {
    /// Project name (defaults to the current directory's basename).
    name: String,
    /// Free-form description paragraph; empty means omitted.
    description: String,
    /// Optional screenshot image (omitted when its URL is empty).
    image: Link,
    /// Development commands to document; empty fields are omitted.
    commands: Commands,
    /// Usage example shown in a fenced code block; empty means omitted.
    usage: String,
}
impl Readme {
pub fn new() -> Self {
let current_dir = env::current_dir().unwrap();
let name = current_dir.file_name().unwrap();
let name_str = name.to_str().unwrap();
Readme {
name: String::from(name_str),
description: String::from(""),
image: Link {
text: String::from("screenshot"),
url: String::from(""),
},
commands: Commands {
deps: String::from(""),
build: String::from(""),
test: String::from(""),
install: String::from(""),
},
usage: String::from(""),
}
}
pub fn survey(&mut self) {
self.name = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Project's name")
.default(self.name.clone())
.interact()
.unwrap();
if Confirmation::new()
.with_text("Would you like to add a description?")
.interact()
.unwrap()
{
if let Some(desc) = Editor::new().edit("").unwrap() {
self.description = desc;
}
}
if Confirmation::new()
.with_text("Would you like to add an image?")
.interact()
.unwrap()
{
self.image.text = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Image's alt text")
.default(self.image.text.clone())
.interact()
.unwrap();
self.image.url = Input::with_theme(&ColorfulTheme::default())
.with_prompt("Image's URL")
.default(self.image.url.clone())
.interact()
.unwrap();
}
let readme_commands = &[
"Command to install the dependencies",
"Command to build the project",
"Command to run the units tests",
"Command to install the project",
];
let commands_to_prompt_for = Checkboxes::with_theme(&ColorfulTheme::default())
.with_prompt("Choose the commands you want to add")
.items(&readme_commands[..])
.interact()
.unwrap();
if !commands_to_prompt_for.is_empty() {
for command in commands_to_prompt_for {
match command {
0 => {
self.commands.deps = Input::with_theme(&ColorfulTheme::default())
.with_prompt(readme_commands[command])
.default(self.commands.deps.clone())
.interact()
.unwrap()
}
1 => {
self.commands.build = Input::with_theme(&ColorfulTheme::default())
.with_prompt(readme_commands[command])
.default(self.commands.build.clone())
.interact()
.unwrap()
}
2 => {
self.commands.test = Input::with_theme(&ColorfulTheme::default())
.with_prompt(readme_commands[command])
.default(self.commands.test.clone())
.interact()
.unwrap()
}
3 => {
self.commands.install = Input::with_theme(&ColorfulTheme::default())
.with_prompt(readme_commands[command])
.default(self.commands.install.clone())
.interact()
.unwrap()
}
_ => panic!("unknown selection"),
}
}
}
if Confirmation::new()
.with_text("Would you like to add a usage example?")
.interact()
.unwrap()
{
if let Some(us) = Editor::new().edit("").unwrap() {
self.usage = us;
}
}
}
pub fn save(&self) {
let mut readme = String::new();
readme.push_str(&fmt::format(format_args!("# {}\n", self.name)));
if !self.description.is_empty() {
readme.push_str(&self.description);
readme.push_str("\n")
}
if !self.image.url.is_empty() {
readme.push_str(&fmt::format(format_args!(
"\n\n",
self.image.text, self.image.url
)));
}
if !self.commands.is_empty() {
readme.push_str("\n# Development\n\n");
if !self.commands.deps.is_empty() {
readme.push_str("## Dependencies\n");
readme.push_str(&fmt::format(format_args!(
"```\n{}\n```\n\n",
self.commands.deps
)));
}
if !self.commands.build.is_empty() {
readme.push_str("## Building\n");
readme.push_str(&fmt::format(format_args!(
"```\n{}\n```\n\n",
self.commands.build
)));
}
if !self.commands.test.is_empty() {
readme.push_str("## Testing\n");
readme.push_str(&fmt::format(format_args!(
"```\n{}\n```\n\n",
self.commands.test
)));
}
if !self.commands.install.is_empty() {
readme.push_str("## Installing\n");
readme.push_str(&fmt::format(format_args!(
"```\n{}\n```\n\n",
self.commands.install
)));
}
}
if !self.usage.is_empty() {
readme.push_str("# Usage\n");
readme.push_str(&fmt::format(format_args!("```\n{}\n```\n", self.usage)));
}
let mut file = File::create("README.md").unwrap();
let _ = file.write_all(readme.as_bytes());
}
}
|
impl Solution {
pub fn xor_operation(n: i32, start: i32) -> i32 {
let mut res = 0;
for i in 0..n{
let num = start + 2 * i;
res ^= num;
}
res
}
} |
/// These sizes determine the maximum number of tables, columns and rows,
/// but also have a big impact on memory and disk usage.
/// Changing any of these is a backward-incompatible, breaking change.
// The lower-case, C-typedef-style names are intentional; the `noinspection`
// markers below suppress the IDE's type-alias naming lint.
/// Table pointer size (>=1)
//noinspection RsTypeAliasNaming
pub type uTab = u16;
/// Column pointer size within table (>=1)
//noinspection RsTypeAliasNaming
pub type uCol = u16;
/// Row pointer size within table (>=1)
//noinspection RsTypeAliasNaming
pub type uRow = u32;
/// Global string interning pool pointer size (>=1)
//noinspection RsTypeAliasNaming
pub type uIntrn = u32;
/// Strongly-typed index of a table (newtype over [`uTab`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Hash, Default)]
pub struct TableIx {
    index: uTab,
}
impl From<TableIx> for usize {
    /// Widen to `usize` so the index can be used directly on slices/Vecs.
    fn from(value: TableIx) -> usize {
        value.index as usize
    }
}
/// Strongly-typed index of a column within a table (newtype over [`uCol`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Hash, Default)]
pub struct ColumnIx {
    index: uCol,
}
impl From<ColumnIx> for usize {
    /// Widen to `usize` so the index can be used directly on slices/Vecs.
    fn from(value: ColumnIx) -> usize {
        value.index as usize
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Hash, Default)]
pub struct RowIx {
index: uTab,
}
impl From<RowIx> for usize {
fn from(value: RowIx) -> usize {
value.index as usize
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Hash, Default)]
pub struct InternIx {
index: uTab,
}
impl From<InternIx> for usize {
fn from(value: InternIx) -> usize {
value.index as usize
}
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Sanity check: every pointer-size alias can index into a Vec.
    #[test]
    fn test_indexing() {
        let data = vec![1, 2, 3, 4, 5, 6, 7, 8];
        let index: uTab = 7;
        assert_eq!(8, data[index as usize]);
        let index: uCol = 7;
        assert_eq!(8, data[index as usize]);
        let index: uRow = 7;
        assert_eq!(8, data[index as usize]);
        // There is no `uStr` alias; the interning-pool alias is `uIntrn`.
        let index: uIntrn = 7;
        assert_eq!(8, data[index as usize]);
    }
}
|
pub use self::array1::Array1;
pub use self::array2::Array2;
pub mod ops;
mod array1;
mod array2;
|
use super::*;
use nom::character::complete::anychar;
use nom::multi::many_till;
/// Parse the character that follows a backslash escape.
///
/// `\n` and `\\` map to their escaped values; any other character is passed
/// through unchanged (so an unknown escape degrades to the raw character).
pub fn control_character(input: LexInput) -> IResult<LexInput, char> {
    alt((
        map(char_parse('n'), |_| '\n'),
        map(char_parse('\\'), |_| '\\'),
        // Fallback: accept any character verbatim.
        anychar,
    ))(input)
}
/// Lex a single-line string delimited by matching `'` or `"` quotes.
///
/// Backslash escapes are resolved via [`control_character`]; content ends at
/// the first unescaped occurrence of the opening quote character.
pub fn short_string(input: LexInput) -> IResult<LexInput, Lex> {
    // The opening quote decides which quote character terminates the string.
    let (input, start_quote) = alt((char_parse('\''), char_parse('"')))(input)?;
    let (input, loc) = position(input)?;
    // Characters that cannot appear as plain content: the active quote and `\`.
    let no_quotes: &str = &(start_quote.to_string() + "\\");
    // A content character is either a plain non-special character or a
    // backslash followed by an escape. (The previous identity `map`s around
    // these parsers were no-ops and have been dropped.)
    let parse_out_short_string = alt((
        none_of(no_quotes),
        preceded(char_parse('\\'), control_character),
    ));
    let (input, string_contents) = many0(parse_out_short_string)(input)?;
    // Consume the matching closing quote.
    let (input, _) = char_parse(start_quote)(input)?;
    Ok((
        input,
        Lex {
            location: loc,
            val: LexValue::Str(string_contents.iter().collect()),
        },
    ))
}
/// Lex a long (bracketed) string: `[==[ … ]==]` with any number of `=` signs.
///
/// The closing delimiter must carry exactly as many `=` signs as the opener.
pub fn long_string(input: LexInput) -> IResult<LexInput, Lex> {
    use parse::utils::surrounded;
    // First find the opening brackets and count the `=` "level" between them.
    let (input, equals_level) = surrounded(tag("["), many0(char_parse('=')), tag("["))(input)?;
    let (input, loc) = position(input)?;
    // Build the matching closing delimiter, e.g. level 2 -> "]==]".
    let eq: String = equals_level.into_iter().collect();
    let cl_bkt = format!("]{}]", eq);
    let closing_bracket = tag(cl_bkt.as_str());
    // Everything up to (but not including) the closing delimiter is content.
    let (input, (string_contents, _)) = many_till(anychar, closing_bracket)(input)?;
    Ok((
        input,
        Lex {
            location: loc,
            val: LexValue::Str(string_contents.into_iter().collect()),
        },
    ))
}
/// Lex either a short (quoted) or long (bracketed) string literal.
pub fn parse_string(input: LexInput) -> IResult<LexInput, Lex> {
    alt((short_string, long_string))(input)
}
// pub fn parse_string(input: &str) -> IResult<&str, Lex> {
// match parse_out_string_contents(input) {
// None => Err(Err::Error((input, ErrorKind::Tag))),
// Some((
// string_contents,
// remainder
// // OMG the next line is a mess
// )) => Ok((&remainder, Lex::Str(string_contents.to_string())))
// }
// }
|
// run with `cargo test -- --nocapture` for the logs
#[cfg(test)]
mod tests {
    use force_a2dp::parser;
    /// Feed real `pactl`-style output for a Bluetooth card and check that the
    /// MAC address and card name are both extracted.
    #[test]
    fn test_actual_card() {
        let txt = r#" 3 card(s) available.
index: 13
name: <bluez_card.1A_11_11_1A_11_11>
driver: <module-bluez5-device.c>
owner module: 40
properties:
device.description = "Bose"
device.string = "1A:11:11:1A:11:11"
device.api = "bluez"
device.class = "sound"
device.bus = "bluetooth"
device.form_factor = "headphone"
bluez.path = "/org/bluez/hci0/dev_1A_11_11_1A_11_11"
bluez.class = "0x240418"
bluez.alias = "Bose"
device.icon_name = "audio-headphones-bluetooth"
profiles:
headset_head_unit: Headset Head Unit (HSP/HFP) (priority 30, available: unknown)
a2dp_sink: High Fidelity Playback (A2DP Sink) (priority 40, available: yes)
off: Off (priority 0, available: yes)
active profile: <a2dp_sink>
sinks:
bluez_sink.1A_11_11_1A_11_11.a2dp_sink/#28: Bose
sources:
bluez_sink.1A_11_11_1A_11_11.a2dp_sink.monitor/#45: Monitor of Bose
ports:
headphone-output: Headphone (priority 0, latency offset 0 usec, available: yes)
properties:
headphone-input: Bluetooth Input (priority 0, latency offset 0 usec, available: unknown)
properties:
"#;
        let card = parser::get_bt_card_info(txt).unwrap();
        println!("CARD: {:?}", card);
        // The parser returns a (MAC, card-name) pair.
        let mac = card.0;
        let sink = card.1;
        assert_eq!("1A:11:11:1A:11:11", &mac);
        assert_eq!("bluez_card.1A_11_11_1A_11_11", &sink);
    }
    /// A non-Bluetooth (ALSA/PCI) card must produce no result.
    #[test]
    fn test_no_card() {
        let txt = r#" index: 3
name: <alsa_card.pci-xxxxx>
driver: <module-alsa-card.c>
owner module: xx
properties:
alsa.card = "0"
alsa.card_name = "xxx"
alsa.long_card_name = " xx xx"
alsa.driver_name = "snd_hda_intel"
device.bus_path = "pci-xxxx:xx:xx.x"
sysfs.path = "/devices/pcixxxx:xx/xxxx:xx:xx.x/sound/cardx"
device.bus = "pci"
device.vendor.id = "xx"
device.vendor.name = "xx Corporation"
device.product.id = "xxxx"
device.form_factor = "internal"
device.string = "0"
device.description = "Built-in Audio"
module-udev-detect.discovered = "1"
device.icon_name = "audio-card-pci"
"#;
        let card = parser::get_bt_card_info(txt);
        assert_eq!(None, card);
    }
}
|
/// # Mutation Queries
///
/// Designed to reduce boilerplate when updating tables.
/// The query does nothing special, just calls the provided methods on the tables
/// obtained by `unsafe_view::<key, value>()`.
///
/// ## Safety
///
/// Updates on Storage are an unsafe operation. Be sure that no other threads have write access to
/// the tables you're mutating!
///
/// ```
/// use caolo_sim::query;
/// use caolo_sim::prelude::*;
///
/// let mut store = futures_lite::future::block_on( SimpleExecutor.initialize(caolo_sim::executor::GameConfig {
/// world_radius: 1,
/// room_radius: 10,
/// ..Default::default()
/// },
/// ));
///
/// let entity_1 = store.insert_entity();
/// let entity_2 = store.insert_entity();
///
/// query!(
/// mutate
/// store
/// {
/// EntityId, Bot, .insert(entity_1);
/// EntityId, Bot, .insert(entity_2);
/// EntityId, CarryComponent,
/// .insert(entity_1, CarryComponent{carry: 12, carry_max: 69});
/// EntityId, CarryComponent,
/// .insert(entity_2, CarryComponent{carry: 0, carry_max: 69});
/// }
/// );
/// ```
// Expansion sketch: each `Id, Row, .op(args)...` line becomes
// `store.unsafe_view::<Id, Row>().op(args)...` — a plain method-call chain on
// the unsafely obtained view (see the safety note in the doc comment above).
#[macro_export]
macro_rules! query {
    (
        mutate
        $store: ident
        {
            $(
                $id: ty, $row: ty, $(.$op: ident ( $($args: expr),* ))*
            );*;
        }
    ) => {
        {
            $(
                // One statement per `id, row` line in the invocation.
                $store.unsafe_view::<$id, $row>()
                . $(
                    $op($($args),*)
                ).*
            );*
        }
    };
}
///
///# Examples
///
///## Join iterators
///
///```
/// use caolo_sim::query;
/// use caolo_sim::prelude::*;
/// use caolo_sim::join;
/// use caolo_sim::tables::JoinIterator;
///
/// let mut store = futures_lite::future::block_on( SimpleExecutor.initialize(caolo_sim::executor::GameConfig {
/// world_radius: 1,
/// room_radius: 10,
/// ..Default::default()
/// },
/// ));
///
/// let entity_1 = store.insert_entity();
/// let entity_2 = store.insert_entity();
/// let entity_3 = store.insert_entity();
///
/// // Initialize entities ...
///
/// query!(
/// mutate
/// store
/// {
/// EntityId, Bot, .insert(entity_1);
/// EntityId, Bot, .insert(entity_2);
///
/// EntityId, PositionComponent, .insert(entity_1, PositionComponent::default());
/// EntityId, PositionComponent, .insert(entity_2, PositionComponent::default());
/// EntityId, PositionComponent, .insert(entity_3, PositionComponent::default());
///
/// // notice how entity_3 is not a bot, but has carry
///
/// EntityId, CarryComponent,
/// .insert(entity_1, CarryComponent{carry: 12, carry_max: 69});
/// EntityId, CarryComponent,
/// .insert(entity_2, CarryComponent{carry: 30, carry_max: 69});
/// EntityId, CarryComponent,
/// .insert(entity_3, CarryComponent{carry: 40, carry_max: 69});
/// }
/// );
///
/// let bot_table = store.view::<EntityId, Bot>();
/// let bot = bot_table.iter();
/// let pos_table = store.view::<EntityId, PositionComponent>();
/// let pos = pos_table.iter();
/// let carry_table = store.view::<EntityId, CarryComponent>();
/// let car = carry_table.iter();
///
/// let res: i32 = join!( [ bot , pos , car ])
/// // data has fields `carry` and `bot`, specified in the macro invocation
/// // these are references to their respective components...
/// // we'll extract the carry amount
/// //
/// // pos_components are default (0,0), we access them for demo purposes...
/// .map(|(id, (bot, pos, car))|{ car.carry as i32 + pos.0.pos.q })
/// .sum();
///
/// assert_eq!(res, 42); // entity_1 carry + entity_2 carry
///```
///
///## Join on storage
///
///```
/// use caolo_sim::query;
/// use caolo_sim::prelude::*;
///
/// // these rows are mandatory
/// use caolo_sim::join;
/// use caolo_sim::tables::JoinIterator;
///
/// let mut store = futures_lite::future::block_on( SimpleExecutor.initialize(caolo_sim::executor::GameConfig {
/// world_radius: 1,
/// room_radius: 10,
/// ..Default::default()
/// },
/// ));
///
/// let entity_1 = store.insert_entity();
/// let entity_2 = store.insert_entity();
/// let entity_3 = store.insert_entity();
///
/// query!(
/// mutate
/// store
/// {
/// EntityId, Bot, .insert(entity_1);
/// EntityId, Bot, .insert(entity_2);
///
/// EntityId, PositionComponent, .insert(entity_1, PositionComponent::default());
/// EntityId, PositionComponent, .insert(entity_2, PositionComponent::default());
/// EntityId, PositionComponent, .insert(entity_3, PositionComponent::default());
///
/// // notice how entity_3 is not a bot, but has carry
///
/// EntityId, CarryComponent,
/// .insert(entity_1, CarryComponent{carry: 12, carry_max: 69});
/// EntityId, CarryComponent,
/// .insert(entity_2, CarryComponent{carry: 30, carry_max: 69});
/// EntityId, CarryComponent,
/// .insert(entity_3, CarryComponent{carry: 40, carry_max: 69});
/// }
/// );
///
/// let res: i32 = join!(
/// store
/// EntityId
/// [ bot : Bot,
/// pos_component : PositionComponent,
/// carry_component : CarryComponent ]
/// )
/// // data has fields `carry` and `bot`, specified in the macro invocation
/// // these are references to their respective components...
/// // we'll extract the carry amount
/// //
/// // pos_components are default (0,0), we access them for demo purposes...
/// .map(|(id, (_bot_component, pos_component, carry_component))| {
/// carry_component.carry as i32 + pos_component.0.pos.q
/// })
/// .sum();
///
/// assert_eq!(res, 42); // entity_1 carry + entity_2 carry
///```
// Two public entry forms plus three internal (`@`-prefixed) helper rules:
//   [a, b, c]              — join already-constructed iterators
//   storage Id [n: Row, …] — obtain the iterators from `storage.view()` first
// Both produce `(id, (a, b, c, …))` with the nested pair-tuples flattened.
#[macro_export]
macro_rules! join {
    (
        [
            $it0: ident,
            $(
                $its: ident
            ),+
        ]
    ) => {{
        join!(@iter $it0, $($its),*)
            .map(
                // closure taking id and a nested tuple of pairs
                |(
                    id,
                    join!(@args $it0, $($its),*)
                )| {
                    (id,
                        // flatten the tuple
                        ($it0, $($its),*)
                    )
                }
            )
    }};
    (
        $storage: ident
        $id: ty
        [
            $name0: ident : $row0: ty,
            $(
                $names: ident : $rows: ty
            ),+
        ]
    ) => {{
        join!(@join $storage $id, $row0, $($rows),*)
            .map(
                // closure taking id and a nested tuple of pairs
                |(
                    id,
                    join!(@args $name0, $($names),*)
                )| {
                    (id,
                        // flatten the tuple
                        ($name0, $($names),*)
                    )
                }
            )
    }};
    // @iter: right-fold a list of iterators into nested JoinIterators.
    (@iter $it: ident) => {
        $it
    };
    (@iter $head: ident,
        $(
            $tail: ident
        ),+
    ) => {
        JoinIterator::new(
            $head,
            join!(@iter $($tail),*)
        )
    };
    // @join: same fold, but builds each iterator from `$storage.view()`.
    (@join $storage: ident $id: ty, $row: ty) => {
        // stop the recursion
        $storage.view::<$id, $row>().iter()
    };
    (@join $storage: ident $id: ty, $row0: ty,
        $(
            $rows: ty
        ),+
    ) => {
        JoinIterator::new(
            $storage.view::<$id, $row0>().iter(),
            join!(@join $storage $id, $($rows),*)
        )
    };
    // @args: the nested-tuple *pattern* matching the shape @iter/@join produce.
    (@args $name: ident) => {
        // stop the recursion
        $name
    };
    (@args $name0: ident, $( $names: ident),+) => {
        // nested tuple arguments
        (
            $name0,
            join!(@args $( $names),*)
        )
    };
}
/// # `iterby`
///
///`iterby` will create `iterby_<name>` methods, that will iterate on the given table and output
/// a tuple (struct) with all optional fields.
/// To use this the given table must have an `iter` method returning a pair of (key, value), only
/// for keys that are in the given table.
/// Will call `get` for all other tables.
///
/// For this reason I do not recommend using this often, as hand written `join!`-s can be a lot
/// more performant.
///
/// This is mostly here for serialization, when communicating with clients.
// Generates a module `$module` containing an `Archetype` struct with one
// table field per `table … = name` entry, plus `Component`/`HasTable` impls
// (via the internal `@implement_tables` rule) and derive-driven storage glue.
#[macro_export(local_inner_macros)]
macro_rules! archetype {
    (
        module $module: ident
        key $id: ty,
        $(
            $(attr $attr: meta )* table $row: ty : $table_ty: ty = $name: ident
        ),*
        $(
            iterby $it: ident
        )*
    ) => {
        pub mod $module {
            use super::*;
            use crate::tables::{Table, Component};
            use crate::storage::views::{UnsafeView, View};
            use crate::storage::{HasTable, DeleteById, DeferredDeleteById};
            pub type Key = $id;
            #[derive(cao_storage_derive::CaoStorage, Default)]
            $(
                #[cao_storage_table($id, $name, $row)]
            )*
            $(
                // Each `iterby` entry asks the derive to emit an `iterby_<it>` method.
                #[cao_storage_iterby($it, $id)]
            )*
            pub struct Archetype {
                $( $(#[ $attr ] )*
                pub(crate) $name: <$row as crate::tables::Component<$id>>::Table ),
                +,
            }
            archetype!(@implement_tables $($name, $id, $row, $table_ty )*);
            impl Archetype {
                #[allow(unused)]
                #[allow(clippy::too_many_arguments)]
                pub fn new(
                    $(
                        $name: <$row as crate::tables::Component<$id>>::Table
                    ),*
                ) -> Self {
                    Self {
                        $( $name ),*
                    }
                }
            }
        }
    };
    // Internal rule: wire every (name, id, row, table) tuple into the
    // Component/HasTable trait machinery so views can find their table field.
    (
        @implement_tables
        $($name: ident, $id: ty, $row: ty, $table_ty: ty )*
    ) => {
        $(
            impl Component<$id> for $row {
                type Table=$table_ty;
            }
            impl HasTable<$id, $row> for Archetype {
                fn view(&'_ self) -> View<'_, $id, $row>{
                    View::from_table(&self.$name)
                }
                fn unsafe_view(&mut self) -> UnsafeView<$id, $row>{
                    UnsafeView::from_table(&mut self.$name)
                }
            }
        )*
    };
}
/// Create an empty struct that can be used as a Storage key
#[macro_export(local_inner_macros)]
macro_rules! empty_key {
    ($name: ident) => {
        #[derive(
            Debug,
            Clone,
            Default,
            Ord,
            PartialOrd,
            Eq,
            PartialEq,
            Copy,
            Hash,
            Serialize,
            Deserialize,
        )]
        pub struct $name;
        // NOTE(review): a field-less unit struct is already `Send`, so this
        // unsafe impl looks redundant — confirm whether it can be removed.
        unsafe impl Send for $name {}
    };
}
|
//! # duo-m3u
//! duoのm3uを`~/Downloads`に書き出し、オープンします。
//! ### usage
//! ```bash
//! $ duo-m3u [Disc01|Disc02|Disc03|Disc04|Disc05|review] [skip num] [take num]
//! ```
use glob::glob;
use std::env;
use std::process::Command;
use std::process;
extern crate my_lib;
/// make m3u file and open.
///
/// Usage: `duo-m3u [Disc01|Disc02|Disc03|Disc04|Disc05|review] [skip num] [take num]`.
/// Exits with status 1 on a wrong argument count or non-numeric counts
/// instead of panicking.
fn main() {
    // args
    let args: Vec<String> = env::args().collect();
    if args.len() != 4 {
        println!("usage: \nduo-m3u [Disc01|Disc02|Disc03|Disc04|Disc05|review] [skip num] [take num]");
        process::exit(1);
    }
    // Parse the counts up front; bad user input is a usage error, not a panic.
    let skip: usize = args[2].parse().unwrap_or_else(|_| {
        println!("[skip num] must be a non-negative integer, got {:?}", args[2]);
        process::exit(1);
    });
    let take: usize = args[3].parse().unwrap_or_else(|_| {
        println!("[take num] must be a non-negative integer, got {:?}", args[3]);
        process::exit(1);
    });
    let disc = &args[1];
    // pattern: the review disc lives in its own directory.
    let pattern = if disc == "review" {
        String::from("/Volumes/MySD/iTunes/iTunes Media/Music/DUO3.0/DUO3.0CD復習用/*.m4a")
    } else {
        format!("{}{}{}", "/Volumes/MySD/iTunes/iTunes Media/Music/DUO3.0/DUO3.0CD基礎用", disc, "/*.m4a")
    };
    let output_file = "/Users/seijiro/Downloads/duo.m3u";
    // make m3u file (`output_file` is already a &str; no extra `&` needed)
    make_m3u(output_file, &pattern, take, skip);
    // and open it with the default player (macOS `open`)
    let _ = Command::new("open")
        .arg(output_file)
        .output()
        .expect("failed to execute process");
}
/// Build an `.m3u` playlist from the files matching `pattern`, skipping the
/// first `skip` matches and keeping at most `take`, then write the result to
/// `output_file` (best effort — a failed write is ignored).
fn make_m3u(output_file: &str, pattern: &str, take: usize, skip: usize) {
    let mut playlist = String::from("#EXTM3U\n");
    for entry in glob(pattern).expect("Failed").skip(skip).take(take) {
        // Report unreadable entries and keep going with the rest.
        let path = match entry {
            Ok(p) => p,
            Err(e) => {
                println!("{:?}", e);
                continue;
            }
        };
        playlist = push_m3u_element(&playlist, path.to_str().unwrap());
    }
    let _ = my_lib::write_file(output_file, &playlist);
}
/// Append one playlist entry — an `#EXTINF` line followed by the file path —
/// to `m3u`, returning the extended playlist. Target shape:
///
/// `#EXTINF:1450,just do it.mp4`
/// `/Volumes/smb/sdd1/video/youtube/just do it.mp4`
fn push_m3u_element(m3u: &str, elem: &str) -> String {
    let mut playlist = m3u.to_string();
    playlist.push_str("#EXTINF:-1,a.m4a\n");
    playlist.push_str(elem);
    playlist.push('\n');
    playlist
}
|
extern crate futures;
extern crate hyper;
extern crate rust_tags;
mod pages;
use futures::future::Future;
use hyper::{Method, StatusCode};
use hyper::header::ContentType;
use hyper::mime;
use hyper::server::{Http, Request, Response, Service};
use pages::*;
/// Zero-sized request handler; one fresh instance is created per connection
/// by the closure passed to `Http::bind` in `main`.
struct JasonLongshore;
impl Service for JasonLongshore {
    type Request = Request;
    type Response = Response;
    type Error = hyper::Error;
    // The future representing the eventual Response your call will
    // resolve to. This can change to whatever Future you need.
    type Future = Box<Future<Item=Self::Response, Error=Self::Error>>;
    /// Route a request by (method, path); every route is a GET and unknown
    /// paths fall through to the 404 page.
    fn call(&self, req: Request) -> Self::Future {
        let html = ContentType(mime::TEXT_HTML_UTF_8);
        let css = ContentType(mime::TEXT_CSS);
        let mut response = Response::new();
        // Pick (content type, body) per route.
        let (ct, body) = match (req.method(), req.path()) {
            (&Method::Get, "/") => (html, page_home::content()),
            (&Method::Get, "/css") => (css, page_css::content()),
            (&Method::Get, "/projects") => (html, page_projects::content()),
            (&Method::Get, "/blog") => (html, page_blog::content()),
            (&Method::Get, "/contact") => (html, page_contact::content()),
            _ => {
                // Mark the status before handing back the 404 body.
                response.set_status(StatusCode::NotFound);
                (html, page_404::content())
            },
        };
        let mut response = response.with_header(ct);
        response.set_body(body);
        // The response is ready immediately; wrap it in a resolved future.
        Box::new(futures::future::ok(response))
    }
}
/// Bind the site to port 3000 on all interfaces and serve forever.
fn main() {
    let listen_addr = "0.0.0.0:3000";
    let socket_addr = listen_addr.parse().unwrap();
    let server = Http::new()
        .bind(&socket_addr, || Ok(JasonLongshore))
        .unwrap();
    println!("listening on {}", listen_addr);
    server.run().unwrap();
}
|
#![feature(test)]
extern crate test;
extern crate bit_vec;
use bit_vec::BitVec;
use std::collections::HashSet;
// See: https://users.rust-lang.org/t/integer-square-root-algorithm/13529/14
/// Approximate integer square root: truncated float sqrt for small results,
/// with one extra Newton refinement step for larger values (where f64
/// precision is no longer exact).
fn isqrt(x: usize) -> usize {
    let root = (x as f64).sqrt() as usize;
    if root <= 4096 {
        root
    } else {
        (x / root + root) / 2
    }
}
/// Approximation of the prime counting function, pi(x) ~ x / ln(x).
fn pi(x: f64) -> usize {
    let approx = x / x.ln();
    approx as usize
}
/// Sieve of Atkin: return the primes below `limit` (with 2, 3, 5 seeded
/// manually), printing a summary and — when found — the 10001st prime.
///
/// Numbers are classified by `n % 60`; each residue class toggles the sieve
/// bit once per solution of its quadratic form, after which perfect-square
/// multiples of the survivors are cleared.
fn atkin(limit: usize) -> Vec<usize> {
    // Attempt at implementing a sieve of Atkin
    // We want the 10001'st prime. We know that pi(x) ~ x/log(x), so we solve
    // for x, and (choosing the sensible solution) find that x ~ 117,000.
    // pi(x) provides an upper bound, so we're free to just take that.
    // false means composite
    let mut sieve = BitVec::from_elem(limit, false);
    // pi(limit) approximates the prime count, so it is a good capacity hint.
    let mut results: Vec<usize> = Vec::with_capacity(pi(limit as f64));
    let s1: HashSet<usize> = [1, 13, 17, 29, 37, 41, 49, 53].iter().cloned().collect();
    let s2: HashSet<usize> = [7, 19, 31, 43].iter().cloned().collect();
    let s3: HashSet<usize> = [11, 23, 47, 59].iter().cloned().collect();
    let len = sieve.len();
    results.push(2);
    results.push(3);
    results.push(5);
    for i in 0..len {
        // sieve index i represents the candidate n = i + 1
        let n = i + 1;
        let r = n % 60;
        if s1.contains(&r) {
            // toggle once per solution of 4x^2 + y^2 = n with y odd
            let y_lim = isqrt(n) + 1;
            let ys = (0..y_lim).filter(|y| y % 2 == 1);
            for y in ys {
                let yp = y.pow(2);
                let x_lim = isqrt((n - yp) / 4) + 1;
                for x in 0..x_lim {
                    if 4 * x.pow(2) + yp == n {
                        // flip the bit (the old if/else pair did exactly this)
                        let flipped = !sieve[i];
                        sieve.set(i, flipped);
                    }
                }
            }
        } else if s2.contains(&r) {
            // toggle once per solution of 3x^2 + y^2 = n with x odd, y even
            let y_lim = isqrt(n) + 1;
            let ys = (0..y_lim).filter(|y| y % 2 == 0);
            for y in ys {
                let yp = y.pow(2);
                let x_lim = isqrt((n - yp) / 3) + 1;
                let xs = (1..x_lim).filter(|x| x % 2 == 1);
                for x in xs {
                    if 3 * x.pow(2) + yp == n {
                        let flipped = !sieve[i];
                        sieve.set(i, flipped);
                    }
                }
            }
        } else if s3.contains(&r) {
            // toggle once per solution of 3x^2 - y^2 = n with x > y
            // derived with the help of woooolfram alpha
            let x_lim = (isqrt(2 * n + 3) + 1) / 2;
            let xs = 1..x_lim;
            for x in xs {
                for y in 0..x {
                    if 3 * x.pow(2) - y.pow(2) == n {
                        let flipped = !sieve[i];
                        sieve.set(i, flipped);
                    }
                }
            }
        }
    }
    // Collect survivors and clear multiples of their squares.
    for i in 1..len {
        let n = i + 1;
        if sieve[i] {
            results.push(n);
            let sqr = n.pow(2);
            for j in 1..limit / sqr + 1 {
                sieve.set(j * sqr - 1, false);
            }
        }
    }
    println!("There are {} primes less than {}", results.len(), limit);
    // 10001st prime -->
    if results.len() > 10000 {
        println!("The 10001st prime is {}", results[10000]);
    }
    results
}
fn main() {
    // 140 000 comfortably exceeds the ~117 000 estimate for the 10001st prime.
    atkin(140000);
}
#[cfg(test)]
mod tests {
    use super::*;
    use test::Bencher;
    /// Generate a `#[bench]` that times `atkin` at a fixed limit; replaces
    /// ten hand-copied, otherwise identical benchmark functions.
    macro_rules! bench_atkin {
        ($name: ident, $limit: expr) => {
            #[bench]
            fn $name(b: &mut Bencher) {
                b.iter(|| {
                    atkin($limit);
                });
            }
        };
    }
    bench_atkin!(test_128, 128);
    bench_atkin!(test_256, 256);
    bench_atkin!(test_512, 512);
    bench_atkin!(test_1024, 1024);
    bench_atkin!(test_2048, 2048);
    bench_atkin!(test_4096, 4096);
    bench_atkin!(test_8192, 8192);
    bench_atkin!(test_16384, 16384);
    bench_atkin!(test_32768, 32768);
    bench_atkin!(test_65536, 65536);
}
|
use crate::level9::Computer;
use std::collections::VecDeque;
use std::collections::HashMap;
use std::error::Error;
/// Snapshot of the game, cloneable so past states can be saved and restored
/// when the ball slips past the paddle (see `part1`).
#[derive(Clone)]
struct State {
    computer: Computer,
    // Incremented once per ball update (one "frame") in `part1`.
    tick: i32,
    // Sparse screen keyed (y, x) -> tile id, as inserted by `part1`.
    screen: HashMap<(i64,i64), i64>,
    paddle: (i64, i64),
    ball: (i64, i64)
}
impl State {
    /// Render the screen plus tick/paddle/ball diagnostics to stdout.
    fn draw(&self) {
        draw(&self.screen);
        println!("Tick: {}", self.tick);
        println!("Paddle: {:?}", self.paddle);
        println!("Ball: {:?}", self.ball);
    }
}
/// Play the Intcode arcade game (level 13): track the ball and paddle from
/// the program's output, steer the joystick toward the ball, and rewind to a
/// saved snapshot whenever the ball gets past the paddle.
pub fn part1() -> Result<(), Box<dyn Error>> {
    let mut joystick = VecDeque::new();
    let mut score = 0;
    let mut debug = false;
    let mut current_state: State = State {
        computer: Computer::new("src/level13/input.txt"),
        tick: 0,
        screen: HashMap::new(),
        paddle: (0,0),
        ball: (0,0)
    };
    // Snapshots keyed by tick, used to rewind after a miss.
    let mut states: HashMap<i32, State> = HashMap::new();
    // (tick, x, y) samples taken when the ball is one row from the paddle —
    // the interception targets the joystick steers toward.
    let mut ball_positions: Vec<(i32, i64, i64)> = vec![];
    joystick.push_back(0);
    // The program emits output as (x, y, tile-id) triples.
    while let Some(x) = current_state.computer.execute(&mut joystick.clone())?.pop_front() {
        let y = current_state.computer.execute(&mut joystick.clone())?.pop_front().unwrap();
        let id = current_state.computer.execute(&mut joystick.clone())?.pop_front().unwrap();
        // (-1, 0) is the score channel, not a screen coordinate.
        if x == -1 && y == 0 {
            score = id;
            continue;
        } else {
            if id == 3 {
                // tile id 3 = paddle
                current_state.paddle = (x,y);
            } else if id == 4 {
                // tile id 4 = ball; a ball update marks one frame.
                current_state.ball = (x,y);
                current_state.tick += 1;
            }
            current_state.screen.insert((y,x), id);
        }
        // Only react to ball movements; other tile updates need no input.
        if id != 4 {
            continue;
        }
        if current_state.ball.1 == current_state.paddle.1-1 {
            let (x,y) = current_state.ball;
            // Replace any previously recorded target for this tick.
            ball_positions = ball_positions.into_iter().filter(|(tick,_,_)| *tick != current_state.tick).collect();
            ball_positions.push((current_state.tick, x, y));
            ball_positions.sort();
        }
        if current_state.ball.1 == current_state.paddle.1+1 {
            // Missed: the ball is past the paddle. Rewind proportionally to
            // how far away the paddle was (plus slack), but never before tick 1.
            let delta = ((current_state.paddle.0 - current_state.ball.0).abs() + 4) as i32;
            current_state = states.get(&(std::cmp::max(1, current_state.tick-delta))).unwrap().clone();
            debug = true;
        }
        joystick.clear();
        joystick.push_back(0);
        // Steer toward the next recorded interception point, if any.
        for &(tick, x, y) in &ball_positions {
            if current_state.tick > tick {
                continue;
            }
            println!("Target: {},{}", x,y);
            joystick.clear();
            if current_state.paddle.0 < x {
                joystick.push_back(1);
            } else if current_state.paddle.0 > x {
                joystick.push_back(-1);
            } else {
                joystick.push_back(0);
            }
            break;
        }
        states.insert(current_state.tick, current_state.clone());
        current_state.draw();
        if debug {
            println!("{:?}", ball_positions);
            // std::io::stdin().read_line(&mut String::new())?;
        }
    }
    println!("SCORE: {}", score);
    Ok(())
}
/// Clear the terminal with the ANSI "erase display" escape sequence
/// (same bytes as the previous `println!`: the sequence plus a newline).
fn clear() {
    print!("\x1b[2J\n")
}
/// Render the sparse screen to stdout, row by row over the bounding box of
/// all occupied cells (the box always includes the origin, since the extrema
/// start at zero). Missing cells default to tile 0 (blank).
fn draw(grid: &HashMap<(i64, i64), i64>) {
    clear();
    let mut min_i: i64 = 0;
    let mut min_j: i64 = 0;
    let mut max_i: i64 = 0;
    let mut max_j: i64 = 0;
    for &(i, j) in grid.keys() {
        min_i = min_i.min(i);
        min_j = min_j.min(j);
        max_i = max_i.max(i);
        max_j = max_j.max(j);
    }
    let h = max_i - min_i;
    let w = max_j - min_j;
    for i in 0..=h {
        for j in 0..=w {
            // BUG FIX: mapping a display offset back to a grid key must ADD
            // the minimum (`min + offset`); the old `i - min_i` only worked
            // while the minima were 0 and walks off the grid for negative
            // coordinates.
            let val = *grid.get(&(i + min_i, j + min_j)).unwrap_or(&0);
            // Tiles 1 and 2 are both drawn as 'X'; 3 is the paddle glyph,
            // 4 the ball; anything else is printed numerically.
            match val {
                0 => print!(" "),
                1 | 2 => print!("X"),
                3 => print!("_"),
                4 => print!("O"),
                other => print!("{}", other),
            }
        }
        println!();
    }
}
|
use super::super::prelude::CCINT;
// WinUser.h:3839 => #define CW_USEDEFAULT ((int)0x80000000)
pub static UseDefault : CCINT = 0x80000000; |
//! The core game engine is in this crate.
pub mod block;
pub mod entity;
pub mod maths;
pub mod resource;
pub mod side;
pub mod util;
pub mod timing;
pub mod vertexattrib;
|
/// Zero-length array marker intended to sit where a C-style flexible-array
/// payload begins; it occupies no space itself.
#[derive(Default)]
pub struct FlexArrayField<T>([T; 0]);
/// View over a struct followed in memory by `len()` trailing elements of `T`.
pub trait FlexArray<T> {
    /// Number of elements in the trailing flexible array.
    fn len(&self) -> usize;
    /// The zero-sized marker at the position where the payload starts.
    fn flex_element<'r>(&'r self) -> &'r FlexArrayField<T>;
    /// Reinterpret the memory at the marker as a slice of `len()` `T`s.
    fn as_slice<'r>(&'r self) -> &'r [T] {
        use std::mem::transmute;
        // SAFETY(review): sound only if `flex_element` really marks the start
        // of `len()` live, properly-aligned `T`s — the implementor must
        // guarantee both; nothing here checks it. TODO confirm all impls.
        unsafe { transmute::<&[T], _>(std::slice::from_raw_parts(transmute(self.flex_element()), self.len())) }
    }
}
/// Pointer to the flexible-array payload that begins immediately after a
/// sized header of type `T` (i.e. one whole `T` past `item`).
///
/// # Safety
/// The caller must guarantee that valid `R` data actually lives directly
/// after `*item` and that the resulting pointer is properly aligned for `R`.
pub(crate) unsafe fn flexarray_get_array_start<T, R>(item: &T) -> *const R
where
    T: Sized,
{
    // `add(1)` advances by exactly size_of::<T>() bytes; a pointer cast is
    // the idiomatic replacement for transmuting between raw pointer types.
    (item as *const T).add(1).cast()
}
/// Mutable variant of [`flexarray_get_array_start`]: pointer to the payload
/// directly after a sized header of type `T`.
///
/// # Safety
/// Same contract as the const variant, plus the caller must have exclusive
/// access to the payload for the lifetime of the returned pointer's use.
pub(crate) unsafe fn flexarray_get_array_start_mut<T, R>(item: &mut T) -> *mut R
where
    T: Sized,
{
    (item as *mut T).add(1).cast()
}
|
use regex::Regex;
use lazy_static::*;
use crate::fraction::*;
// This ensures the regexes are compiled only once
lazy_static! {
    // Mixed number: optional sign, whole part, '_', then "num/den" (e.g. "-1_2/3").
    static ref MIXED_NUMBER_RE: Regex = Regex::new(r"^(\-?\d+)_(\d+/\d+)$").unwrap();
    // A bare (possibly negative) integer.
    static ref NUMBER_RE: Regex = Regex::new(r"^(\-?\d+)$").unwrap();
}
/// Parses the given expression as a Fraction
/// Using fractions directly makes easier to implement the arithmetic
///
/// Accepts three shapes: a mixed number (`"1_2/3"`), a plain fraction
/// (`"2/3"`), or a whole number (`"7"`); anything else is an error.
pub fn parse_as_fraction(mixed_number_exp: &str) -> Result<Fraction, &str> {
    // Match and capture in one pass; the previous `is_match` + `captures`
    // ran the same regex twice.
    if let Some(captures) = MIXED_NUMBER_RE.captures(mixed_number_exp) {
        let fraction = Fraction::parse_fraction(captures.get(2).unwrap().as_str())?;
        Fraction::new_mixed(
            // Group 1 matched `-?\d+`, so this parse cannot fail.
            captures.get(1).unwrap().as_str().parse().unwrap(),
            fraction.numerator,
            fraction.denominator
        )
    } else if Fraction::is_fraction(mixed_number_exp) {
        Fraction::parse_fraction(mixed_number_exp)
    } else if NUMBER_RE.is_match(mixed_number_exp) {
        Fraction::new_whole(mixed_number_exp.parse().unwrap())
    } else {
        Err("Unparseable mixed number!")
    }
}
#[cfg(test)]
mod tests;
|
use std::time::Instant;
use rand::distributions::{Distribution};
/// Roll a fair six-sided die until `how_many_sixes_roll` sixes have appeared,
/// returning the total number of rolls performed.
///
/// Note: for an input of 0 this still performs (and counts) one roll, since
/// the stop condition is only checked after a throw.
pub fn roll_dices (how_many_sixes_roll: i32) -> i32 {
    let mut rng = rand::thread_rng();
    // Uniform over 1..7, i.e. the faces 1-6.
    let die = rand::distributions::Uniform::from(1..7);
    let mut found = 0;
    let mut tries = 0;
    loop {
        tries += 1;
        let throw = die.sample(&mut rng);
        if throw == 6 {
            found += 1;
        }
        // BUG FIX: `found > how_many_sixes_roll` only stopped after one six
        // TOO MANY had been rolled; `>=` stops at exactly the requested count.
        if found >= how_many_sixes_roll {
            break;
        }
    }
    tries
}
/// Time how long it takes to roll 100 000 sixes and print the result.
pub fn time_duration_tests() {
    let timer = Instant::now();
    let target_sixes = 100000;
    let tries = roll_dices(target_sixes);
    let elapsed = timer.elapsed();
    println!("In order to roll {} sixes you needed {} tries. Time elapsed in expensive_function() is: {:?}", target_sixes, tries, elapsed);
}
|
use ethereum_types::{H160, U256};
use std::fmt::Display;
use std::fmt::Formatter;
/// A signed transaction; the fields mirror the classic Ethereum wire format
/// (nonce, gas, optional recipient, value, data, and the v/r/s signature).
#[derive(Debug, Default, Deserialize, Clone, PartialEq, Serialize, Hash, Eq, PartialOrd, Ord)]
pub struct Transaction {
    /// Nonce
    pub nonce: U256,
    /// Gas Price
    pub gas_price: U256,
    /// Start Gas
    pub start_gas: U256,
    /// Recipient
    /// If None, then this is a contract creation
    pub to: Option<H160>,
    /// Transferred value
    pub value: U256,
    /// Data
    pub data: Vec<u8>,
    /// The standardised V field of the signature.
    pub v: U256,
    /// The R field of the signature.
    pub r: U256,
    /// The S field of the signature.
    pub s: U256,
}
impl Display for Transaction {
    /// User-facing output simply delegates to the pretty-printed `Debug` form.
    fn fmt(&self, f: &mut Formatter) -> core::fmt::Result {
        write!(f, "{:#?}", self)
    }
}
|
#![no_std]
pub use blake::{Blake224, Blake256, Blake28, Blake32, Blake384, Blake48, Blake512, Blake64};
pub use blake2::{Blake2b, Blake2s};
pub use edonr::{EdonR224, EdonR256, EdonR384, EdonR512};
pub use keccak::{
Keccak224, Keccak256, Keccak384, Keccak512, KeccakF1600, KeccakF200, KeccakF400, KeccakF800,
};
pub use md2::Md2;
pub use md4::Md4;
pub use md5::Md5;
pub use ripemd::{Ripemd128, Ripemd160, Ripemd256, Ripemd320};
pub use sha0::Sha0;
pub use sha1::Sha1;
pub use sha2::{Sha224, Sha256, Sha384, Sha512, Sha512Trunc224, Sha512Trunc256};
pub use sha3::{Sha3_224, Sha3_256, Sha3_384, Sha3_512, Shake128, Shake256};
pub use util::Hash;
|
fn main() {
    // a list of nos
    let v = vec![10, 20, 30];
    // `print_vector` takes its argument by value, so passing `v` directly
    // would move it and make the later `v[0]` a compile error (as the old
    // comment noted). Hand the function its own copy instead.
    print_vector(v.clone());
    println!("{}", v[0]);
}
/// Print the vector with Debug formatting.
///
/// NOTE(review): takes `Vec<i32>` by value and therefore consumes the
/// caller's vector; a `&[i32]` parameter would avoid forcing callers to clone.
fn print_vector(x:Vec<i32>){
    println!("Inside print_vector function {:?}",x);
}
pub mod zmq;
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// Enum of typed errors for method `create_logs_index`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CreateLogsIndexError {
    Status400(crate::models::LogsApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    UnknownValue(serde_json::Value),
}
/// Enum of typed errors for method `get_logs_index`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetLogsIndexError {
    Status403(crate::models::ApiErrorResponse),
    Status404(crate::models::LogsApiErrorResponse),
    UnknownValue(serde_json::Value),
}
/// Enum of typed errors for method `get_logs_index_order`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum GetLogsIndexOrderError {
    Status403(crate::models::ApiErrorResponse),
    UnknownValue(serde_json::Value),
}
/// Enum of typed errors for method `list_log_indexes`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ListLogIndexesError {
    Status403(crate::models::ApiErrorResponse),
    UnknownValue(serde_json::Value),
}
/// Enum of typed errors for method `update_logs_index`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum UpdateLogsIndexError {
    Status400(crate::models::LogsApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    Status429(crate::models::LogsApiErrorResponse),
    UnknownValue(serde_json::Value),
}
/// Enum of typed errors for method `update_logs_index_order`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum UpdateLogsIndexOrderError {
    Status400(crate::models::LogsApiErrorResponse),
    Status403(crate::models::ApiErrorResponse),
    UnknownValue(serde_json::Value),
}
/// Creates a new index. Returns the Index object passed in the request body when the request is successful.
pub async fn create_logs_index(configuration: &configuration::Configuration, body: crate::models::LogsIndex) -> Result<crate::models::LogsIndex, Error<CreateLogsIndexError>> {
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/api/v1/logs/config/indexes", configuration.base_path);
    let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // Attach the configured API key (with optional prefix) as DD-API-KEY.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
    };
    // NOTE(review): this reads `configuration.api_key` again for the
    // DD-APPLICATION-KEY header (generated code) — confirm whether the
    // application key should come from a separate configuration field.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
    };
    local_var_req_builder = local_var_req_builder.json(&body);
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Success: deserialize the created index; failure: wrap status + body.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<CreateLogsIndexError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Get one log index from your organization. This endpoint takes no JSON arguments.
pub async fn get_logs_index(configuration: &configuration::Configuration, name: &str) -> Result<crate::models::LogsIndex, Error<GetLogsIndexError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/logs/config/indexes/{name}", configuration.base_path, name=crate::apis::urlencode(name));
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetLogsIndexError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Get the current order of your log indexes. This endpoint takes no JSON arguments.
pub async fn get_logs_index_order(configuration: &configuration::Configuration, ) -> Result<crate::models::LogsIndexesOrder, Error<GetLogsIndexOrderError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/logs/config/index-order", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<GetLogsIndexOrderError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// The Index object describes the configuration of a log index. This endpoint returns an array of the `LogIndex` objects of your organization.
pub async fn list_log_indexes(configuration: &configuration::Configuration, ) -> Result<crate::models::LogsIndexListResponse, Error<ListLogIndexesError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/logs/config/indexes", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<ListLogIndexesError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Update an index as identified by its name. Returns the Index object passed in the request body when the request is successful. Using the `PUT` method updates your index’s configuration by **replacing** your current configuration with the new one sent to your Datadog organization.
pub async fn update_logs_index(configuration: &configuration::Configuration, name: &str, body: crate::models::LogsIndexUpdateRequest) -> Result<crate::models::LogsIndex, Error<UpdateLogsIndexError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/logs/config/indexes/{name}", configuration.base_path, name=crate::apis::urlencode(name));
let mut local_var_req_builder = local_var_client.put(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&body);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<UpdateLogsIndexError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// This endpoint updates the index order of your organization. It returns the index order object passed in the request body when the request is successful.
pub async fn update_logs_index_order(configuration: &configuration::Configuration, body: crate::models::LogsIndexesOrder) -> Result<crate::models::LogsIndexesOrder, Error<UpdateLogsIndexOrderError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/api/v1/logs/config/index-order", configuration.base_path);
let mut local_var_req_builder = local_var_client.put(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-API-KEY", local_var_value);
};
if let Some(ref local_var_apikey) = configuration.api_key {
let local_var_key = local_var_apikey.key.clone();
let local_var_value = match local_var_apikey.prefix {
Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
None => local_var_key,
};
local_var_req_builder = local_var_req_builder.header("DD-APPLICATION-KEY", local_var_value);
};
local_var_req_builder = local_var_req_builder.json(&body);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<UpdateLogsIndexOrderError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
|
use core::MatrixArray;
use core::dimension::{U2, U3};
use geometry::RotationBase;
/// A `D`-dimensional rotation matrix with scalar type `N`, backed by a
/// statically-sized `MatrixArray` storage of shape `D × D`.
pub type Rotation<N, D> = RotationBase<N, D, MatrixArray<N, D, D>>;
/// A 2-dimensional rotation matrix.
pub type Rotation2<N> = Rotation<N, U2>;
/// A 3-dimensional rotation matrix.
pub type Rotation3<N> = Rotation<N, U3>;
|
// This file is part of Substrate.
// Copyright (C) 2019-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::fs;
use ansi_term::Color;
use tempfile::tempdir;
/// Render `message` for the terminal: bold red when color output is enabled,
/// otherwise the message unchanged.
fn print_error_message(message: &str) -> String {
    match super::color_output_enabled() {
        true => Color::Red.bold().paint(message).to_string(),
        false => message.into(),
    }
}
/// Checks that all prerequisites are installed.
///
/// # Returns
/// Returns `None` if everything was found and `Some(ERR_MSG)` if something could not be found.
pub fn check() -> Option<String> {
    if check_nightly_installed() {
        // Nightly is present; the wasm toolchain check reports its own error.
        check_wasm_toolchain_installed()
    } else {
        Some(print_error_message("Rust nightly not installed, please install it!"))
    }
}
/// Returns `true` when the cargo located by `crate::get_nightly_cargo()`
/// reports itself as a nightly toolchain.
fn check_nightly_installed() -> bool {
    crate::get_nightly_cargo().is_nightly()
}
/// Verify the `wasm32-unknown-unknown` target works by compiling a throwaway
/// cdylib crate in a temp directory with the nightly cargo.
///
/// Returns `None` on success, `Some(error_message)` when the build cannot be
/// launched or fails (with cargo's stderr appended when it is valid UTF-8).
fn check_wasm_toolchain_installed() -> Option<String> {
    // Scaffold a minimal crate: Cargo.toml + src/lib.rs.
    let temp = tempdir().expect("Creating temp dir does not fail; qed");
    fs::create_dir_all(temp.path().join("src")).expect("Creating src dir does not fail; qed");
    let test_file = temp.path().join("src/lib.rs");
    let manifest_path = temp.path().join("Cargo.toml");
    fs::write(
        &manifest_path,
        r#"
[package]
name = "wasm-test"
version = "1.0.0"
edition = "2018"
[lib]
name = "wasm_test"
crate-type = ["cdylib"]
[workspace]
"#,
    )
    .expect("Writing wasm-test manifest does not fail; qed");
    fs::write(&test_file, "pub fn test() {}").expect("Writing to the test file does not fail; qed");

    let err_msg = print_error_message("Rust WASM toolchain not installed, please install it!");
    let manifest_path = manifest_path.display().to_string();
    let mut build_cmd = crate::get_nightly_cargo().command();
    build_cmd.args(&[
        "build",
        "--target=wasm32-unknown-unknown",
        "--manifest-path",
        &manifest_path,
    ]);
    if super::color_output_enabled() {
        build_cmd.arg("--color=always");
    }

    // Could not even spawn cargo: report the generic toolchain error.
    let output = match build_cmd.output() {
        Ok(output) => output,
        Err(_) => return Some(err_msg),
    };
    if output.status.success() {
        return None;
    }
    // Build failed: prefer a targeted hint for the known rust-lld case,
    // otherwise attach cargo's stderr to the generic message.
    Some(match String::from_utf8(output.stderr) {
        Ok(ref err) if err.contains("linker `rust-lld` not found") =>
            print_error_message("`rust-lld` not found, please install it!"),
        Ok(ref err) => format!(
            "{}\n\n{}\n{}\n{}{}\n",
            err_msg,
            Color::Yellow.bold().paint("Further error information:"),
            Color::Yellow.bold().paint("-".repeat(60)),
            err,
            Color::Yellow.bold().paint("-".repeat(60)),
        ),
        Err(_) => err_msg,
    })
}
|
use crate::irust::IRustError;
use crossterm::{style::Color, terminal::ClearType};
mod raw;
use raw::Raw;
use std::{cell::RefCell, rc::Rc};
/// Terminal output writer: wraps the shared raw output handle and remembers
/// the last foreground color sent, so consecutive writes in the same color can
/// skip redundant color escape sequences.
#[derive(Debug, Clone)]
pub struct Writer<W: std::io::Write> {
    // Last foreground color set on the terminal; `None` until the first write.
    last_color: Option<Color>,
    pub raw: Raw<W>,
}
impl<W: std::io::Write> Writer<W> {
    /// Build a writer over a shared output handle; no color has been set yet.
    pub fn new(raw: Rc<RefCell<W>>) -> Self {
        let raw = Raw { raw };
        Self {
            last_color: None,
            raw,
        }
    }
    /// Write `out` in `color`, advancing `cursor` character by character.
    pub fn write(
        &mut self,
        out: &str,
        color: Color,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        // Performance: set_fg only when needed
        if self.last_color != Some(color) {
            self.raw.set_fg(color)?;
        }
        for c in out.chars() {
            self.write_char(c, cursor)?;
        }
        self.last_color = Some(color);
        Ok(())
    }
    /// Write a single character in `color` (same color-caching as `write`).
    pub fn write_char_with_color(
        &mut self,
        c: char,
        color: Color,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        // Performance: set_fg only when needed
        if self.last_color != Some(color) {
            self.raw.set_fg(color)?;
        }
        self.write_char(c, cursor)?;
        self.last_color = Some(color);
        Ok(())
    }
    /// Write one character in the current color and update the tracked cursor
    /// position, wrapping to the next row at the terminal's right edge.
    pub fn write_char(
        &mut self,
        c: char,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        self.raw.write(c)?;
        // Performance: Make sure to not move the cursor if cursor_pos = last_cursor_pos+1 because it moves automatically
        if cursor.pos.current_pos.0 == cursor.bound.width - 1 {
            // Wrap: column resets to 4 — presumably the input-prompt width;
            // TODO(review) confirm against the prompt rendering code.
            cursor.pos.current_pos.0 = 4;
            cursor.pos.current_pos.1 += 1;
            cursor.goto_internal_pos();
        } else {
            cursor.pos.current_pos.0 += 1;
            // tab move the cursor by 4
            // need to adjust the screen cursor
            if c == '\t' {
                cursor.goto_internal_pos();
            }
        }
        Ok(())
    }
    /// Move the cursor to `(x, y)` and write `s` there in the current color.
    pub fn write_at(
        &mut self,
        s: &str,
        x: usize,
        y: usize,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        cursor.goto(x, y);
        self.raw.write(s)?;
        Ok(())
    }
    /// Write `s` in `color` at `(x, y)`, then restore the cursor position and
    /// reset the terminal color. Note: does NOT update `last_color`.
    pub fn write_at_no_cursor(
        &mut self,
        s: &str,
        color: Color,
        x: usize,
        y: usize,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        self.raw.set_fg(color)?;
        let origin_pos = cursor.pos.current_pos;
        self.write_at(s, x, y, cursor)?;
        cursor.goto(origin_pos.0, origin_pos.1);
        self.raw.reset_color()?;
        Ok(())
    }
    /// Write `out` in `color` starting from column 0 of the current row.
    pub fn write_from_terminal_start(
        &mut self,
        out: &str,
        color: Color,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        cursor.goto(0, cursor.pos.current_pos.1);
        self.write(out, color, cursor)?;
        Ok(())
    }
    /// Advance to a fresh input row below the buffer, scrolling the terminal
    /// up by one row first when already on the last row.
    pub fn write_newline(
        &mut self,
        cursor: &mut super::cursor::Cursor<W>,
        buffer: &crate::irust::buffer::Buffer,
    ) -> Result<(), IRustError> {
        cursor.move_to_input_last_row(buffer);
        // check for scroll
        if cursor.is_at_last_terminal_row() {
            self.scroll_up(1, cursor);
        }
        cursor.move_down(1);
        cursor.use_current_row_as_starting_row();
        Ok(())
    }
    /// Clear the whole screen and reset cursor bookkeeping; the cursor lands
    /// at column 4 of row 0 (the input start column used by `write_char`).
    pub fn clear(&mut self, cursor: &mut super::cursor::Cursor<W>) -> Result<(), IRustError> {
        self.raw.clear(ClearType::All)?;
        cursor.pos.starting_pos = (0, 0);
        cursor.goto(4, 0);
        cursor.bound.reset();
        //self.print_input()?;
        Ok(())
    }
    /// Erase the bottom terminal row, then restore the cursor position.
    pub fn clear_last_line(
        &mut self,
        cursor: &mut super::cursor::Cursor<W>,
    ) -> Result<(), IRustError> {
        let origin_pos = cursor.pos.current_pos;
        cursor.goto(0, cursor.bound.height - 1);
        self.raw.clear(ClearType::CurrentLine)?;
        cursor.goto(origin_pos.0, origin_pos.1);
        Ok(())
    }
    /// Scroll the terminal contents up by `n` rows and shift the tracked
    /// cursor/starting positions to match (clamped at row 0).
    pub fn scroll_up(&mut self, n: usize, cursor: &mut super::cursor::Cursor<W>) {
        self.raw.scroll_up(n as u16).expect("failed to scroll-up");
        cursor.move_up(n as u16);
        cursor.pos.starting_pos.1 = cursor.pos.starting_pos.1.saturating_sub(n);
    }
}
|
use crate::completions::{
file_completions::file_path_completion, Completer, CompletionOptions, SortBy,
};
use nu_parser::{trim_quotes, FlatShape};
use nu_protocol::{
engine::{EngineState, StateWorkingSet},
Span,
};
use reedline::Suggestion;
use std::sync::Arc;
/// Completer for command names (internal commands, aliases, and executables
/// found on PATH), driven by the parser's flattened expression shapes.
pub struct CommandCompletion {
    // Engine state; used here to read env vars (PATH, PWD).
    engine_state: Arc<EngineState>,
    // Flattened (span, shape) pairs for the line being completed.
    flattened: Vec<(Span, FlatShape)>,
    // Index of the flattened element under the cursor.
    flat_idx: usize,
    // Shape of the element under the cursor.
    flat_shape: FlatShape,
}
impl CommandCompletion {
    /// Build a completer; the working set argument is currently unused.
    pub fn new(
        engine_state: Arc<EngineState>,
        _: &StateWorkingSet,
        flattened: Vec<(Span, FlatShape)>,
        flat_idx: usize,
        flat_shape: FlatShape,
    ) -> Self {
        Self {
            engine_state,
            flattened,
            flat_idx,
            flat_shape,
        }
    }
    /// Scan every directory listed in the PATH env var for executable files
    /// whose name starts with `prefix`; returns the deduplicated names.
    /// Missing or non-list PATH and unreadable directories yield no results.
    fn external_command_completion(&self, prefix: &str) -> Vec<String> {
        let mut executables = vec![];
        let paths = self.engine_state.env_vars.get("PATH");
        if let Some(paths) = paths {
            if let Ok(paths) = paths.as_list() {
                for path in paths {
                    let path = path.as_string().unwrap_or_default();
                    if let Ok(mut contents) = std::fs::read_dir(path) {
                        while let Some(Ok(item)) = contents.next() {
                            // Accept only entries that are (1) not already
                            // collected, (2) prefix matches, (3) executable.
                            if !executables.contains(
                                &item
                                    .path()
                                    .file_name()
                                    .map(|x| x.to_string_lossy().to_string())
                                    .unwrap_or_default(),
                            ) && matches!(
                                item.path()
                                    .file_name()
                                    .map(|x| x.to_string_lossy().starts_with(prefix)),
                                Some(true)
                            ) && is_executable::is_executable(&item.path())
                            {
                                if let Ok(name) = item.file_name().into_string() {
                                    executables.push(name);
                                }
                            }
                        }
                    }
                }
            }
        }
        executables
    }
    /// Complete command names for the text covered by `span`.
    ///
    /// Internal commands and aliases matching the prefix are always included;
    /// externals from PATH are added only when `find_externals` is set. An
    /// external whose suggestion equals one already present is added with a
    /// `^` prefix instead (nushell's explicit-external sigil).
    fn complete_commands(
        &self,
        working_set: &StateWorkingSet,
        span: Span,
        offset: usize,
        find_externals: bool,
    ) -> Vec<Suggestion> {
        let prefix = working_set.get_span_contents(span);
        // Internal commands matching the prefix.
        let results = working_set
            .find_commands_by_prefix(prefix)
            .into_iter()
            .map(move |x| Suggestion {
                value: String::from_utf8_lossy(&x.0).to_string(),
                description: x.1,
                extra: None,
                span: reedline::Span {
                    start: span.start - offset,
                    end: span.end - offset,
                },
            });
        // Aliases matching the prefix (no descriptions available).
        let results_aliases =
            working_set
                .find_aliases_by_prefix(prefix)
                .into_iter()
                .map(move |x| Suggestion {
                    value: String::from_utf8_lossy(&x).to_string(),
                    description: None,
                    extra: None,
                    span: reedline::Span {
                        start: span.start - offset,
                        end: span.end - offset,
                    },
                });
        let mut results = results.chain(results_aliases).collect::<Vec<_>>();
        // Re-fetch the prefix as a string for the external lookup.
        let prefix = working_set.get_span_contents(span);
        let prefix = String::from_utf8_lossy(prefix).to_string();
        let results = if find_externals {
            let results_external =
                self.external_command_completion(&prefix)
                    .into_iter()
                    .map(move |x| Suggestion {
                        value: x,
                        description: None,
                        extra: None,
                        span: reedline::Span {
                            start: span.start - offset,
                            end: span.end - offset,
                        },
                    });
            for external in results_external {
                if results.contains(&external) {
                    // Name collides with an existing suggestion: offer the
                    // caret-prefixed external form as well.
                    results.push(Suggestion {
                        value: format!("^{}", external.value),
                        description: None,
                        extra: None,
                        span: external.span,
                    })
                } else {
                    results.push(external)
                }
            }
            results
        } else {
            results
        };
        results
    }
}
impl Completer for CommandCompletion {
    /// Produce suggestions at `pos`: first try subcommand completion over the
    /// span from the nearest command-like shape up to the cursor; if that
    /// yields nothing, fall back to command completion plus file-path
    /// completion relative to PWD.
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        prefix: Vec<u8>,
        span: Span,
        offset: usize,
        pos: usize,
    ) -> (Vec<Suggestion>, CompletionOptions) {
        // Walk backwards from the cursor over command-like shapes to find the
        // earliest element that could still belong to this (sub)command.
        let last = self
            .flattened
            .iter()
            .rev()
            .skip_while(|x| x.0.end > pos)
            .take_while(|x| {
                matches!(
                    x.1,
                    FlatShape::InternalCall
                        | FlatShape::External
                        | FlatShape::ExternalArg
                        | FlatShape::Literal
                        | FlatShape::String
                )
            })
            .last();
        // Options
        let options = CompletionOptions::new(true, true, SortBy::LevenshteinDistance);
        // The last item here would be the earliest shape that could possible by part of this subcommand
        let subcommands = if let Some(last) = last {
            // Internal (sub)commands only — externals excluded here.
            self.complete_commands(
                working_set,
                Span {
                    start: last.0.start,
                    end: pos,
                },
                offset,
                false,
            )
        } else {
            vec![]
        };
        if !subcommands.is_empty() {
            return (subcommands, options);
        }
        // Complete commands (including externals) only when the cursor sits on
        // a command-position shape or in an empty span.
        let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
            || matches!(self.flat_shape, nu_parser::FlatShape::InternalCall)
            || ((span.end - span.start) == 0)
        {
            // we're in a gap or at a command
            self.complete_commands(working_set, span, offset, true)
        } else {
            vec![]
        };
        // Current working directory for file-path completion; empty when PWD
        // is unset or not a string.
        let cwd = if let Some(d) = self.engine_state.env_vars.get("PWD") {
            match d.as_string() {
                Ok(s) => s,
                Err(_) => "".to_string(),
            }
        } else {
            "".to_string()
        };
        // The byte just before the span, used below to detect an existing `^`.
        let preceding_byte = if span.start > offset {
            working_set
                .get_span_contents(Span {
                    start: span.start - 1,
                    end: span.start,
                })
                .to_vec()
        } else {
            vec![]
        };
        // let prefix = working_set.get_span_contents(flat.0);
        let prefix = String::from_utf8_lossy(&prefix).to_string();
        let output = file_path_completion(span, &prefix, &cwd)
            .into_iter()
            .map(move |x| {
                if self.flat_idx == 0 {
                    // We're in the command position
                    // A quoted path that resolves to an executable gets a `^`
                    // prefix (unless one is already typed) so it runs as an
                    // external command rather than being parsed as a string.
                    if x.1.starts_with('"') && !matches!(preceding_byte.get(0), Some(b'^')) {
                        let trimmed = trim_quotes(x.1.as_bytes());
                        let trimmed = String::from_utf8_lossy(trimmed).to_string();
                        let expanded = nu_path::canonicalize_with(trimmed, &cwd);
                        if let Ok(expanded) = expanded {
                            if is_executable::is_executable(expanded) {
                                (x.0, format!("^{}", x.1))
                            } else {
                                (x.0, x.1)
                            }
                        } else {
                            (x.0, x.1)
                        }
                    } else {
                        (x.0, x.1)
                    }
                } else {
                    (x.0, x.1)
                }
            })
            .map(move |x| Suggestion {
                value: x.1,
                description: None,
                extra: None,
                span: reedline::Span {
                    start: x.0.start - offset,
                    end: x.0.end - offset,
                },
            })
            .chain(subcommands.into_iter())
            .chain(commands.into_iter())
            .collect::<Vec<_>>();
        (output, options)
    }
    // Replace base filter with no filter once all the results are already based in the current path
    fn filter(&self, _: Vec<u8>, items: Vec<Suggestion>, _: CompletionOptions) -> Vec<Suggestion> {
        items
    }
}
|
use aoc::*;
use itertools::Itertools;
/// Entry point: reads the day-22 shuffle instructions and prints the answer of
/// `solve2` for the part-two constants (deck size 119315717514047, shuffle
/// count 101741582076661, tracked position 2020 — puzzle-given values).
fn main() -> Result<()> {
    let input = input("22.txt")?;
    println!("{}", solve2(&input, 119315717514047, 101741582076661, 2020));
    Ok(())
}
/// Alternative solver that tracks the shuffle as an (offset, increment) pair.
///
/// NOTE(review): not called by `main` (which uses `solve2`); presumably an
/// earlier formulation of the same linear-map composition — confirm before
/// removing. Assumes `cards` is prime: `inverse` uses Fermat's little theorem.
fn solve(techniques: &str, cards: i128, times: i128, position: i128) -> i128 {
    let mut offset: i128 = 0;
    let mut increment: i128 = 1;
    // Modular inverse via i^(cards-2) mod cards — valid only for prime `cards`.
    let inverse = |i| mod_exp(i, cards - 2, cards);
    for line in techniques.lines() {
        // Split "<technique words> <last token>" on the last space.
        let (end, start) = line.rsplitn(2, ' ').collect_tuple().unwrap();
        match (start, end.parse()) {
            ("cut", Ok(n)) => offset = (offset + increment * n as i128).rem_euclid(cards),
            ("deal with increment", Ok(n)) => {
                increment = (increment * inverse(n)).rem_euclid(cards)
            }
            // "deal into new stack": trailing word "stack" fails to parse.
            ("deal into new", Err(_)) => {
                increment = (increment * -1).rem_euclid(cards);
                offset = (offset + increment).rem_euclid(cards);
            }
            _ => unreachable!(),
        }
    }
    // Closed form for applying the single-pass map `times` times.
    let increments = mod_exp(increment, times, cards);
    let offsets =
        (offset * (1 - increments) * inverse((1 - increment).rem_euclid(cards))).rem_euclid(cards);
    (offsets + position * increments) % cards
}
/// Collapse the whole shuffle into a single linear map `p -> a*p + b (mod cards)`
/// composed in reverse order, so it maps a final position back through one full
/// shuffle. Returns `(a, b)`; both may be negative (reduced with `%`, not
/// `rem_euclid`). Assumes `cards` is prime (Fermat inverse for increments).
fn linearise(techniques: &str, cards: i128) -> (i128, i128) {
    let mut a: i128 = 1;
    let mut b: i128 = 0;
    for technique in techniques.lines().rev() {
        // Split "<technique words> <last token>" on the last space.
        let (end, start) = technique.rsplitn(2, ' ').collect_tuple().unwrap();
        match (start, end.parse::<i128>()) {
            ("cut", Ok(n)) => b += n,
            // "deal into new stack": reverses the deck.
            ("deal into new", _) => {
                a = -a;
                b = -b - 1;
            }
            ("deal with increment", Ok(n)) => {
                let inv = mod_exp(n, cards - 2, cards);
                a *= inv;
                b *= inv;
            }
            _ => unreachable!(),
        }
        a %= cards;
        b %= cards;
    }
    (a, b)
}
/// Answer for part two: the card at `position` after applying the shuffle
/// `times` times to a deck of `cards` cards (`cards` must be prime).
fn solve2(techniques: &str, cards: i128, times: i128, position: i128) -> i128 {
    let inv = |i| mod_exp(i, cards - 2, cards);
    let (a, b) = linearise(techniques, cards);
    // a^times applied to the position, plus the geometric series
    // b * (a^times - 1) / (a - 1), with division done via the Fermat inverse.
    let a_pow = mod_exp(a, times, cards);
    let series = (a_pow - 1) * inv(a - 1) % cards;
    (position * a_pow % cards + b * series % cards).rem_euclid(cards)
}
/// Modular exponentiation by repeated squaring: `base^exponent mod modulus`,
/// in O(log exponent) multiplications.
///
/// Fixes over the previous version: the result is normalized with `rem_euclid`
/// so a negative `base` (produced by `linearise`'s `-a`) yields the canonical
/// non-negative residue instead of a negative remainder, and the accumulator
/// starts at `1 % modulus` so `exponent == 0` with `modulus == 1` correctly
/// returns 0. All results remain congruent to the previous implementation's.
///
/// Intermediate products are at most `(modulus - 1)^2`; callers keep `modulus`
/// small enough that this fits in `i128`.
fn mod_exp(base: i128, exponent: i128, modulus: i128) -> i128 {
    // Normalize once so every later product of residues stays in 0..modulus.
    let mut base = base.rem_euclid(modulus);
    let mut exponent = exponent;
    let mut result = 1 % modulus; // correct even for modulus == 1
    while exponent > 0 {
        if exponent & 1 == 1 {
            result = result * base % modulus;
        }
        exponent >>= 1;
        base = base * base % modulus;
    }
    result
}
|
/*
types.rs: Holds the types of the Abstract-Syntax-Tree(AST) named RlType, the parser and evaluator work with.
It also defines the Error-type and a ReturnType that is needed for Error-handling.
*/
// load needed sibling-modules
use crate::env::RlEnv;
use crate::types::RlErr::ErrString;
// load needed Rust-Functionality
use std::fmt;
use std::rc::Rc;
/// This type is needed for error handling since in rust that's just possible via return values
pub type RlReturn = Result<RlType, RlErr>;
/**
RLType is the internal Data-Structure of RLisp, it represents the AST.
Please find the README for further information on the types.
*/
#[derive(Debug, Clone)]
pub enum RlType {
    // 64-bit integer literal.
    Int(i64),
    // Boolean literal.
    Bool(bool),
    // Identifier / symbol name.
    Symbol(String),
    // String literal.
    String(String),
    // Built-in function: a native Rust function over evaluated arguments.
    Func(fn(Vec<RlType>) -> RlReturn),
    // User-defined function: closure environment, parameter list, and body.
    SelfDefinedFunc {
        env: RlEnv,
        params: Rc<Vec<RlType>>,
        body: Rc<RlType>,
    },
    // List expression (also used for function application forms).
    List(Vec<RlType>),
    // The nil / empty value.
    Nil,
}
/// A Type to define Errors
#[derive(Debug)]
pub enum RlErr {
    // Defines Error-type String: the sole variant, carrying a message.
    ErrString(String),
}
/**
Defines which of the types are an atom: Int, Symbol, String, Nil, Bool.
It takes an arbitrary expression and returns a Boolean whether given expression has atomic type.
(Lists — including empty ones — are not treated as atoms here.)
Arguments: expr - expression of type RLType, that is to be checked
Returns: true if given type is atomic, false otherwise
*/
pub fn is_atom(expr: RlType) -> bool {
    matches!(
        expr,
        RlType::Int(_)
            | RlType::Symbol(_)
            | RlType::Nil
            | RlType::Bool(_)
            | RlType::String(_)
    )
}
/// Implement the display trait so that errors are shown nicely
impl fmt::Display for RlErr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
ErrString(i) => write!(f, "{}", i),
}
}
}
/// Implement the PartialEq trait for RlType so comparisons are possible; this
/// backs RLisp's "eq?" functionality.
impl PartialEq for RlType {
    /// Two values are equal when their variants match and, where a payload
    /// exists, the payloads compare equal. Function variants (built-in and
    /// user-defined) never compare equal.
    fn eq(&self, other: &RlType) -> bool {
        match (self, other) {
            (RlType::Int(a), RlType::Int(b)) => a == b,
            (RlType::Bool(a), RlType::Bool(b)) => a == b,
            (RlType::Symbol(a), RlType::Symbol(b)) => a == b,
            (RlType::Nil, RlType::Nil) => true,
            (RlType::List(a), RlType::List(b)) => a == b,
            (RlType::String(a), RlType::String(b)) => a == b,
            _ => false,
        }
    }
}
/**
Helper to create ErrString-Instance
Arguments: str - String that should be the errormessage
Returns: new ErrString Instance
*/
pub fn error(str: &str) -> RlErr {
    ErrString(str.to_string())
}
|
use super::*;
/// 16-bit display-control register value.
///
/// NOTE(review): the accessor names (display mode 0-7, frame select, OBJ 1-D
/// VRAM mapping, forced blank, BG0-3/OBJ/window enables) match the GBA
/// `DISPCNT` register layout — confirm against this module's target hardware.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)]
#[repr(transparent)]
pub struct DisplayControl(u16);
impl DisplayControl {
    const_new!();
    // Bits 0-2: display/background mode (0-7).
    bitfield_int!(u16; 0..=2: u16, display_mode, with_display_mode, set_display_mode);
    // Bit 4: frame-buffer page select.
    bitfield_bool!(u16; 4, display_frame1, with_display_frame1, set_display_frame1);
    // Bit 5: allow access during H-blank.
    bitfield_bool!(u16; 5, hblank_interval_free, with_hblank_interval_free, set_hblank_interval_free);
    // Bit 6: OBJ tile VRAM mapped 1-dimensionally.
    bitfield_bool!(u16; 6, obj_vram_1d, with_obj_vram_1d, set_obj_vram_1d);
    // Bit 7: force the screen blank.
    bitfield_bool!(u16; 7, forced_blank, with_forced_blank, set_forced_blank);
    // Bits 8-11: enable background layers 0-3.
    bitfield_bool!(u16; 8, display_bg0, with_display_bg0, set_display_bg0);
    bitfield_bool!(u16; 9, display_bg1, with_display_bg1, set_display_bg1);
    bitfield_bool!(u16; 10, display_bg2, with_display_bg2, set_display_bg2);
    bitfield_bool!(u16; 11, display_bg3, with_display_bg3, set_display_bg3);
    // Bit 12: enable sprite (OBJ) rendering.
    bitfield_bool!(u16; 12, display_obj, with_display_obj, set_display_obj);
    // Bits 13-15: enable windows 0, 1, and the OBJ window.
    bitfield_bool!(u16; 13, display_win0, with_display_win0, set_display_win0);
    bitfield_bool!(u16; 14, display_win1, with_display_win1, set_display_win1);
    bitfield_bool!(u16; 15, display_obj_win, with_display_obj_win, set_display_obj_win);
}
/*
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u16)]
pub enum DisplayMode {
_0 = 0,
_1 = 1,
_2 = 2,
_3 = 3,
_4 = 4,
_5 = 5,
_6 = 6,
_7 = 7,
}
*/
|
use core::cmp;
use core::marker::PhantomData;
use embassy::util::Unborrow;
use embassy_extras::unborrow;
use embedded_hal::blocking::i2c::Read;
use embedded_hal::blocking::i2c::Write;
use embedded_hal::blocking::i2c::WriteRead;
use crate::i2c::{Error, Instance, SclPin, SdaPin};
use crate::pac::gpio::vals::{Afr, Moder, Ot};
use crate::pac::gpio::Gpio;
use crate::pac::i2c;
use crate::time::Hertz;
/// I2C master driver for peripheral instance `T`.
///
/// Holds no runtime state — all register access goes through `T::regs()`; the
/// `PhantomData<&'d mut T>` ties the driver to the exclusively-borrowed
/// peripheral for lifetime `'d`.
pub struct I2c<'d, T: Instance> {
    phantom: PhantomData<&'d mut T>,
}
impl<'d, T: Instance> I2c<'d, T> {
/// Create and initialize the I2C peripheral.
///
/// Configures SCL/SDA as open-drain alternate-function pins, disables the
/// peripheral while programming TIMINGR from `pclk` and the requested bus
/// `freq`, then re-enables it (PE).
pub fn new<F>(
    pclk: Hertz,
    _peri: impl Unborrow<Target = T> + 'd,
    scl: impl Unborrow<Target = impl SclPin<T>>,
    sda: impl Unborrow<Target = impl SdaPin<T>>,
    freq: F,
) -> Self
where
    F: Into<Hertz>,
{
    unborrow!(scl, sda);
    // SAFETY: pins were exclusively unborrowed above; GPIO registers are
    // written via their own blocks. (Relies on HAL invariants not visible
    // in this file.)
    unsafe {
        Self::configure_pin(scl.block(), scl.pin() as _, scl.af_num());
        Self::configure_pin(sda.block(), sda.pin() as _, sda.af_num());
    }
    // Disable the peripheral before touching timing configuration.
    // NOTE(review): anfoff is set to false — presumably keeping the analog
    // noise filter enabled; confirm against the reference manual.
    unsafe {
        T::regs().cr1().modify(|reg| {
            reg.set_pe(false);
            reg.set_anfoff(false);
        });
    }
    let timings = Timings::new(pclk, freq.into());
    unsafe {
        T::regs().timingr().write(|reg| {
            reg.set_presc(timings.prescale);
            reg.set_scll(timings.scll);
            reg.set_sclh(timings.sclh);
            reg.set_sdadel(timings.sdadel);
            reg.set_scldel(timings.scldel);
        });
    }
    // Re-enable the peripheral with the new timings.
    unsafe {
        T::regs().cr1().modify(|reg| {
            reg.set_pe(true);
        });
    }
    Self {
        phantom: PhantomData,
    }
}
/// Put one GPIO pin into open-drain alternate-function mode `af_num`.
///
/// # Safety
/// Performs raw writes to the GPIO registers of `block`; the caller must have
/// exclusive access to the pin (guaranteed by `unborrow!` in `new`).
unsafe fn configure_pin(block: Gpio, pin: usize, af_num: u8) {
    // AFR is split into low (pins 0-7) and high (pins 8-15) registers.
    let (afr, n_af) = if pin < 8 { (0, pin) } else { (1, pin - 8) };
    block.moder().modify(|w| w.set_moder(pin, Moder::ALTERNATE));
    block.afr(afr).modify(|w| w.set_afr(n_af, Afr(af_num)));
    block.otyper().modify(|w| w.set_ot(pin, Ot::OPENDRAIN));
    //block
    //.ospeedr()
    //.modify(|w| w.set_ospeedr(pin, crate::pac::gpio::vals::Ospeedr::VERYHIGHSPEED));
}
fn master_stop(&mut self) {
unsafe {
T::regs().cr2().write(|w| w.set_stop(i2c::vals::Stop::STOP));
}
}
fn master_read(&mut self, address: u8, length: usize, stop: Stop) {
assert!(length < 256 && length > 0);
// Wait for any previous address sequence to end
// automatically. This could be up to 50% of a bus
// cycle (ie. up to 0.5/freq)
while unsafe { T::regs().cr2().read().start() == i2c::vals::Start::START } {}
// Set START and prepare to receive bytes into
// `buffer`. The START bit can be set even if the bus
// is BUSY or I2C is in slave mode.
unsafe {
T::regs().cr2().modify(|w| {
w.set_sadd((address << 1 | 0) as u16);
w.set_rd_wrn(i2c::vals::RdWrn::READ);
w.set_nbytes(length as u8);
w.set_start(i2c::vals::Start::START);
w.set_autoend(stop.autoend());
});
}
}
fn master_write(&mut self, address: u8, length: usize, stop: Stop) {
assert!(length < 256 && length > 0);
// Wait for any previous address sequence to end
// automatically. This could be up to 50% of a bus
// cycle (ie. up to 0.5/freq)
while unsafe { T::regs().cr2().read().start() == i2c::vals::Start::START } {}
// Set START and prepare to send `bytes`. The
// START bit can be set even if the bus is BUSY or
// I2C is in slave mode.
unsafe {
T::regs().cr2().modify(|w| {
w.set_sadd((address << 1 | 0) as u16);
w.set_add10(i2c::vals::Add::BIT7);
w.set_rd_wrn(i2c::vals::RdWrn::WRITE);
w.set_nbytes(length as u8);
w.set_start(i2c::vals::Start::START);
w.set_autoend(stop.autoend());
});
}
}
fn master_re_start(&mut self, address: u8, length: usize, stop: Stop) {
assert!(length < 256 && length > 0);
unsafe {
T::regs().cr2().modify(|w| {
w.set_sadd((address << 1 | 1) as u16);
w.set_add10(i2c::vals::Add::BIT7);
w.set_rd_wrn(i2c::vals::RdWrn::READ);
w.set_nbytes(length as u8);
w.set_start(i2c::vals::Start::START);
w.set_autoend(stop.autoend());
});
}
}
fn flush_txdr(&self) {
//if $i2c.isr.read().txis().bit_is_set() {
//$i2c.txdr.write(|w| w.txdata().bits(0));
//}
unsafe {
if T::regs().isr().read().txis() {
T::regs().txdr().write(|w| w.set_txdata(0));
}
if T::regs().isr().read().txe() {
T::regs().isr().modify(|w| w.set_txe(true))
}
}
// If TXDR is not flagged as empty, write 1 to flush it
//if $i2c.isr.read().txe().is_not_empty() {
//$i2c.isr.write(|w| w.txe().set_bit());
//}
}
fn wait_txe(&self) -> Result<(), Error> {
loop {
unsafe {
let isr = T::regs().isr().read();
if isr.txe() {
return Ok(());
} else if isr.berr() {
T::regs().icr().write(|reg| reg.set_berrcf(true));
return Err(Error::Bus);
} else if isr.arlo() {
T::regs().icr().write(|reg| reg.set_arlocf(true));
return Err(Error::Arbitration);
} else if isr.nackf() {
T::regs().icr().write(|reg| reg.set_nackcf(true));
self.flush_txdr();
return Err(Error::Nack);
}
}
}
}
fn wait_rxne(&self) -> Result<(), Error> {
loop {
unsafe {
let isr = T::regs().isr().read();
if isr.rxne() {
return Ok(());
} else if isr.berr() {
T::regs().icr().write(|reg| reg.set_berrcf(true));
return Err(Error::Bus);
} else if isr.arlo() {
T::regs().icr().write(|reg| reg.set_arlocf(true));
return Err(Error::Arbitration);
} else if isr.nackf() {
T::regs().icr().write(|reg| reg.set_nackcf(true));
self.flush_txdr();
return Err(Error::Nack);
}
}
}
}
fn wait_tc(&self) -> Result<(), Error> {
loop {
unsafe {
let isr = T::regs().isr().read();
if isr.tc() {
return Ok(());
} else if isr.berr() {
T::regs().icr().write(|reg| reg.set_berrcf(true));
return Err(Error::Bus);
} else if isr.arlo() {
T::regs().icr().write(|reg| reg.set_arlocf(true));
return Err(Error::Arbitration);
} else if isr.nackf() {
T::regs().icr().write(|reg| reg.set_nackcf(true));
self.flush_txdr();
return Err(Error::Nack);
}
}
}
}
}
impl<'d, T: Instance> Read for I2c<'d, T> {
    type Error = Error;
    /// Blocking read of `buffer.len()` bytes from `address`, with automatic
    /// STOP once the configured byte count has been received.
    fn read(&mut self, address: u8, buffer: &mut [u8]) -> Result<(), Self::Error> {
        // NBYTES register limit: 1..=255 bytes per transfer.
        assert!(buffer.len() < 256 && buffer.len() > 0);
        self.master_read(address, buffer.len(), Stop::Automatic);
        for byte in buffer {
            // Wait until we have received something
            self.wait_rxne()?;
            //*byte = self.i2c.rxdr.read().rxdata().bits();
            unsafe {
                *byte = T::regs().rxdr().read().rxdata();
            }
        }
        // automatic STOP
        Ok(())
    }
}
impl<'d, T: Instance> Write for I2c<'d, T> {
    type Error = Error;
    /// Blocking write of all of `bytes` to `address`, followed by an explicit
    /// software-generated STOP.
    fn write(&mut self, address: u8, bytes: &[u8]) -> Result<(), Self::Error> {
        // TODO support transfers of more than 255 bytes
        assert!(bytes.len() < 256 && bytes.len() > 0);
        // I2C start
        //
        // ST SAD+W
        self.master_write(address, bytes.len(), Stop::Software);
        for byte in bytes {
            // Wait until we are allowed to send data
            // (START has been ACKed or last byte went
            // through)
            self.wait_txe()?;
            // Put byte on the wire
            //self.i2c.txdr.write(|w| w.txdata().bits(*byte));
            unsafe {
                T::regs().txdr().write(|w| w.set_txdata(*byte));
            }
        }
        // Wait until the write finishes
        self.wait_tc()?;
        // Stop
        self.master_stop();
        Ok(())
    }
}
impl<'d, T: Instance> WriteRead for I2c<'d, T> {
    type Error = Error;
    /// Blocking write-then-read: sends `bytes`, issues a repeated START, then
    /// fills `buffer`, all without releasing the bus in between.
    fn write_read(
        &mut self,
        address: u8,
        bytes: &[u8],
        buffer: &mut [u8],
    ) -> Result<(), Self::Error> {
        // TODO support transfers of more than 255 bytes
        assert!(bytes.len() < 256 && bytes.len() > 0);
        assert!(buffer.len() < 256 && buffer.len() > 0);
        // I2C start
        //
        // ST SAD+W
        self.master_write(address, bytes.len(), Stop::Software);
        for byte in bytes {
            // Wait until we are allowed to send data
            // (START has been ACKed or last byte went through)
            self.wait_txe()?;
            // Put byte on the wire
            //self.i2c.txdr.write(|w| w.txdata().bits(*byte));
            unsafe {
                T::regs().txdr().write(|w| w.set_txdata(*byte));
            }
        }
        // Wait until the write finishes before beginning to read.
        self.wait_tc()?;
        // I2C re-start
        //
        // SR SAD+R
        self.master_re_start(address, buffer.len(), Stop::Automatic);
        for byte in buffer {
            // Wait until we have received something
            self.wait_rxne()?;
            //*byte = self.i2c.rxdr.read().rxdata().bits();
            unsafe {
                *byte = T::regs().rxdr().read().rxdata();
            }
        }
        // automatic STOP
        Ok(())
    }
}
/// I2C Stop Configuration
///
/// Peripheral options for generating the STOP condition
#[derive(Copy, Clone, PartialEq)]
pub enum Stop {
    /// Software end mode: Must write register to generate STOP condition
    /// (see `master_stop`).
    Software,
    /// Automatic end mode: A STOP condition is automatically generated once the
    /// configured number of bytes have been transferred
    Automatic,
}
impl Stop {
    /// Translates this stop mode into the AUTOEND value written to CR2.
    fn autoend(&self) -> i2c::vals::Autoend {
        if let Stop::Automatic = self {
            i2c::vals::Autoend::AUTOMATIC
        } else {
            i2c::vals::Autoend::SOFTWARE
        }
    }
}
/// Raw TIMINGR field values derived from the kernel clock and bus frequency.
struct Timings {
    // PRESC: clock prescaler (0..=15, enforced by the assert in `new`).
    prescale: u8,
    // SCLL: SCL low period; actual time is (SCLL + 1) prescaled ticks.
    scll: u8,
    // SCLH: SCL high period; actual time is (SCLH + 1) prescaled ticks.
    sclh: u8,
    // SDADEL: data (SDA) delay — see RM0433 timing figure referenced in `new`.
    sdadel: u8,
    // SCLDEL: data setup delay — see RM0433 timing figure referenced in `new`.
    scldel: u8,
}
impl Timings {
    /// Computes TIMINGR fields for bus frequency `freq` given I2C kernel
    /// clock `i2cclk`, selecting Standard, Fast, or Fast-mode-Plus timing.
    fn new(i2cclk: Hertz, freq: Hertz) -> Self {
        let i2cclk = i2cclk.0;
        let freq = freq.0;
        // Refer to RM0433 Rev 7 Figure 539 for setup and hold timing:
        //
        // t_I2CCLK = 1 / PCLK1
        // t_PRESC = (PRESC + 1) * t_I2CCLK
        // t_SCLL = (SCLL + 1) * t_PRESC
        // t_SCLH = (SCLH + 1) * t_PRESC
        //
        // t_SYNC1 + t_SYNC2 > 4 * t_I2CCLK
        // t_SCL ~= t_SYNC1 + t_SYNC2 + t_SCLL + t_SCLH
        let ratio = i2cclk / freq;
        // For the standard-mode configuration method, we must have a ratio of 4
        // or higher
        assert!(
            ratio >= 4,
            "The I2C PCLK must be at least 4 times the bus frequency!"
        );
        let (presc_reg, scll, sclh, sdadel, scldel) = if freq > 100_000 {
            // Fast-mode (Fm) or Fast-mode Plus (Fm+)
            // here we pick SCLL + 1 = 2 * (SCLH + 1)
            // Prescaler, 384 ticks for sclh/scll. Round up then subtract 1
            let presc_reg = ((ratio - 1) / 384) as u8;
            // ratio < 1200 by pclk 120MHz max., therefore presc < 16
            // NOTE(review): unlike the standard-mode branch, presc_reg is not
            // clamped with cmp::min(.., 15) here; the sanity assert at the end
            // is what catches any violation of the ratio < 1200 argument.
            // Actual prescale value selected
            let presc = (presc_reg + 1) as u32;
            let sclh = ((ratio / presc) - 3) / 3;
            let scll = (2 * (sclh + 1)) - 1;
            let (sdadel, scldel) = if freq > 400_000 {
                // Fast-mode Plus (Fm+)
                assert!(i2cclk >= 17_000_000); // See table in datasheet
                let sdadel = i2cclk / 8_000_000 / presc;
                let scldel = i2cclk / 4_000_000 / presc - 1;
                (sdadel, scldel)
            } else {
                // Fast-mode (Fm)
                assert!(i2cclk >= 8_000_000); // See table in datasheet
                let sdadel = i2cclk / 4_000_000 / presc;
                let scldel = i2cclk / 2_000_000 / presc - 1;
                (sdadel, scldel)
            };
            (
                presc_reg,
                scll as u8,
                sclh as u8,
                sdadel as u8,
                scldel as u8,
            )
        } else {
            // Standard-mode (Sm)
            // here we pick SCLL = SCLH
            assert!(i2cclk >= 2_000_000); // See table in datasheet
            // Prescaler, 512 ticks for sclh/scll. Round up then
            // subtract 1
            let presc = (ratio - 1) / 512;
            let presc_reg = cmp::min(presc, 15) as u8;
            // Actual prescale value selected
            let presc = (presc_reg + 1) as u32;
            let sclh = ((ratio / presc) - 2) / 2;
            let scll = sclh;
            // Speed check
            assert!(
                sclh < 256,
                "The I2C PCLK is too fast for this bus frequency!"
            );
            let sdadel = i2cclk / 2_000_000 / presc;
            let scldel = i2cclk / 500_000 / presc - 1;
            (
                presc_reg,
                scll as u8,
                sclh as u8,
                sdadel as u8,
                scldel as u8,
            )
        };
        // Sanity check
        assert!(presc_reg < 16);
        // Keep values within reasonable limits for fast per_ck
        let sdadel = cmp::max(sdadel, 2);
        let scldel = cmp::max(scldel, 4);
        //(presc_reg, scll, sclh, sdadel, scldel)
        Self {
            prescale: presc_reg,
            scll,
            sclh,
            sdadel,
            scldel,
        }
    }
}
|
#![no_std]
#![no_main]
#![feature(trait_alias)]
#![feature(min_type_alias_impl_trait)]
#![feature(impl_trait_in_bindings)]
#![feature(type_alias_impl_trait)]
#[path = "../example_common.rs"]
mod example_common;
use embassy_stm32::gpio::{Level, Output, Input, Pull, NoPin};
use embedded_hal::digital::v2::{OutputPin, InputPin};
use example_common::*;
use cortex_m_rt::entry;
use stm32h7::stm32h743 as pac;
use embassy_stm32::spi::{Spi, MODE_0, ByteOrder, Config};
use embassy_stm32::time::Hertz;
use embedded_hal::blocking::spi::Transfer;
use stm32h7xx_hal::{rcc, prelude::*};
use embassy_stm32::dac::{Dac, Value, Channel};
#[entry]
fn main() -> ! {
    info!("Hello World, dude!");
    // Bring the chip up via the stm32h7xx-hal first: power, then clock tree.
    let pp = pac::Peripherals::take().unwrap();
    let pwrcfg = pp.PWR.constrain().freeze();
    let rcc = pp.RCC.constrain();
    // ccdr holds the frozen clock configuration (not used further here).
    let ccdr = rcc
        .sys_ck(96.mhz())
        .pclk1(48.mhz())
        .pclk2(48.mhz())
        .pclk3(48.mhz())
        .pclk4(48.mhz())
        .pll1_q_ck(48.mhz())
        .freeze(pwrcfg, &pp.SYSCFG);
    // The HAL consumed `pp`; steal a second handle for the raw register
    // pokes below.
    let pp = unsafe { pac::Peripherals::steal() };
    // Keep D1-domain debug clocks enabled in sleep/stop/standby so the
    // debugger stays attached through low-power states.
    pp.DBGMCU.cr.modify(|_, w| {
        w.dbgsleep_d1().set_bit();
        w.dbgstby_d1().set_bit();
        w.dbgstop_d1().set_bit();
        w.d1dbgcken().set_bit();
        w
    });
    // Enable the DAC1/2 peripheral clock.
    pp.RCC.apb1lenr.modify(|_, w| {
        w.dac12en().set_bit();
        w
    });
    // Enable GPIO port clocks A through F.
    pp.RCC.ahb4enr.modify(|_, w| {
        w.gpioaen().set_bit();
        w.gpioben().set_bit();
        w.gpiocen().set_bit();
        w.gpioden().set_bit();
        w.gpioeen().set_bit();
        w.gpiofen().set_bit();
        w
    });
    let p = embassy_stm32::init(Default::default());
    // DAC channel 1 on PA4; channel 2 unused (NoPin).
    let mut dac = Dac::new(p.DAC1, p.PA4, NoPin);
    loop {
        // Sweep v over one full period; each step writes one 8-bit sine sample.
        for v in 0..=255 {
            dac.set(Channel::Ch1, Value::Bit8(to_sine_wave(v)));
            dac.trigger(Channel::Ch1);
        }
    }
}
use micromath::F32Ext;
/// Maps a phase counter `v` (0..=255, one full period) to an 8-bit sine
/// sample centred on 127.
///
/// `128..=255` produces the positive half of the wave (phase `0..PI`) and
/// `0..128` the negative half (phase `PI..2*PI`), so a wrapping counter yields
/// a continuous sine. Uses `core::f32::consts::PI` instead of the previous
/// hard-coded `3.14`, which skewed samples near the wave extremes.
fn to_sine_wave(v: u8) -> u8 {
    use core::f32::consts::PI;
    if v >= 128 {
        // top half: phase in [0, PI)
        let r = PI * ((v - 128) as f32 / 128.0);
        (r.sin() * 128.0 + 127.0) as u8
    } else {
        // bottom half: phase in [PI, 2*PI)
        let r = PI + PI * (v as f32 / 128.0);
        (r.sin() * 128.0 + 127.0) as u8
    }
}
|
//! Methods for handling malformed HTML input.
//!
//! Methods are used by [Parser](crate::html::parse::Parser).
//!
//! See [ParseOptionsBuilder](crate::html::parse::parse_options::ParseOptionsBuilder)
//! for details on how to select these handlers.
use log::log;
#[cfg(test)]
use mockall::automock;
use super::{ParseError, ParserState};
/// Context given to a [MismatchedTagHandler].
/// Includes the entire mutable [ParserState], plus some additional context
/// about the two tag names involved in the mismatch.
pub struct MismatchedTagHandlerContext<'a> {
    /// The name of the open tag.
    pub open_tag_name: &'a str,
    /// The name of the close tag that did not match the open tag name.
    pub close_tag_name: &'a str,
    /// The mutable state of the parser when the tag mismatch was encountered.
    pub parser_state: &'a mut ParserState,
}
/// Trait representing a method for handling a closing tag that does not have the same name as the corresponding opening tag.
/// This happens when the HTML input is malformed.
///
/// Examples:
///
/// 1. End tag is incorrect:
///
/// ```html
/// <html>
/// </body> <!-- This should have been closing 'html'. -->
/// ```
///
/// 2. End tag is missing entirely:
///
/// ```html
/// <html>
/// <div>
/// </html> <!-- The closing 'div' is missing, which makes it seem like this 'html' closing tag is mismatched with the opening 'div'. -->
/// ```
#[cfg_attr(test, automock)]
pub trait MismatchedTagHandler {
    /// Performs the handling by (optionally) changing the [ParserState](crate::html::parse::ParserState).
    ///
    /// If the result is Ok, the parser will continue. If the result is Err, it will return the error immediately.
    fn invoke<'a>(&self, context: MismatchedTagHandlerContext<'a>) -> Result<(), ParseError>;
}
/// Returns an error that an end tag did not match an opening tag.
/// This cuts the parsing short and causes the parser to immediately return the error.
///
/// Stateless; construct with [`ErrorMismatchedTagHandler::new`] or `Default`.
pub struct ErrorMismatchedTagHandler {}
impl ErrorMismatchedTagHandler {
/// Creates a new [ErrorMismatchedTagHandler].
pub fn new() -> Self {
Self {}
}
}
impl Default for ErrorMismatchedTagHandler {
fn default() -> Self {
ErrorMismatchedTagHandler::new()
}
}
impl MismatchedTagHandler for ErrorMismatchedTagHandler {
    /// Always fails: reports the mismatch as an [`ParseError::EndTagMismatch`],
    /// aborting the parse.
    fn invoke(&self, context: MismatchedTagHandlerContext) -> Result<(), ParseError> {
        let end_name = context.close_tag_name.to_string();
        let open_name = context.open_tag_name.to_string();
        Err(ParseError::EndTagMismatch { end_name, open_name })
    }
}
/// Does not error, and performs no special handling, effectively ignoring the mismatching tag.
/// Optionally logs a message that a tag mismatch has occurred.
///
/// ```rust
/// # use std::error::Error;
/// # fn main() -> Result<(), Box<dyn Error>> {
/// use skyscraper::html::parse::{Parser, ParseOptionsBuilder, malformed_html_handlers::VoidMismatchedTagHandler};
/// let input = r#"
/// <html>
/// <body>
/// hello
/// <div>
/// there
/// </body>
/// friend
/// </span>"#;
///
/// let options = ParseOptionsBuilder::new()
/// .with_mismatched_tag_handler(Box::new(VoidMismatchedTagHandler::new(None)))
/// .build();
///
/// let html = Parser::new(options).parse(input)?;
///
/// let output = r#"<html>
/// <body>
/// hello
/// <div>
/// there
/// </div>
/// friend
/// </body>
/// </html>
/// "#;
/// assert_eq!(html.to_string(), output);
/// # Ok(())
/// # }
/// ```
pub struct VoidMismatchedTagHandler {
    /// Log level used to report mismatches; `None` disables logging.
    log_level: Option<log::Level>,
}
impl VoidMismatchedTagHandler {
/// Creates a new [VoidMismatchedTagHandler].
///
/// Set `log_level` to `None` if no logs are desired.
pub fn new(log_level: Option<log::Level>) -> Self {
Self { log_level }
}
}
impl MismatchedTagHandler for VoidMismatchedTagHandler {
    /// Ignores the mismatch, optionally emitting a log line, and lets the
    /// parser continue untouched.
    fn invoke(&self, context: MismatchedTagHandlerContext) -> Result<(), ParseError> {
        match self.log_level {
            Some(level) => log!(
                level,
                "End tag of {} mismatches opening tag of {}",
                context.close_tag_name,
                context.open_tag_name
            ),
            None => {}
        }
        Ok(())
    }
}
/// Attempts to close a missing tag by checking if the parent of the current tag matches the mismatching end tag.
/// If the parent does not match, it ignores the mismatch without performing any additional handling, much like [VoidMismatchedTagHandler].
///
/// ```rust
/// # use std::error::Error;
/// # fn main() -> Result<(), Box<dyn Error>> {
/// use skyscraper::html::parse::{Parser, ParseOptionsBuilder, malformed_html_handlers::CloseMismatchedTagHandler};
/// let input = r#"
/// <html>
/// <body>
/// hello
/// <div>
/// there
/// </body>
/// friend
/// </span>"#;
///
/// let options = ParseOptionsBuilder::new()
/// .with_mismatched_tag_handler(Box::new(CloseMismatchedTagHandler::new(None)))
/// .build();
///
/// let html = Parser::new(options).parse(input)?;
///
/// let output = r#"<html>
/// <body>
/// hello
/// <div>
/// there
/// </div>
/// </body>
/// friend
/// </html>
/// "#;
/// assert_eq!(html.to_string(), output);
/// # Ok(())
/// # }
/// ```
pub struct CloseMismatchedTagHandler {
    /// Log level used to report mismatches; `None` disables logging.
    log_level: Option<log::Level>,
}
impl CloseMismatchedTagHandler {
/// Creates a new [CloseMismatchedTagHandler].
///
/// Set `log_level` to `None` if no logs are desired.
pub fn new(log_level: Option<log::Level>) -> Self {
Self { log_level }
}
}
impl MismatchedTagHandler for CloseMismatchedTagHandler {
    /// Logs the mismatch (if configured), then checks whether the *parent* of
    /// the current node matches the stray end tag. If it does, the current
    /// tag's own end tag is assumed missing, and the parser cursor is moved up
    /// so both tags get closed; otherwise the mismatch is ignored.
    fn invoke(&self, context: MismatchedTagHandlerContext) -> Result<(), ParseError> {
        if let Some(log_level) = self.log_level {
            log!(
                log_level,
                "End tag of {} mismatches opening tag of {}",
                context.close_tag_name,
                context.open_tag_name
            );
        }
        // Arena node for the tag currently being parsed.
        let cur_key = context
            .parser_state
            .arena
            .get(context.parser_state.cur_key_o.unwrap())
            .unwrap();
        if let Some(parent_key) = cur_key.parent() {
            // do not move up to the root node, otherwise the parser will attempt to move up past it.
            if parent_key != context.parser_state.root_key_o.unwrap() {
                let parent = context
                    .parser_state
                    .arena
                    .get(parent_key)
                    .unwrap()
                    .get()
                    .unwrap_tag();
                // if the parent name matches the end tag of the mismatch, assume the parent's end tag is missing and move up to close it.
                // otherwise, ignore the mismatch and hope for the best.
                if parent.name == context.close_tag_name {
                    if let Some(log_level) = self.log_level {
                        log!(
                            log_level,
                            "Parent tag matches end tag {}; assuming end tag is missing, closing current tag and parent tag",
                            parent.name,
                        );
                    }
                    context.parser_state.cur_key_o = Some(parent_key);
                }
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use indoc::indoc;
    use super::*;
    use crate::html::parse::{parse_options::ParseOptionsBuilder, Parser};
    // Input whose </div> does not match the open <span> (wrong end tag).
    const HTML_MISMATCHED_END_TAG: &'static str = r#"
<html>
<body>
foo
<span>
bar
</div>
<b>
baz
</b>
bat
</body>
<footer>
foot
</footer>
</html>
"#;
    // Input where <span> is never closed, so </body> appears mismatched.
    const HTML_MISSING_END_TAG: &'static str = r#"
<html>
<body>
foo
<span>
bar
<b>
baz
</b>
bat
</body>
<footer>
foot
</footer>
</html>
"#;
    #[test]
    fn error_handler_should_err_for_missing_end_tag() {
        // arrange
        let parse_options = ParseOptionsBuilder::new()
            .with_mismatched_tag_handler(Box::new(ErrorMismatchedTagHandler::new()))
            .build();
        let parser = Parser::new(parse_options);
        // act
        let result = parser.parse(HTML_MISSING_END_TAG);
        // assert: parse aborts, naming the <span>/<\/body> pair.
        assert!(matches!(result, Err(ParseError::EndTagMismatch { .. })));
        if let Err(ParseError::EndTagMismatch {
            end_name,
            open_name,
        }) = result
        {
            assert_eq!(open_name, String::from("span"));
            assert_eq!(end_name, String::from("body"));
        }
    }
    #[test]
    fn error_handler_should_err_for_mismatched_end_tag() {
        // arrange
        let parse_options = ParseOptionsBuilder::new()
            .with_mismatched_tag_handler(Box::new(ErrorMismatchedTagHandler::new()))
            .build();
        let parser = Parser::new(parse_options);
        // act
        let result = parser.parse(HTML_MISMATCHED_END_TAG);
        // assert: parse aborts, naming the <span>/<\/div> pair.
        assert!(matches!(result, Err(ParseError::EndTagMismatch { .. })));
        if let Err(ParseError::EndTagMismatch {
            end_name,
            open_name,
        }) = result
        {
            assert_eq!(open_name, String::from("span"));
            assert_eq!(end_name, String::from("div"));
        }
    }
    #[test]
    fn void_handler_should_ignore_mismatch_for_missing_end_tag() {
        // arrange
        let parse_options = ParseOptionsBuilder::new()
            .with_mismatched_tag_handler(Box::new(VoidMismatchedTagHandler::new(None)))
            .build();
        let parser = Parser::new(parse_options);
        // act
        let result = parser.parse(HTML_MISSING_END_TAG).unwrap();
        // assert
        let output = result.to_string();
        let expected_output = indoc!(
            r#"
<html>
<body>
foo
<span>
bar
<b>
baz
</b>
bat
</span>
<footer>
foot
</footer>
</body>
</html>
"#
        );
        assert_eq!(output, expected_output);
    }
    #[test]
    fn void_handler_should_ignore_mismatch_for_mismatched_end_tag() {
        // arrange
        let parse_options = ParseOptionsBuilder::new()
            .with_mismatched_tag_handler(Box::new(VoidMismatchedTagHandler::new(None)))
            .build();
        let parser = Parser::new(parse_options);
        // act
        let result = parser.parse(HTML_MISMATCHED_END_TAG).unwrap();
        // assert
        let output = result.to_string();
        let expected_output = indoc!(
            r#"
<html>
<body>
foo
<span>
bar
</span>
<b>
baz
</b>
bat
</body>
<footer>
foot
</footer>
</html>
"#
        );
        assert_eq!(output, expected_output);
    }
    #[test]
    fn close_handler_should_close_both_tags_for_missing_end_tag() {
        // arrange
        let parse_options = ParseOptionsBuilder::new()
            .with_mismatched_tag_handler(Box::new(CloseMismatchedTagHandler::new(None)))
            .build();
        let parser = Parser::new(parse_options);
        // act
        let result = parser.parse(HTML_MISSING_END_TAG).unwrap();
        // assert: </body> closes both the dangling <span> and <body>.
        let output = result.to_string();
        let expected_output = indoc!(
            r#"
<html>
<body>
foo
<span>
bar
<b>
baz
</b>
bat
</span>
</body>
<footer>
foot
</footer>
</html>
"#
        );
        assert_eq!(output, expected_output);
    }
    #[test]
    fn close_handler_should_ignore_mismatch_for_mismatched_end_tag() {
        // arrange
        let parse_options = ParseOptionsBuilder::new()
            .with_mismatched_tag_handler(Box::new(CloseMismatchedTagHandler::new(None)))
            .build();
        let parser = Parser::new(parse_options);
        // act
        let result = parser.parse(HTML_MISMATCHED_END_TAG).unwrap();
        // assert: parent doesn't match </div>, so behaves like the void handler.
        let output = result.to_string();
        let expected_output = indoc!(
            r#"
<html>
<body>
foo
<span>
bar
</span>
<b>
baz
</b>
bat
</body>
<footer>
foot
</footer>
</html>
"#
        );
        assert_eq!(output, expected_output);
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crate::utils::{are_equal, EvaluationResult};
use winterfell::math::{fields::f128::BaseElement, FieldElement};
/// The number of rounds is set to 14 to provide 128-bit security level.
/// computed using algorithm 7 from https://eprint.iacr.org/2020/1143.pdf
const NUM_ROUNDS: usize = 14;
// The Rescue permutation operates on a state of 4 field elements.
const STATE_WIDTH: usize = 4;
// Round constants repeat with this period; ARK below has CYCLE_LENGTH rows.
const CYCLE_LENGTH: usize = 16;
// HASH FUNCTION
// ================================================================================================
/// Implementation of Rescue hash function with a 4 element state and 14 rounds. Accepts a
/// 2-element input, and returns a 2-element digest.
pub fn hash(value: [BaseElement; 2], result: &mut [BaseElement]) {
    // Absorb: place the input into the first half of a zeroed state.
    let mut state = BaseElement::zeroed_vector(STATE_WIDTH);
    state[..2].copy_from_slice(&value);
    // Run the full 14-round permutation.
    (0..NUM_ROUNDS).for_each(|round| apply_round(&mut state, round));
    // Squeeze: the digest is the first two state elements.
    result.copy_from_slice(&state[..2]);
}
// TRACE
// ================================================================================================
/// Applies one full Rescue round (both half-rounds) to `state` in place.
///
/// `step` selects the round-constant row and is reduced mod CYCLE_LENGTH, so
/// an execution trace may run longer than the constant table.
pub fn apply_round(state: &mut [BaseElement], step: usize) {
    // determine which round constants to use
    let ark = ARK[step % CYCLE_LENGTH];
    // apply first half of Rescue round
    apply_sbox(state);
    apply_mds(state);
    add_constants(state, &ark, 0);
    // apply second half of Rescue round
    apply_inv_sbox(state);
    apply_mds(state);
    add_constants(state, &ark, STATE_WIDTH);
}
// CONSTRAINTS
// ================================================================================================
/// when flag = 1, enforces constraints for a single round of Rescue hash functions
///
/// Rather than applying the expensive inverse S-box to `current`, the check
/// meets in the middle: the first half-round is applied forward to `current`
/// and the second half-round is inverted from `next`; the two intermediate
/// states must agree.
pub fn enforce_round<E: FieldElement + From<BaseElement>>(
    result: &mut [E],
    current: &[E],
    next: &[E],
    ark: &[E],
    flag: E,
) {
    // compute the state that should result from applying the first half of Rescue round
    // to the current state of the computation
    let mut step1 = [E::ZERO; STATE_WIDTH];
    step1.copy_from_slice(current);
    apply_sbox(&mut step1);
    apply_mds(&mut step1);
    for i in 0..STATE_WIDTH {
        step1[i] += ark[i];
    }
    // compute the state that should result from applying the inverse for the second
    // half for Rescue round to the next step of the computation
    let mut step2 = [E::ZERO; STATE_WIDTH];
    step2.copy_from_slice(next);
    for i in 0..STATE_WIDTH {
        step2[i] -= ark[STATE_WIDTH + i];
    }
    apply_inv_mds(&mut step2);
    apply_sbox(&mut step2);
    // make sure that the results are equal
    for i in 0..STATE_WIDTH {
        result.agg_constraint(i, flag, are_equal(step2[i], step1[i]));
    }
}
// ROUND CONSTANTS
// ================================================================================================
/// Returns Rescue round constants arranged in column-major form.
///
/// The result has STATE_WIDTH * 2 vectors; vector `j` holds constant position
/// `j` across all CYCLE_LENGTH steps of the cycle (a transpose of ARK).
pub fn get_round_constants() -> Vec<Vec<BaseElement>> {
    (0..STATE_WIDTH * 2)
        .map(|col| (0..CYCLE_LENGTH).map(|step| ARK[step][col]).collect())
        .collect()
}
// HELPER FUNCTIONS
// ================================================================================================
/// Adds round constants `ark[offset..offset + STATE_WIDTH]` into the state.
#[inline(always)]
fn add_constants(state: &mut [BaseElement], ark: &[BaseElement], offset: usize) {
    let keys = &ark[offset..offset + STATE_WIDTH];
    for (elem, key) in state[..STATE_WIDTH].iter_mut().zip(keys) {
        *elem += *key;
    }
}
/// Raises each of the first STATE_WIDTH state elements to the ALPHA power.
#[inline(always)]
fn apply_sbox<E: FieldElement>(state: &mut [E]) {
    for elem in state[..STATE_WIDTH].iter_mut() {
        *elem = elem.exp(ALPHA.into());
    }
}
/// Raises each of the first STATE_WIDTH state elements to the INV_ALPHA power,
/// undoing `apply_sbox`.
#[inline(always)]
fn apply_inv_sbox(state: &mut [BaseElement]) {
    for elem in state[..STATE_WIDTH].iter_mut() {
        *elem = elem.exp(INV_ALPHA);
    }
}
/// Multiplies the state by the MDS matrix in place: state <- MDS * state.
#[inline(always)]
#[allow(clippy::needless_range_loop)]
fn apply_mds<E: FieldElement + From<BaseElement>>(state: &mut [E]) {
    let mut result = [E::ZERO; STATE_WIDTH];
    let mut temp = [E::ZERO; STATE_WIDTH];
    for i in 0..STATE_WIDTH {
        // Element-wise products of MDS row i (lifted into E) with the state.
        for j in 0..STATE_WIDTH {
            temp[j] = E::from(MDS[i * STATE_WIDTH + j]) * state[j];
        }
        // result[i] is the row-vector dot product.
        for j in 0..STATE_WIDTH {
            result[i] += temp[j];
        }
    }
    state.copy_from_slice(&result);
}
/// Multiplies the state by the inverse MDS matrix in place, undoing `apply_mds`.
#[inline(always)]
#[allow(clippy::needless_range_loop)]
fn apply_inv_mds<E: FieldElement + From<BaseElement>>(state: &mut [E]) {
    let mut result = [E::ZERO; STATE_WIDTH];
    let mut temp = [E::ZERO; STATE_WIDTH];
    for i in 0..STATE_WIDTH {
        // Element-wise products of INV_MDS row i (lifted into E) with the state.
        for j in 0..STATE_WIDTH {
            temp[j] = E::from(INV_MDS[i * STATE_WIDTH + j]) * state[j];
        }
        // result[i] is the row-vector dot product.
        for j in 0..STATE_WIDTH {
            result[i] += temp[j];
        }
    }
    state.copy_from_slice(&result);
}
// RESCUE CONSTANTS
// ================================================================================================
/// Rescue S-box exponent.
const ALPHA: u32 = 3;
/// Inverse S-box exponent; presumably ALPHA^-1 mod (p - 1) for the f128 base
/// field, so that (x^ALPHA)^INV_ALPHA == x — TODO confirm against the field modulus.
const INV_ALPHA: u128 = 226854911280625642308916371969163307691;
/// MDS matrix for the 4-element state, stored in row-major order.
const MDS: [BaseElement; STATE_WIDTH * STATE_WIDTH] = [
    BaseElement::new(340282366920938463463374557953744960808),
    BaseElement::new(1080),
    BaseElement::new(340282366920938463463374557953744961147),
    BaseElement::new(40),
    BaseElement::new(340282366920938463463374557953744932377),
    BaseElement::new(42471),
    BaseElement::new(340282366920938463463374557953744947017),
    BaseElement::new(1210),
    BaseElement::new(340282366920938463463374557953744079447),
    BaseElement::new(1277640),
    BaseElement::new(340282366920938463463374557953744532108),
    BaseElement::new(33880),
    BaseElement::new(340282366920938463463374557953720263017),
    BaseElement::new(35708310),
    BaseElement::new(340282366920938463463374557953733025977),
    BaseElement::new(925771),
];
/// Inverse of the MDS matrix above, stored in row-major order.
const INV_MDS: [BaseElement; STATE_WIDTH * STATE_WIDTH] = [
    BaseElement::new(18020639985667067681479625318803400939),
    BaseElement::new(119196285838491236328880430704594968577),
    BaseElement::new(231409255903369280423951003551679307334),
    BaseElement::new(311938552114349342492438056332412246225),
    BaseElement::new(245698978747161380010236204726851770228),
    BaseElement::new(32113671753878130773768090116517402309),
    BaseElement::new(284248318938217584166130208504515171073),
    BaseElement::new(118503764402619831976614612559605579465),
    BaseElement::new(42476948408512208745085164298752800413),
    BaseElement::new(283594571303717652525183978492772054516),
    BaseElement::new(94047455979774690913009073579656179991),
    BaseElement::new(260445758149872374743470899536308888155),
    BaseElement::new(12603050626701424572717576220509072651),
    BaseElement::new(250660673575506110946271793719013778251),
    BaseElement::new(113894235293153614657151429548304212092),
    BaseElement::new(303406774346515776750608316419662860081),
];
/// Round constants (ark = "add round key"), one row of STATE_WIDTH * 2
/// elements per cycle step; the last two rows are zero padding so the table
/// covers the full CYCLE_LENGTH of 16 while only 14 rounds are used.
pub const ARK: [[BaseElement; STATE_WIDTH * 2]; CYCLE_LENGTH] = [
    [
        BaseElement::new(252629594110556276281235816992330349983),
        BaseElement::new(121163867507455621442731872354015891839),
        BaseElement::new(244623479936175870778515556108748234900),
        BaseElement::new(181999122442017949289616572388308120964),
        BaseElement::new(130035663054758320517176088024859935575),
        BaseElement::new(274932696133623013607933255959111946013),
        BaseElement::new(130096286077538976127585373664362805864),
        BaseElement::new(209506446014122131232133742654202790201),
    ],
    [
        BaseElement::new(51912929769931267810162308005565017268),
        BaseElement::new(202610584823002946089528994694473145326),
        BaseElement::new(295992101426532309592836871256175669136),
        BaseElement::new(313404555247438968545340310449654540090),
        BaseElement::new(137671644572045862038757754124537020379),
        BaseElement::new(29113322527929260506148183779738829778),
        BaseElement::new(98634637270536166954048957710629281939),
        BaseElement::new(90484051915535813802492401077197602516),
    ],
    [
        BaseElement::new(193753019093186599897082621380539177732),
        BaseElement::new(88328997664086495053801384396180288832),
        BaseElement::new(134379598544046716907663161480793367313),
        BaseElement::new(50911186425769400405474055284903795891),
        BaseElement::new(12945394282446072785093894845750344239),
        BaseElement::new(110650301505380365788620562912149942995),
        BaseElement::new(154214463184362737046953674082326221874),
        BaseElement::new(306646039504788072647764955304698381135),
    ],
    [
        BaseElement::new(279745705918489041552127329708931301079),
        BaseElement::new(111293612078035530300709391234153848359),
        BaseElement::new(18110020378502034462498434861690576309),
        BaseElement::new(41797883582559360517115865611622162330),
        BaseElement::new(333888808893608021579859508112201825908),
        BaseElement::new(291192643991850989562610634125476905625),
        BaseElement::new(115042354025120848770557866862388897952),
        BaseElement::new(281483497320099569269754505499721335457),
    ],
    [
        BaseElement::new(172898111753678285350206449646444309824),
        BaseElement::new(202661860135906394577472615378659980424),
        BaseElement::new(141885268042225970011312316000526746741),
        BaseElement::new(270195331267041521741794476882482499817),
        BaseElement::new(196457080224171120865903216527675657315),
        BaseElement::new(56730777565482395039564396246195716949),
        BaseElement::new(4886253806084919544862202000090732791),
        BaseElement::new(147384194551383352824518757380733021990),
    ],
    [
        BaseElement::new(119476237236248181092343711369608370324),
        BaseElement::new(182869361251406039022577235058473348729),
        BaseElement::new(45308522364899994411952744852450066909),
        BaseElement::new(15438528253368638146901598290564135576),
        BaseElement::new(130060283207960095436997328133261743365),
        BaseElement::new(83953475955438079154228277940680487556),
        BaseElement::new(328659226769709797512044291035930357326),
        BaseElement::new(228749522131871685132212950281473676382),
    ],
    [
        BaseElement::new(46194972462682851176957413491161426658),
        BaseElement::new(296333983305826854863835978241833143471),
        BaseElement::new(138957733159616849361016139528307260698),
        BaseElement::new(67842086763518777676559492559456199109),
        BaseElement::new(45580040156133202522383315452912604930),
        BaseElement::new(67567837934606680937620346425373752595),
        BaseElement::new(202860989528104560171546683198384659325),
        BaseElement::new(22630500510153322451285114937258973361),
    ],
    [
        BaseElement::new(324160761097464842200838878419866223614),
        BaseElement::new(338466547889555546143667391979278153877),
        BaseElement::new(189171173535649401433078628567098769571),
        BaseElement::new(162173266902020502126600904559755837464),
        BaseElement::new(136209703129442038834374731074825683052),
        BaseElement::new(61998071517031804812562190829480056772),
        BaseElement::new(307309080039351604461536918194634835054),
        BaseElement::new(26708622949278137915061761772299784349),
    ],
    [
        BaseElement::new(129516553661717764361826568456881002617),
        BaseElement::new(224023580754958002183324313900177991825),
        BaseElement::new(17590440203644538688189654586240082513),
        BaseElement::new(135610063062379124269847491297867667710),
        BaseElement::new(146865534517067293442442506551295645352),
        BaseElement::new(238139104484181583196227119098779158429),
        BaseElement::new(39300761479713744892853256947725570060),
        BaseElement::new(54114440355764484955231402374312070440),
    ],
    [
        BaseElement::new(222758070305343916663075833184045878425),
        BaseElement::new(323840793618712078836672915700599856701),
        BaseElement::new(103586087979277053032666296091805459741),
        BaseElement::new(160263698024385270625527195046420579470),
        BaseElement::new(76620453913654705501329735586535761337),
        BaseElement::new(117793948142462197480091377165008040465),
        BaseElement::new(86998218841589258723143213495722487114),
        BaseElement::new(203188618662906890442620821687773659689),
    ],
    [
        BaseElement::new(313098786815741054633864043424353402357),
        BaseElement::new(133085673687338880872979866135939079867),
        BaseElement::new(219888424885634764555580944265544343421),
        BaseElement::new(5893221169005427793512575133564978746),
        BaseElement::new(123830602624063632344313821515642988189),
        BaseElement::new(99030942908036387138287682010525589136),
        BaseElement::new(181549003357535890945363082242256699137),
        BaseElement::new(152424978799328476472358562493335008209),
    ],
    [
        BaseElement::new(274481943862544603168725464029979191673),
        BaseElement::new(4975004592976331754728718693838357226),
        BaseElement::new(101850445399221640701542169338886750079),
        BaseElement::new(230325699922192981509673754024218912397),
        BaseElement::new(50419227750575087142720761582056939006),
        BaseElement::new(112444234528764731925178653200320603078),
        BaseElement::new(312169855609816651638877239277948636598),
        BaseElement::new(204255114617024487729019111502542629940),
    ],
    [
        BaseElement::new(95797476952346525817251811755749179939),
        BaseElement::new(306977388944722094681694167558392710189),
        BaseElement::new(300754874465668732709232449646112602172),
        BaseElement::new(25567836410351071106804347269705784680),
        BaseElement::new(129659188855548935155840545784705385753),
        BaseElement::new(228441586459539470069565041053012869566),
        BaseElement::new(178382533299631576605259357906020320778),
        BaseElement::new(274458637266680353971597477639962034316),
    ],
    [
        BaseElement::new(280059913840028448065185235205261648486),
        BaseElement::new(246537412674731137211182698562269717969),
        BaseElement::new(259930078572522349821084822750913159564),
        BaseElement::new(186061633995391650657311511040160727356),
        BaseElement::new(179777566992900315528995607912777709520),
        BaseElement::new(209753365793154515863736129686836743468),
        BaseElement::new(270445008049478596978645420017585428243),
        BaseElement::new(70998387591825316724846035292940615733),
    ],
    [BaseElement::ZERO; 8],
    [BaseElement::ZERO; 8],
];
|
use std::net::TcpListener;
use options::Options;
use threadpool::ThreadPool;
use handler::ConnectionHandler;
// A minimal multi-threaded HTTP server: accepts TCP connections on `addr`
// and dispatches each one to a worker from the thread pool.
pub struct WebServer {
    // Bind address, e.g. "127.0.0.1:8080" (taken from `Options::addr`).
    addr: String,
    // Fixed-size worker pool; sized by `Options::workers`.
    pool: ThreadPool,
}
impl WebServer {
    /// Builds a server from the parsed options: a bind address plus a
    /// worker pool with `options.workers` threads.
    pub fn new(options: Options) -> WebServer {
        WebServer {
            pool: ThreadPool::new(options.workers),
            addr: options.addr,
        }
    }

    /// Binds the listener and serves forever, handing every accepted
    /// connection to the pool. Panics if the address cannot be bound or an
    /// accept fails.
    pub fn start(&self) {
        let listener = TcpListener::bind(&self.addr).unwrap();
        for stream in listener.incoming() {
            let stream = stream.unwrap();
            self.process(move || ConnectionHandler::handle(stream));
        }
    }

    /// Schedules one connection-handling job on the worker pool.
    fn process<F>(&self, handler: F)
    where
        F: FnOnce() + Send + 'static,
    {
        self.pool.execute(handler);
    }
}
|
use crate::part::Part;
use std::fs::File;
use std::io::{BufRead, BufReader};
/// AoC 2020 day 13 part 1: find the bus with the earliest departure at or
/// after `earliest` and return `bus_id * wait_time`.
///
/// Panics if `ids` is empty (like the original) or contains a zero id.
fn part1(earliest: i64, ids: &[i64]) -> i64 {
    // First departure at or after `earliest` is ceil(earliest / id) * id.
    // The previous `earliest / id + 1` form overshot by one full period
    // whenever `earliest` was an exact multiple of `id` (the wait must then
    // be zero, not `id`).
    let (id, departure) = ids
        .iter()
        .map(|&id| (id, (earliest + id - 1) / id * id))
        .min_by_key(|&(_, t)| t)
        .unwrap();
    id * (departure - earliest)
}
// Chinese remainder theorem - taken from Rosetta Code
/// Extended Euclidean algorithm: returns `(g, x, y)` such that
/// `a * x + b * y == g`, where `g = gcd(a, b)`.
fn egcd(a: i64, b: i64) -> (i64, i64, i64) {
    match a {
        0 => (b, 0, 1),
        _ => {
            let (g, x, y) = egcd(b % a, a);
            (g, y - (b / a) * x, x)
        }
    }
}
/// Modular multiplicative inverse of `x` modulo `n`, or `None` when
/// `gcd(x, n) != 1` (i.e. no inverse exists).
fn mod_inv(x: i64, n: i64) -> Option<i64> {
    let (g, raw, _) = egcd(x, n);
    if g != 1 {
        return None;
    }
    // Normalize the Bezout coefficient into [0, n).
    Some((raw % n + n) % n)
}
/// Chinese remainder theorem: finds `t` with `t ≡ residues[i] (mod
/// modulii[i])` for all `i`, assuming pairwise coprime moduli. Returns
/// `None` if any required modular inverse does not exist.
fn chinese_remainder(residues: &[i64], modulii: &[i64]) -> Option<i64> {
    let prod: i64 = modulii.iter().product();
    let sum = residues
        .iter()
        .zip(modulii)
        .try_fold(0i64, |acc, (&r, &m)| {
            let p = prod / m;
            mod_inv(p, m).map(|inv| acc + r * inv * p)
        })?;
    Some(sum % prod)
}
/// AoC 2020 day 13 part 2: earliest timestamp `t` such that each active bus
/// `n` at offset `i` departs at `t + i`, solved via the CRT.
fn part2(ids: &[i64]) -> i64 {
    // Bus `n` at offset `i` imposes t ≡ -i (mod n). `rem_euclid` keeps each
    // residue in [0, n); the previous `n - i` could go negative whenever an
    // offset exceeded the bus id, which let the CRT sum (and the final
    // `sum % prod`) come out negative — a wrong answer.
    let congruences: Vec<(i64, i64)> = ids
        .iter()
        .enumerate()
        .filter(|&(_, &n)| n > 0)
        .map(|(i, &n)| ((n - i as i64).rem_euclid(n), n))
        .collect();
    let residues: Vec<i64> = congruences.iter().map(|&(r, _)| r).collect();
    let modulii: Vec<i64> = congruences.iter().map(|&(_, m)| m).collect();
    chinese_remainder(&residues, &modulii).expect("bus ids must be pairwise coprime")
}
/// Reads the puzzle input (line 1: earliest timestamp, line 2: comma-listed
/// bus ids with "x" placeholders) and runs the requested part.
/// Panics on unreadable input or malformed numbers.
pub fn run(part: Part, input_path: &str) -> i64 {
    let file = File::open(input_path).expect("failed to open input file");
    let lines: Vec<String> = BufReader::new(file)
        .lines()
        .map(|line| line.unwrap())
        .collect();
    let earliest: i64 = lines[0].parse().unwrap();
    match part {
        Part::Part1 => {
            // Part 1 ignores the "x" placeholders entirely.
            let ids: Vec<i64> = lines[1]
                .split(',')
                .filter(|&tok| tok != "x")
                .map(|tok| tok.parse().unwrap())
                .collect();
            part1(earliest, &ids)
        }
        Part::Part2 => {
            // Part 2 keeps positions: "x" becomes 0 so offsets survive.
            let ids: Vec<i64> = lines[1]
                .split(',')
                .map(|tok| if tok == "x" { 0 } else { tok.parse().unwrap() })
                .collect();
            part2(&ids)
        }
    }
}
|
pub mod revision;
use super::apparent_pk::ApparentPrimaryKey;
use apllodb_storage_engine_interface::ColumnName;
use revision::Revision;
use serde::{Deserialize, Serialize};
/// Primary key with revision.
/// Used for Immutable DML.
#[derive(Clone, PartialEq, Hash, Debug, Serialize, Deserialize, new)]
pub struct FullPrimaryKey {
    // The user-facing primary key (column names + values), without any
    // versioning information.
    apparent_pk: ApparentPrimaryKey,
    // Revision of this row version; presumably distinguishes successive
    // versions sharing the same apparent PK — see `revision::Revision`.
    revision: Revision,
}
impl FullPrimaryKey {
    /// Returns the apparent (user-facing) part of this key.
    pub fn apparent_pk(&self) -> &ApparentPrimaryKey {
        &self.apparent_pk
    }
    /// Returns the column names making up the apparent primary key.
    pub fn column_names(&self) -> &[ColumnName] {
        self.apparent_pk.column_names()
    }
}
|
use tide::Request;
use crate::state::State;
/// Liveness-probe endpoint: always answers with an empty 200 body,
/// ignoring the incoming request entirely.
pub(crate) async fn health_check(_req: Request<State>) -> tide::Result {
    let empty_body = "";
    Ok(empty_body.into())
}
|
//! A thin wrapper over [`rustc_hash`] with some extra helper functions.
#![deny(missing_debug_implementations, missing_docs, rust_2018_idioms)]
use std::hash::{BuildHasherDefault, Hash};
pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
/// Returns a map with the given capacity.
pub fn map_with_capacity<K, V>(cap: usize) -> FxHashMap<K, V> {
    let hasher = BuildHasherDefault::default();
    FxHashMap::with_capacity_and_hasher(cap, hasher)
}
/// Returns a set with the given capacity.
pub fn set_with_capacity<K>(cap: usize) -> FxHashSet<K> {
    let hasher = BuildHasherDefault::default();
    FxHashSet::with_capacity_and_hasher(cap, hasher)
}
/// Returns a map with the given elements.
///
/// Panics (debug and release alike) if the array contains a duplicate key.
pub fn map<K, V, const N: usize>(xs: [(K, V); N]) -> FxHashMap<K, V>
where
    K: Eq + Hash,
{
    let mut ret = map_with_capacity(N);
    for (k, v) in xs {
        let previous = ret.insert(k, v);
        assert!(previous.is_none());
    }
    ret
}
/// Returns a set with the given elements.
///
/// Panics (debug and release alike) if the array contains a duplicate.
pub fn set<K, const N: usize>(xs: [K; N]) -> FxHashSet<K>
where
    K: Eq + Hash,
{
    let mut ret = set_with_capacity(N);
    for k in xs {
        let inserted = ret.insert(k);
        assert!(inserted);
    }
    ret
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// SyntheticsDeletedTest : Object containing a deleted Synthetic test ID with the associated deletion timestamp.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SyntheticsDeletedTest {
    /// Deletion timestamp of the Synthetic test ID.
    #[serde(rename = "deleted_at", skip_serializing_if = "Option::is_none")]
    pub deleted_at: Option<String>,
    /// The Synthetic test ID deleted.
    #[serde(rename = "public_id", skip_serializing_if = "Option::is_none")]
    pub public_id: Option<String>,
}
impl SyntheticsDeletedTest {
    /// Object containing a deleted Synthetic test ID with the associated deletion timestamp.
    pub fn new() -> SyntheticsDeletedTest {
        SyntheticsDeletedTest {
            deleted_at: None,
            public_id: None,
        }
    }
}
// `new()` takes no arguments, so the type has an obvious default value;
// providing `Default` satisfies clippy::new_without_default and lets the
// type participate in `..Default::default()` struct updates.
impl Default for SyntheticsDeletedTest {
    fn default() -> Self {
        Self::new()
    }
}
|
// Copyright (c) 2016, <daggerbot@gmail.com>
// This software is available under the terms of the zlib license.
// See COPYING.md for more information.
#![feature(try_from)]
// Internal facade that groups this project's companion crates under one
// `aurum::` path (e.g. `aurum::linear`).
mod aurum {
    pub extern crate aurum_linear as linear;
    // Windows-only helper crate; absent on other targets.
    #[cfg(windows)]
    pub extern crate aurum_winutil as winutil;
}
#[cfg(all(unix, not(target_os = "macos")))]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate log;
#[cfg(all(unix, not(target_os = "macos")))]
extern crate wayland_client;
// Flattens the Win32 FFI crates (the type definitions in `winapi` plus the
// per-DLL function crates) into one `winapi` namespace, so call sites can
// write `winapi::...` regardless of which DLL exports the symbol.
#[cfg(windows)]
mod winapi {
    extern crate winapi;
    extern crate kernel32;
    extern crate user32;
    extern crate gdi32;
    pub use self::winapi::*;
    pub use self::kernel32::*;
    pub use self::user32::*;
    pub use self::gdi32::*;
}
#[cfg(all(unix, not(target_os = "macos")))]
extern crate x11_dl;
/// Type used by screen and window coordinates within the display subsystem.
pub type Coord = i32;
#[macro_use]
mod error;
pub mod platform;
mod device;
mod display;
mod event;
mod id;
mod imp;
mod pixel_format;
mod window;
#[allow(dead_code)]
mod util;
pub use device::{Device, PixelFormats};
pub use display::Display;
pub use error::{Error, ErrorKind, Result};
pub use event::{Event, PollResult, Timeout};
pub use id::Id;
pub use pixel_format::PixelFormat;
pub use window::{Window, WindowStyle};
|
use crate::Error;
use rustbus::{message_builder::MarshalledMessage, MessageType};
use std::cell::Cell;
use std::rc::Rc;
// Operation labels, presumably interpolated as the `err_str` prefix of
// D-Bus failure messages (see `set_power_cb`) — confirm against callers.
// `'static` is implicit on const references (clippy::redundant_static_lifetimes).
pub(crate) const POWER: &str = "Setting power";
pub(crate) const DISCOVERABLE: &str = "Setting discoverable";
pub(crate) fn set_power_cb(
res: MarshalledMessage,
(powered, on, err_str): (Rc<Cell<bool>>, bool, &'static str),
) -> Result<(), Error> {
match res.typ {
MessageType::Reply => {
powered.replace(on);
Ok(())
}
MessageType::Error => Err(Error::DbusReqErr(format!(
"{} call failed: {:?}",
err_str, res
))),
_ => unreachable!(),
}
}
|
#[macro_use] extern crate text_io;
extern crate chrono;
pub mod advent1;
pub mod advent2;
pub mod advent3;
pub mod advent4;
pub mod advent5;
pub mod advent6;
pub mod advent7;
pub mod advent8;
pub mod advent9;
pub mod advent10;
pub mod advent11;
/// Runs the currently-active Advent of Code days, printing each result or
/// error. Earlier days are kept commented out as a record of what already
/// ran.
fn main() {
    // advent1::advent1a();
    // advent1::advent1b();
    // advent2::advent2a();
    // advent2::advent2b();
    // match advent3::advent3a() {
    //     Ok(()) => {}
    //     Err(e) => println!("{}", e)
    // }
    // match advent4::advent4a() {
    //     Ok(n) => println!("Advent4a: {:?}", n),
    //     Err(e) => println!("{}", e)
    // }
    // match advent5::advent5a() {
    //     Ok(n) => println!("Advent5a: {:?}", n),
    //     Err(e) => println!("{}", e)
    // }
    // match advent5::advent5b() {
    //     Ok(n) => println!("Advent5b: {:?}", n),
    //     Err(e) => println!("{}", e)
    // }
    //
    // match advent6::advent6a() {
    //     Ok(n) => println!("Advent6a: {:?}", n),
    //     Err(e) => println!("{}", e)
    // }
    //
    // match advent6::advent6b() {
    //     Ok(n) => println!("Advent6b: {:?}", n),
    //     Err(e) => println!("{}", e)
    // }
    match advent7::advent7a() {
        Ok(n) => println!("Advent7a: {:?}", n),
        Err(e) => println!("{}", e)
    }
    match advent7::advent7b() {
        Ok(n) => println!("Advent7b: {:?}", n),
        Err(e) => println!("{}", e)
    }
    match advent8::advent8a() {
        // Fixed label: this arm runs advent8a but used to print "Advent8b".
        Ok(n) => println!("Advent8a: {:?}", n),
        Err(e) => println!("{}", e)
    }
    match advent9::advent9a() {
        Ok(n) => println!("Advent9a: {:?}", n),
        Err(e) => println!("{}", e)
    }
    match advent9::advent9b() {
        Ok(n) => println!("Advent9b: {:?}", n),
        Err(e) => println!("{}", e)
    }
    match advent10::advent10a() {
        Ok(n) => println!("Advent10a: {:?}", n),
        Err(e) => println!("{}", e)
    }
    match advent11::advent11a() {
        Ok(n) => println!("Advent11a: {:?}", n),
        Err(e) => println!("{}", e)
    }
    // match advent11::advent11b() {
    //     Ok(n) => println!("Advent11b: {:?}", n),
    //     Err(e) => println!("{}", e)
    // }
}
|
extern crate hyperltl;
#[macro_use]
extern crate pest_derive;
pub mod app;
pub mod automata;
pub(crate) mod encoding;
pub mod logic;
pub mod specification;
|
//! Implements iterative deepening, aspiration windows, multi-PV,
//! "searchmoves".
mod aspiration;
mod multipv;
use self::multipv::Multipv;
use std::thread;
use std::time::Duration;
use std::cell::RefCell;
use std::sync::Arc;
use std::sync::mpsc::{channel, Sender, Receiver, TryRecvError};
use regex::Regex;
use uci::{SetOption, OptionDescription};
use moves::Move;
use value::*;
use depth::*;
use ttable::*;
use search_node::SearchNode;
use search::{Search, SearchParams, SearchReport};
// In this module we use the `DeepeningSearch` trait for depth-first
// searches too, so we rename it to avoid confusion.
use search::DeepeningSearch as SearchExecutor;
/// Executes searches with iterative deepening, aspiration windows,
/// multi-PV, and "searchmoves".
///
/// *Iterative deepening* works as follows: A depth-first search is
/// executed with a depth of one ply, then the depth is incremented
/// and another search is executed. This process is repeated until the
/// search is terminated or the requested search depth is reached. In
/// case of a terminated search, the engine can always fall back to
/// the move selected in the last iteration of the search.
///
/// *Aspiration windows* are a way to reduce the search space in the
/// search. The way it works is that we get the value from the last
/// search iteration, calculate a window around it, and use this as
/// alpha-beta bounds for the next search. Because the window is
/// narrower, more beta cutoffs are achieved, and the search takes a
/// shorter time. The drawback is that if the true score is outside
/// this window, then a costly re-search must be made.
///
/// In *multi-PV* mode the engine calculates several principal
/// variations (PV), each one starting with a different first
/// move. This mode is very useful for chess analysis, but can make
/// the search slower.
///
/// *"searchmoves"* is a feature in the UCI protocol, which makes
/// possible to restrict the analysis to a subset of moves
/// only. Again, this is very useful for chess analysis.
///
/// # Usage
///
/// If `T` is a depth-first searcher, instantiate `Deepening<T>` to
/// turn it into a deepening searcher with aspiration windows,
/// multi-PV, and "searchmoves" support.
///
/// **Important note:** `Deepening` requires a proper transposition
/// table to do its work. It can not work with `DummyTtable`.
pub struct Deepening<T: Search> {
    // Parameters of the search currently in progress (root position,
    // requested depth, bounds, searchmoves).
    params: SearchParams<T::SearchNode>,
    // Set once a "TERMINATE" message has been received; stops us from
    // scheduling the next iteration.
    search_is_terminated: bool,
    // Nodes accumulated over all fully completed iterations.
    previously_searched_nodes: u64,
    // The real work will be handed over to `multipv`.
    multipv: Multipv<ThreadExecutor<T>>,
    // The search depth completed so far.
    depth: Depth,
    // The value for the root position so far.
    value: Value,
    // The depth at which the search is likely to be terminated
    // (set externally via a "TARGET_DEPTH=n" message).
    depth_target: Depth,
}
impl<T: Search> SearchExecutor for Deepening<T> {
    type Ttable = T::Ttable;
    type SearchNode = T::SearchNode;
    type ReportData = Vec<Variation>;

    /// Creates an idle deepening searcher over the given transposition table.
    fn new(tt: Arc<Self::Ttable>) -> Deepening<T> {
        Deepening {
            params: bogus_params(),
            search_is_terminated: false,
            previously_searched_nodes: 0,
            multipv: Multipv::new(tt),
            depth: 0,
            value: VALUE_UNKNOWN,
            depth_target: DEPTH_MAX,
        }
    }

    /// Validates `params`, resets the iteration state, and kicks off the
    /// depth-1 iteration.
    fn start_search(&mut self, params: SearchParams<T::SearchNode>) {
        assert!(params.depth > 0, "For deepening, depth must be at least 1.");
        debug_assert!(params.depth <= DEPTH_MAX);
        debug_assert!(params.lower_bound >= VALUE_MIN);
        debug_assert!(params.upper_bound <= VALUE_MAX);
        debug_assert!(params.lower_bound < params.upper_bound);
        // The root moves to search must be unique. (This line had been
        // corrupted to `¶ms.searchmoves` by an encoding mangle of
        // `&params`; restored.)
        debug_assert!(!contains_dups(&params.searchmoves));
        self.params = params;
        self.search_is_terminated = false;
        self.previously_searched_nodes = 0;
        self.depth = 0;
        self.value = VALUE_UNKNOWN;
        self.depth_target = DEPTH_MAX;
        self.search_next_depth();
    }

    /// Polls the inner multi-PV searcher and rewrites its report in terms of
    /// the deepening iteration: when an iteration completes and more depth
    /// was requested, the next iteration is scheduled and `done` is cleared.
    fn try_recv_report(&mut self) -> Result<SearchReport<Self::ReportData>, TryRecvError> {
        let SearchReport {
            searched_nodes,
            depth,
            value,
            data,
            done,
            ..
        } = try!(self.multipv.try_recv_report());
        if value != VALUE_UNKNOWN {
            self.value = value;
        }
        if !data.is_empty() {
            // The inner searcher may reorder the root moves; keep our copy
            // in sync so the next iteration searches them in that order.
            debug_assert!(contains_same_moves(&self.params.searchmoves, &data));
            self.params.searchmoves = data.clone();
        }
        let mut report = SearchReport {
            search_id: self.params.search_id,
            searched_nodes: self.previously_searched_nodes + searched_nodes,
            depth: self.depth,
            value: self.value,
            data: vec![],
            done: done,
        };
        if done && !self.search_is_terminated {
            // A full iteration finished one ply deeper than the last one.
            debug_assert_eq!(depth, self.depth + 1);
            report.depth = depth;
            report.data.extend(self.multipv.extract_variations());
            self.previously_searched_nodes = report.searched_nodes;
            self.depth = depth;
            if depth < self.params.depth {
                self.search_next_depth();
                report.done = false;
            }
        }
        Ok(report)
    }

    fn wait_report(&self, duration: Duration) {
        self.multipv.wait_report(duration);
    }

    /// Intercepts "TARGET_DEPTH=n" (records the externally estimated final
    /// depth) and "TERMINATE" (remembers termination); everything else is
    /// forwarded to the inner searcher.
    fn send_message(&mut self, message: &str) {
        lazy_static! {
            static ref RE: Regex = Regex::new(r"^TARGET_DEPTH=([-+]?\d+)$").unwrap();
        }
        if let Some(captures) = RE.captures(message) {
            self.depth_target = captures
                .get(1)
                .unwrap()
                .as_str()
                .parse::<Depth>()
                .unwrap();
        } else {
            if message == "TERMINATE" {
                self.search_is_terminated = true;
            }
            self.multipv.send_message(message);
        }
    }
}
impl<T: Search> SetOption for Deepening<T> {
fn options() -> Vec<(&'static str, OptionDescription)> {
Multipv::<ThreadExecutor<T>>::options()
}
fn set_option(name: &str, value: &str) {
Multipv::<ThreadExecutor<T>>::set_option(name, value)
}
}
impl<T: Search> Deepening<T> {
    /// Starts the next iteration, one ply deeper than the last completed one.
    fn search_next_depth(&mut self) {
        let next_params = SearchParams {
            search_id: 0,
            depth: self.depth + 1,
            ..self.params.clone()
        };
        self.multipv.start_search(next_params);
    }
}
/// A helper type. It turns a `Search` into `SearchExecutor`.
struct ThreadExecutor<T: Search> {
    // Shared transposition table, cloned into every spawned search thread.
    tt: Arc<T::Ttable>,
    // Sends control messages (e.g. "TERMINATE") to the current worker;
    // replaced on every `start_search`.
    messages_tx: Sender<String>,
    // Receives progress reports from the worker thread.
    reports_rx: Receiver<SearchReport<T::ReportData>>,
    // Kept so each new worker can be handed a sender that reports back here.
    reports_tx: Sender<SearchReport<T::ReportData>>,
    // A report fetched by `wait_report` but not yet handed to the caller.
    pending_report: RefCell<Option<SearchReport<T::ReportData>>>,
    // Join handle of the currently running worker thread, if any.
    handle: Option<thread::JoinHandle<Value>>,
}
impl<T: Search> SearchExecutor for ThreadExecutor<T> {
    type Ttable = T::Ttable;
    type SearchNode = T::SearchNode;
    type ReportData = T::ReportData;

    /// Creates an executor bound to `tt`; no worker is spawned yet.
    fn new(tt: Arc<Self::Ttable>) -> Self {
        let (reports_tx, reports_rx) = channel();
        Self {
            tt: tt,
            // Placeholder sender, replaced by a live one in `start_search`.
            messages_tx: channel().0,
            reports_rx: reports_rx,
            reports_tx: reports_tx,
            pending_report: RefCell::new(None),
            handle: None,
        }
    }

    /// Joins any previous worker thread, then spawns a fresh one running
    /// `T`'s search with the given parameters.
    fn start_search(&mut self, params: SearchParams<Self::SearchNode>) {
        let (messages_tx, messages_rx) = channel();
        self.messages_tx = messages_tx;
        if let Some(old_worker) = self.handle.take() {
            old_worker.join().ok();
        }
        self.handle = Some(T::spawn(params,
                                    self.tt.clone(),
                                    self.reports_tx.clone(),
                                    messages_rx));
    }

    /// Blocks up to `timeout_after` for a report, stashing it so the next
    /// `try_recv_report` returns it.
    fn wait_report(&self, timeout_after: Duration) {
        let mut pending = self.pending_report.borrow_mut();
        if pending.is_none() {
            *pending = self.reports_rx.recv_timeout(timeout_after).ok();
        }
    }

    /// Returns a stashed report first; only then polls the channel.
    fn try_recv_report(&mut self) -> Result<SearchReport<Self::ReportData>, TryRecvError> {
        match self.pending_report.borrow_mut().take() {
            Some(report) => Ok(report),
            None => self.reports_rx.try_recv(),
        }
    }

    fn send_message(&mut self, msg: &str) {
        // The worker may already have exited; a failed send is harmless.
        self.messages_tx.send(msg.to_string()).ok();
    }
}
impl<T: Search> SetOption for ThreadExecutor<T> {
    // Pure delegation: the executor exposes exactly the options of the
    // wrapped searcher type `T`.
    fn options() -> Vec<(&'static str, OptionDescription)> {
        T::options()
    }
    fn set_option(name: &str, value: &str) {
        T::set_option(name, value);
    }
}
/// A helper function. It returns bogus search parameters.
fn bogus_params<T: SearchNode>() -> SearchParams<T> {
    // A bare-kings position; it only has to be a valid placeholder until
    // `start_search` supplies real parameters.
    const FEN: &'static str = "7k/8/8/8/8/8/8/7K w - - 0 1";
    let mut no_moves = vec![].into_iter();
    SearchParams {
        search_id: 0,
        position: T::from_history(FEN, &mut no_moves).ok().unwrap(),
        depth: 1,
        lower_bound: VALUE_MIN,
        upper_bound: VALUE_MAX,
        searchmoves: vec![Move::invalid()],
    }
}
/// A helper function. It checks if there are moves in the supplied
/// list that occur more than once.
///
/// Takes a slice instead of `&Vec<Move>`; existing `&Vec` call sites still
/// work via deref coercion.
fn contains_dups(list: &[Move]) -> bool {
    // Sort a copy, drop adjacent duplicates, and compare lengths: anything
    // removed by `dedup` was a duplicate.
    let mut sorted = list.to_vec();
    sorted.sort();
    sorted.dedup();
    sorted.len() < list.len()
}
/// A helper function. It checks if the two supplied lists of moves
/// contain the same moves, possibly in different order.
///
/// Takes slices instead of `&Vec<Move>`; existing `&Vec` call sites still
/// work via deref coercion.
fn contains_same_moves(list1: &[Move], list2: &[Move]) -> bool {
    let mut sorted1 = list1.to_vec();
    let mut sorted2 = list2.to_vec();
    sorted1.sort();
    sorted2.sort();
    sorted1 == sorted2
}
|
// Tutorial walk-through of Rust's ownership, borrowing and slice rules.
// Many bindings are deliberately unused or shadowed to trigger the exact
// compiler behavior being demonstrated — do not "clean them up".
fn main() {
    // Ownership Rules
    // 1. Each value in Rust has a variable that's called its owner.
    // 2. There can only be one owner at a time
    // 3. When the owner goes out of scope, the value will be dropped
    //
    let s = "hello";
    {
        let ss = "hello"; // ss is valid from this point forward
        // do stuff with ss
    } // this scope is now over, and ss is no longer valid
    let mut s = String::from("hello");
    s.push_str(", world!");
    println!("{}", s);
    let s1 = String::from("hello");
    let s2 = s1; // s1 is moved to s2
    // println!("s1 is {}", s1);
    let s3 = s2.clone();
    println!("s2 is {}, s3 is {}", s2, s3);
    let x = 5;
    let y = x;
    println!("x is {}, y is {}", x, y);
    // types with Copy Trait
    // 1. All the integer types, such as u32,
    // 2. The Boolean type, bool. with values true and false
    // 3. All the floating point types, such as f64
    // 4. The character type, char.
    // 5. Tuples, if they only contain types that are also copy, for example, (i32,i32) is Copy,
    // but (i32, String) is not.
    let s = String::from("hello"); // s comes into scope
    takes_ownership(s); // s's value moves into function and so is no longer valid here
    let x = 5; // x comes into scope
    makes_copy(x); // but i32 is Copy, so it's okay to still use x afterward
    let s1 = gives_ownership(); // moves its return value into s1
    let s2 = String::from("hello"); // s2 comes into scope
    let s3 = takes_and_gives_back(s2); // s2 is moved into takes_and_gives_back function, which also moves its return value into s3
    let ss = String::from("hello world");
    let (s4, len) = calc_len(ss);
    println!("{} len is {}", s4, len);
    // ordinary reference
    let ref_s = String::from("hello world");
    let len_ref = calc_len_ref(&ref_s);
    println!("{} len is {}", ref_s, len_ref);
    // mutable reference
    let mut s5 = String::from("hello");
    change_str(&mut s5);
    let mut s6 = String::from("hello");
    // let r1 = &mut s6; r1 and r2 would be two mutable borrows of the same value and cause a data race
    {
        let r1 = &mut s6;
    } // r1 goes out of scope, so we can make a new reference with no problem
    let r2 = &mut s6;
    // println!("r1 is {}", r1);
    println!("r2 is {}", r2);
    // data race occurs:
    // 1. Two or more pointers access the same data at the same time
    // 2. At least one of the pointers is being used to write to the data
    // 3. There's no mechanism being used to synchronize access to the data
    let mut s7 = String::from("hello");
    let r1 = &s7; // No problem
    let r2 = &s7; // No problem
    // let r3 = &mut s7; // BIG PROBLEM
    println!("r1 is {}", r1);
    println!("r2 is {}", r2);
    let r3 = &mut s7; // no problem: the mutable borrow starts after the last use of the immutable references
    println!("r3 is {}", r3);
    // let d = dangle();
    let d = no_dangle();
    // 1. At any given time, you can have either one mutable reference or any number of immutable
    // references
    // 2. References must always be valid
    slice_test();
} // Here, x goes out of scope, then s; because s's value was moved, nothing special happens
// s3 goes out of scope and is dropped
// s2 goes out of scope but was moved, so nothing happens
// s1 goes out of scope and is dropped
// Takes ownership of its argument: the caller's String is moved in here.
fn takes_ownership(some_str: String) {
    println!("{}", some_str);
} // some_str goes out of scope; `drop` is called and the heap memory is freed
// Receives a Copy type: the caller keeps its own value untouched.
fn makes_copy(some_int: i32) {
    println!("{}", some_int);
} // some_int goes out of scope; nothing special happens for Copy types
// Creates a String and moves it out to whoever calls this function.
fn gives_ownership() -> String {
    String::from("hello")
}
// Takes ownership of a String and immediately hands it back to the caller.
fn takes_and_gives_back(a_string: String) -> String {
    a_string
}
// Consumes the String and returns it together with its byte length, so the
// caller regains ownership alongside the computed value.
fn calc_len(s: String) -> (String, usize) {
    let byte_len = s.len();
    (s, byte_len)
}
// Borrows the String instead of taking ownership; the caller keeps it.
fn calc_len_ref(s: &String) -> usize {
    s.len()
} // s goes out of scope, but since it never owned the String, nothing happens
// Mutates the borrowed String in place through a mutable reference.
fn change_str(some_str: &mut String) {
    some_str.push_str(", world");
}
// fn dangle() -> &String {
//     // would return a reference to a String owned by this function
//     let s = String::from("hello");
//     &s // reference to s...
// } // ...but s is dropped here, so the reference would dangle. Danger!!!

// The safe alternative: move the String out instead of referencing it.
fn no_dangle() -> String {
    String::from("hello")
}
/// Demonstrates string and array slices.
fn slice_test() {
    let mut s = String::from("hello world");
    let word = first_word(&s); // word will get the value 5
    println!("first word tail index of {} is {}", s, word);
    s.clear(); // this empties the String, making it equal to ""
    // word still has the value 5 here, but the String it indexed is gone
    let mut ss = String::from("hello world");
    let f = first_word_slice(&ss); // immutable borrow
    // ss.clear(); would be a mutable borrow while `f` is still live
    println!("first word implemented by slice of string {} is {}", ss, f); // immutable borrow later used here
    let f1 = first_word_slice_str(&ss[..]);
    println!(
        "first word implemented by str slice of string {} is {}",
        ss, f1
    ); // immutable borrow later used here
    let sss = "hello"; // sss is an immutable &str
    let f2 = first_word_slice_str(sss);
    println!(
        "first word implemented by str slice of string {} is {}",
        sss, f2
    ); //
    let s = String::from("hello world");
    let hello = &s[0..5];
    let world = &s[6..11];
    println!("first world of {} is {}", s, hello);
    println!("second world of {} is {}", s, world);
    let len = s.len();
    // BUG FIX: these used to read `&[3..len]` etc., which builds a reference
    // to a one-element array containing a Range value — not a slice of `s`.
    // Index into the string as the surrounding prints clearly intend.
    let slice1 = &s[3..len];
    println!("slice1 of {} is {:?}", s, slice1);
    let slice2 = &s[3..];
    println!("slice2 of {} is {:?}", s, slice2);
    let slice3 = &s[0..len];
    println!("slice3 of {} is {:?}", s, slice3);
    let slice4 = &s[..];
    println!("slice4 of {} is {:?}", s, slice4);
    let a = [1, 2, 3, 4, 5];
    let slice = &a[2..4];
    for i in slice.iter() {
        println!("cur slice is {}", i);
    }
}
/// Returns the byte index just past the first word: the index of the first
/// space, or the string's length when it contains no space.
///
/// Takes `&str` instead of `&String` so it also works on literals; existing
/// `&String` call sites still compile via deref coercion.
fn first_word(s: &str) -> usize {
    s.as_bytes()
        .iter()
        .position(|&b| b == b' ')
        .unwrap_or(s.len())
}
// Same contract as `first_word`, but returns the first word as a string
// slice tied to the borrow of `s`, instead of a bare index.
fn first_word_slice(s: &String) -> &str {
    match s.as_bytes().iter().position(|&b| b == b' ') {
        Some(space_at) => &s[..space_at],
        None => &s[..],
    }
}
// Like `first_word_slice`, but accepts any `&str` (String slices and
// literals alike). Splitting on ' ' always yields at least one piece, so
// the `unwrap_or` arm is unreachable in practice.
fn first_word_slice_str(s: &str) -> &str {
    s.split(' ').next().unwrap_or(s)
}
|
use std::fs;
use std::fs::File;
use std::os::unix::ffi::OsStrExt;
use std::path::Path;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
use std::thread;
use std::time::Duration;
use errno;
use libc;
use output::Output;
use rand;
use rand::Rng;
use ::misc::*;
// Cheap-to-clone handle for writing repository files atomically: files are
// staged in a "tmp" directory and renamed into place on `commit`, under an
// exclusive repository lock. All clones share the same state.
#[ derive (Clone) ]
pub struct AtomicFileWriter {
    // Shared mutable state; the Mutex serialises create/delete/commit.
    state: Arc <Mutex <AtomicFileWriterState>>,
}
struct AtomicFileWriterState {
    // Raw fd of the repository "lock" file; held (flock + fcntl) for the
    // writer's whole lifetime and closed in Drop.
    lock_fd: libc::c_int,
    // The repository's "tmp" directory where new files are staged.
    temp_dir_path: PathBuf,
    // Staged files: (random temp file name, final target path).
    temp_files: Vec <(String, PathBuf)>,
    // Paths scheduled for removal at commit time.
    delete_files: Vec <PathBuf>,
}
impl AtomicFileWriter {

    /// Opens (creating if necessary) the repository's "lock" file, obtains
    /// an exclusive lock on it — waiting either by polling (`sleep_time` =
    /// `Some`) or by blocking (`None`) — and makes sure the "tmp" staging
    /// directory exists. On any failure the lock fd is closed and an error
    /// string is returned.
    pub fn new (
        output: & Output,
        repository_path: & Path,
        sleep_time: Option <Duration>,
    ) -> Result <AtomicFileWriter, String> {

        // create or open lock file

        let lock_path =
            repository_path.join ("lock");

        // Build a NUL-terminated byte string by hand so the path can be
        // passed to the raw libc::open call below.
        let lock_path_c_str: Vec <u8> =
            lock_path
                .as_os_str ().as_bytes ()
                .clone ().into_iter ()
                .chain (b"\0")
                .map (|&c| c)
                .collect ();

        let lock_fd = unsafe {
            libc::open (
                & lock_path_c_str [0]
                    as * const u8
                    as * const i8,
                libc::O_CREAT | libc::O_WRONLY | libc::O_TRUNC,
                0o0600,
            )
        };

        if lock_fd < 0 {
            return Err (
                format! (
                    "Error creating lock file {}: {}",
                    lock_path.to_string_lossy (),
                    errno::errno ()));
        }

        // obtain lock

        match sleep_time {
            Some (sleep_time) =>
                Self::lock_non_blocking (
                    output,
                    sleep_time,
                    lock_fd),
            None =>
                Self::lock_blocking (
                    output,
                    lock_fd),
        }.map_err (|error| {
            // Locking failed: don't leak the fd.
            unsafe {
                libc::close (lock_fd);
            }
            format! (
                "Error obtaining lock on {}: {}",
                lock_path.to_string_lossy (),
                error)
        }) ?;

        // create tmp directory

        let temp_dir_path =
            repository_path.join ("tmp");

        if ! temp_dir_path.exists () {

            io_result_with_prefix (
                || format! (
                    "Error creating tmp directory {}: ",
                    temp_dir_path.clone ().to_string_lossy ()),
                fs::create_dir (
                    temp_dir_path.clone (),
                ),
            ).map_err (
                |error| {
                    // Again, release the lock fd before bailing out.
                    unsafe {
                        libc::close (lock_fd);
                    }
                    error
                }) ?;

        }

        Ok (AtomicFileWriter {
            state: Arc::new (Mutex::new (
                AtomicFileWriterState {
                    lock_fd: lock_fd,
                    temp_dir_path: temp_dir_path,
                    temp_files: Vec::new (),
                    delete_files: Vec::new (),
                },
            )),
        })

    }

    /// Acquires both an flock and an fcntl write lock on `lock_fd`,
    /// retrying the flock every `sleep_time` while another process holds
    /// it (EWOULDBLOCK). Returns the raw errno as a string on failure.
    fn lock_non_blocking (
        output: & Output,
        sleep_time: Duration,
        lock_fd: libc::c_int,
    ) -> Result <(), String> {

        let output_job =
            output_job_start! (
                output,
                "Waiting for repository lock");

        // lock with flock

        loop {

            let flock_result = unsafe {
                libc::flock (
                    lock_fd,
                    libc::LOCK_EX | libc::LOCK_NB,
                )
            };

            if flock_result != 0 {

                // Lock is held elsewhere: sleep and poll again.
                if errno::errno () == errno::Errno (libc::EWOULDBLOCK) {

                    thread::sleep (
                        sleep_time);

                    continue;

                }

                return Err (
                    format! (
                        "{}",
                        errno::errno ()));

            }

            break;

        }

        // lock with fcntl

        let mut fcntl_flock =
            libc::flock {
                l_type: F_WRLCK,
                l_whence: libc::SEEK_SET as i16,
                // l_start = 0, l_len = 0 means "lock the whole file".
                l_start: 0,
                l_len: 0,
                l_pid: 0,
            };

        let fcntl_result = unsafe {
            libc::fcntl (
                lock_fd,
                libc::F_SETLKW,
                & mut fcntl_flock
                    as * mut libc::flock,
            )
        };

        if fcntl_result != 0 {

            return Err (
                format! (
                    "{}",
                    errno::errno ()));

        }

        // return

        output_job.remove ();

        Ok (())

    }

    /// Like `lock_non_blocking`, but lets the flock call itself block
    /// until the lock becomes available instead of polling.
    fn lock_blocking (
        output: & Output,
        lock_fd: libc::c_int,
    ) -> Result <(), String> {

        let output_job =
            output_job_start! (
                output,
                "Waiting for repository lock");

        // lock with flock

        let flock_result = unsafe {
            libc::flock (
                lock_fd,
                libc::LOCK_EX,
            )
        };

        if flock_result != 0 {

            return Err (
                format! (
                    "{}",
                    errno::errno ()));

        }

        // lock with fcntl

        let mut fcntl_flock =
            libc::flock {
                l_type: F_WRLCK,
                l_whence: libc::SEEK_SET as i16,
                // Whole-file lock (offset 0, length 0).
                l_start: 0,
                l_len: 0,
                l_pid: 0,
            };

        let fcntl_result = unsafe {
            libc::fcntl (
                lock_fd,
                libc::F_SETLKW,
                & mut fcntl_flock
                    as * mut libc::flock,
            )
        };

        if fcntl_result != 0 {

            return Err (
                format! (
                    "{}",
                    errno::errno ()));

        }

        // return

        output_job.remove ();

        Ok (())

    }

    /// Creates a staging file under the tmp directory (random 16-char
    /// name) that will be renamed to `target_path` on `commit`. Returns
    /// the open `File` for the caller to write into.
    pub fn create (
        & self,
        target_path: PathBuf,
    ) -> Result <File, String> {

        let mut self_state =
            self.state.lock ().unwrap ();

        let temp_file_name: String =
            rand::thread_rng ()
                .gen_ascii_chars ()
                .take (16)
                .collect ();

        let temp_file_path =
            self_state.temp_dir_path.join (
                & temp_file_name);

        let temp_file =
            io_result_with_prefix (
                || format! (
                    "Error creating temporary file {}: ",
                    temp_file_path.to_string_lossy ()),
                File::create (
                    & temp_file_path),
            ) ?;

        self_state.temp_files.push (
            (
                temp_file_name,
                target_path,
            ),
        );

        Ok (temp_file)

    }

    /// Schedules `delete_path` for removal; the file is only actually
    /// removed when `commit` runs.
    pub fn delete (
        & self,
        delete_path: PathBuf,
    ) {

        let mut self_state =
            self.state.lock ().unwrap ();

        self_state.delete_files.push (
            delete_path)

    }

    /// Applies all staged work: fsyncs every staged file, renames each one
    /// onto its target path (creating parent directories as needed), then
    /// removes the scheduled deletions. Both queues are cleared on success.
    pub fn commit (
        & self
    ) -> Result <(), String> {

        let mut self_state =
            self.state.lock ().unwrap ();

        // sync all temp files

        for & (ref temp_file_name, _)
        in self_state.temp_files.iter () {

            // Re-open the staged file to fsync it, so its contents are
            // durable before the rename makes it visible.
            let temp_file = (
                io_result_with_prefix (
                    || format! (
                        "Error syncing temp file {}: ",
                        temp_file_name),
                    File::open (
                        self_state.temp_dir_path.join (
                            temp_file_name)))
            ) ?;

            io_result_with_prefix (
                || format! (
                    "Error syncing temp file {}: ",
                    temp_file_name),
                temp_file.sync_all ()
            ) ?;

        }

        // rename temp files

        for & (ref temp_file_name, ref target_path)
        in self_state.temp_files.iter () {

            let parent_dir =
                target_path.parent ().unwrap ();

            io_result_with_prefix (
                || format! (
                    "Error creating target directory {}: ",
                    parent_dir.to_string_lossy ()),
                fs::create_dir_all (
                    parent_dir),
            ) ?;

            // Falls back to copy-and-delete when a plain rename can't work
            // (e.g. across filesystems) — see `rename_or_copy_and_delete`.
            string_result_with_prefix (
                || format! (
                    "Error renaming temp file {} to {}: ",
                    temp_file_name,
                    target_path.to_string_lossy ()),
                rename_or_copy_and_delete (
                    self_state.temp_dir_path.join (
                        temp_file_name),
                    target_path)
            ) ?;

        }

        self_state.temp_files.clear ();

        // delete files

        for delete_file_name in self_state.delete_files.iter () {

            io_result_with_prefix (
                || format! (
                    "Error deleting {}: ",
                    delete_file_name.to_string_lossy ()),
                fs::remove_file (
                    delete_file_name),
            ) ?;

        }

        self_state.delete_files.clear ();

        // return

        Ok (())

    }

    /// Returns true when there is uncommitted work (staged files or
    /// scheduled deletions).
    pub fn changes (
        & self,
    ) -> bool {

        let self_state =
            self.state.lock ().unwrap ();

        ! self_state.delete_files.is_empty ()
            || ! self_state.temp_files.is_empty ()

    }

}
impl Drop for AtomicFileWriterState {

    // Best-effort cleanup: removes any still-staged temp files and the tmp
    // directory (errors deliberately ignored — e.g. the directory may be
    // non-empty), then closes the lock fd, releasing the repository lock.
    fn drop (
        & mut self,
    ) {

        // remove temporary files and directory

        for & (ref temp_file_name, _)
        in self.temp_files.iter () {

            fs::remove_file (
                self.temp_dir_path.join (
                    temp_file_name),
            ).unwrap_or (
                () // do nothing
            );

        }

        fs::remove_dir (
            & self.temp_dir_path,
        ).unwrap_or (
            () // do nothing
        );

        // release lock

        unsafe {
            libc::close (
                self.lock_fd);
        }

    }

}
// Write-lock value for `flock.l_type`, defined by hand — presumably because
// the libc crate's constant has a different integer type than the c_short
// field needs; TODO confirm against the libc crate.
const F_WRLCK: libc::c_short = 1;
// ex: noet ts=4 filetype=rust
|
use crate::service::{error::PostgresManagementServiceError, PostgresManagementService};
use async_trait::async_trait;
use drogue_cloud_admin_service::apps::{AdminService, MemberEntry, Members, TransferOwnership};
use drogue_cloud_database_common::{
auth::ensure_with,
error::ServiceError,
models::{
app::{self, ApplicationAccessor, PostgresApplicationAccessor},
Lock,
},
};
use drogue_cloud_registry_events::EventSender;
use drogue_cloud_service_api::auth::user::{authz::Permission, UserInformation};
#[async_trait]
impl<S> AdminService for PostgresManagementService<S>
where
    S: EventSender + Clone,
{
    type Error = PostgresManagementServiceError<S::Error>;

    /// Initiates an ownership transfer of `app_id` to `transfer.new_user`.
    /// Only the current owner may start a transfer; missing apps and
    /// permission failures are both reported as NotFound.
    async fn transfer(
        &self,
        identity: &UserInformation,
        app_id: String,
        transfer: TransferOwnership,
    ) -> Result<(), Self::Error> {
        // pre-flight check
        if transfer.new_user.is_empty() {
            return Err(ServiceError::BadRequest("Invalid user id".into()).into());
        }
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // retrieve app (row-locked so concurrent changes wait for us)
        let app = accessor.get(&app_id, Lock::ForUpdate).await?;
        let app = app.ok_or_else(|| ServiceError::NotFound)?;
        // ensure we are permitted to do the change
        ensure_with(&app, identity, Permission::Owner, || ServiceError::NotFound)?;
        // make the change
        accessor
            .update_transfer(
                app.name,
                identity.user_id().map(Into::into),
                Some(transfer.new_user),
            )
            .await?;
        // commit
        t.commit().await?;
        // done
        Ok(())
    }

    /// Cancels a pending ownership transfer for `app_id`. Owner-only, like
    /// `transfer`; implemented by clearing the transfer target.
    async fn cancel(&self, identity: &UserInformation, app_id: String) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // retrieve app
        let app = accessor.get(&app_id, Lock::ForUpdate).await?;
        let app = app.ok_or_else(|| ServiceError::NotFound)?;
        // ensure we are permitted to do the change
        ensure_with(&app, identity, Permission::Owner, || ServiceError::NotFound)?;
        // make the change
        accessor
            .update_transfer(app.name, identity.user_id().map(Into::into), None)
            .await?;
        // commit
        t.commit().await?;
        // done
        Ok(())
    }

    /// Accepts a pending transfer. Note: no `ensure_with` here — the caller
    /// is authorized by being the recorded transfer target, i.e. the check
    /// is `transfer_owner == identity.user_id()`.
    async fn accept(&self, identity: &UserInformation, app_id: String) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // retrieve app
        let app = accessor.get(&app_id, Lock::ForUpdate).await?;
        let app = app.ok_or_else(|| ServiceError::NotFound)?;
        log::debug!(
            "Transfer - transfer owner: {:?}, identity: {:?}",
            app.transfer_owner,
            identity.user_id()
        );
        // make the change
        if app.transfer_owner.as_deref() == identity.user_id() {
            accessor
                .update_transfer(app.name, identity.user_id().map(Into::into), None)
                .await?;
            // commit
            t.commit().await?;
            Ok(())
        } else {
            // Not the designated recipient: indistinguishable from a
            // missing application.
            Err(ServiceError::NotFound.into())
        }
    }

    /// Returns the member list (with the current resource version for
    /// optimistic locking). Requires Admin permission; read-only, so no
    /// row lock and no commit.
    async fn get_members(
        &self,
        identity: &UserInformation,
        app_id: String,
    ) -> Result<Members, Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // retrieve app
        let app = accessor.get(&app_id, Lock::None).await?;
        let app = app.ok_or_else(|| ServiceError::NotFound)?;
        // ensure we are permitted to perform the operation
        ensure_with(&app, identity, Permission::Admin, || ServiceError::NotFound)?;
        // get operation
        Ok(Members {
            resource_version: Some(app.resource_version.to_string()),
            members: app
                .members
                .into_iter()
                .map(|(k, v)| (k, MemberEntry { role: v.role }))
                .collect(),
        })
    }

    /// Replaces the member list. Honors the caller-supplied resource
    /// version (optimistic lock) before checking Admin permission and
    /// writing the new set.
    async fn set_members(
        &self,
        identity: &UserInformation,
        app_id: String,
        members: Members,
    ) -> Result<(), Self::Error> {
        let mut c = self.pool.get().await?;
        let t = c.build_transaction().start().await?;
        let accessor = PostgresApplicationAccessor::new(&t);
        // retrieve app
        let app = accessor.get(&app_id, Lock::ForUpdate).await?;
        let app = app.ok_or_else(|| ServiceError::NotFound)?;
        // reject the write if the caller's snapshot is stale
        if let Some(expected_version) = &members.resource_version {
            if expected_version != &app.resource_version.to_string() {
                return Err(ServiceError::OptimisticLockFailed.into());
            }
        }
        // ensure we are permitted to perform the operation
        ensure_with(&app, identity, Permission::Admin, || ServiceError::NotFound)?;
        // set operation (convert API member entries to model entries)
        accessor
            .set_members(
                &app_id,
                members
                    .members
                    .into_iter()
                    .map(|(k, v)| (k, app::MemberEntry { role: v.role }))
                    .collect(),
            )
            .await
            .map(|_| ())?;
        // commit
        t.commit().await?;
        Ok(())
    }
}
|
use super::{operate, BytesArgument};
use nu_engine::CallExt;
use nu_protocol::{
ast::{Call, CellPath},
engine::{Command, EngineState, Stack},
Category, Example, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Value,
};
/// Parsed arguments of `bytes remove`, handed to the cell-path walker.
struct Arguments {
    // Byte pattern to search for (guaranteed non-empty by `run`).
    pattern: Vec<u8>,
    // Remove the last occurrence instead of the first.
    end: bool,
    // Restrict the operation to these columns; `None` means the whole input.
    column_paths: Option<Vec<CellPath>>,
    // Remove every occurrence instead of just one.
    all: bool,
}
impl BytesArgument for Arguments {
    /// Hand the configured cell paths to `operate`, leaving `None` behind.
    fn take_column_paths(&mut self) -> Option<Vec<CellPath>> {
        self.column_paths.take()
    }
}
/// The `bytes remove` command.
#[derive(Clone)]
pub struct BytesRemove;
impl Command for BytesRemove {
    fn name(&self) -> &str {
        "bytes remove"
    }

    fn signature(&self) -> Signature {
        Signature::build("bytes remove")
            .required("pattern", SyntaxShape::Binary, "the pattern to find")
            .rest(
                "rest",
                SyntaxShape::CellPath,
                "optionally remove bytes by column paths",
            )
            .switch("end", "remove from end of binary", Some('e'))
            .switch("all", "remove occurrences of finding binary", Some('a'))
            .category(Category::Bytes)
    }

    fn usage(&self) -> &str {
        "Remove bytes"
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["search", "shift", "switch"]
    }

    /// Parse the call's flags and positionals, then delegate the per-value
    /// work to `remove` via `operate`.
    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        // Positionals after the pattern (index 0) are cell paths.
        let column_paths: Vec<CellPath> = call.rest(engine_state, stack, 1)?;
        let column_paths = if column_paths.is_empty() {
            None
        } else {
            Some(column_paths)
        };
        let pattern_to_remove = call.req::<Spanned<Vec<u8>>>(engine_state, stack, 0)?;
        // An empty pattern would match everywhere; reject it up front.
        if pattern_to_remove.item.is_empty() {
            return Err(ShellError::UnsupportedInput(
                "the pattern to remove cannot be empty".to_string(),
                pattern_to_remove.span,
            ));
        }
        let pattern_to_remove: Vec<u8> = pattern_to_remove.item;
        let arg = Arguments {
            pattern: pattern_to_remove,
            end: call.has_flag("end"),
            column_paths,
            all: call.has_flag("all"),
        };
        operate(remove, arg, input, call.head, engine_state.ctrlc.clone())
    }

    fn examples(&self) -> Vec<Example> {
        vec![
            Example {
                description: "Remove contents",
                example: "0x[10 AA FF AA FF] | bytes remove 0x[10 AA]",
                result: Some(Value::Binary {
                    val: vec![0xFF, 0xAA, 0xFF],
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Remove all occurrences of find binary",
                example: "0x[10 AA 10 BB 10] | bytes remove -a 0x[10]",
                result: Some(Value::Binary {
                    val: vec![0xAA, 0xBB],
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Remove occurrences of find binary from end",
                example: "0x[10 AA 10 BB CC AA 10] | bytes remove -e 0x[10]",
                result: Some(Value::Binary {
                    val: vec![0x10, 0xAA, 0x10, 0xBB, 0xCC, 0xAA],
                    span: Span::test_data(),
                }),
            },
            Example {
                description: "Remove all occurrences of find binary in table",
                example: "[[ColA ColB ColC]; [0x[11 12 13] 0x[14 15 16] 0x[17 18 19]]] | bytes remove 0x[11] ColA ColC",
                result: Some(Value::List {
                    vals: vec![Value::Record {
                        cols: vec!["ColA".to_string(), "ColB".to_string(), "ColC".to_string()],
                        vals: vec![
                            Value::Binary {
                                val: vec![0x12, 0x13],
                                span: Span::test_data(),
                            },
                            Value::Binary {
                                val: vec![0x14, 0x15, 0x16],
                                span: Span::test_data(),
                            },
                            Value::Binary {
                                val: vec![0x17, 0x18, 0x19],
                                span: Span::test_data(),
                            },
                        ],
                        span: Span::test_data(),
                    }],
                    span: Span::test_data(),
                }),
            },
        ]
    }
}
/// Remove occurrences of `arg.pattern` from `input`, returning the result as
/// a binary `Value` tagged with `span`.
///
/// * `all == false`, `end == false`: remove the first occurrence.
/// * `all == false`, `end == true`:  remove the last occurrence.
/// * `all == true`: remove every occurrence (removing all from the start or
///   the end yields the same bytes, so a single forward scan is used).
///
/// The caller guarantees a non-empty pattern (`BytesRemove::run` rejects
/// empty ones).
fn remove(input: &[u8], arg: &Arguments, span: Span) -> Value {
    let input_len = input.len();
    let pattern_len = arg.pattern.len();

    if arg.end && !arg.all {
        // Find the last occurrence of the pattern, if any. The previous
        // implementation walked an index below zero and then sliced with
        // `left as usize`, which panicked whenever the pattern was absent
        // from the input or longer than it.
        let last_match = if pattern_len == 0 || pattern_len > input_len {
            None
        } else {
            (0..=input_len - pattern_len)
                .rev()
                .find(|&i| input[i..i + pattern_len] == arg.pattern)
        };
        let val = match last_match {
            Some(i) => {
                // Splice out the matched range.
                let mut v = Vec::with_capacity(input_len - pattern_len);
                v.extend_from_slice(&input[..i]);
                v.extend_from_slice(&input[i + pattern_len..]);
                v
            }
            // Nothing to remove: the input passes through unchanged.
            None => input.to_vec(),
        };
        Value::Binary { val, span }
    } else {
        // Forward scan: skip the pattern wherever it matches; stop after the
        // first match unless `all` is set.
        let mut result = Vec::with_capacity(input_len);
        let mut left = 0;
        while left + pattern_len <= input_len {
            if input[left..left + pattern_len] == arg.pattern {
                left += pattern_len;
                if !arg.all {
                    break;
                }
            } else {
                result.push(input[left]);
                left += 1;
            }
        }
        // Copy the tail: bytes shorter than the pattern, or everything after
        // a single-occurrence removal.
        result.extend_from_slice(&input[left..]);
        Value::Binary { val: result, span }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Run every entry from `examples()` through the real evaluator.
    #[test]
    fn test_examples() {
        use crate::test_examples;
        test_examples(BytesRemove {})
    }
}
|
mod health_check;
pub use health_check::*; |
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
use std::collections::HashMap;
/// Open `filename` and return a buffered iterator over its lines.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|file| io::BufReader::new(file).lines())
}
fn main() {
    // AOC 2020 day 10: histogram the joltage differences between
    // consecutive (sorted) adapters, then print count(1) * count(3).
    let filename = "/home/remy/AOC/2020/10/input";
    let mut adapters: Vec<i64> = Vec::new();
    if let Ok(lines) = read_lines(filename) {
        for line in lines {
            if let Ok(line) = line {
                adapters.push(line.parse::<i64>().unwrap());
            }
        }
        adapters.sort();
        // difference value -> number of occurrences
        let mut differences: HashMap<i64, i64> = HashMap::new();
        let mut iteradpater = adapters.iter();
        let mut cur = iteradpater.next();
        let mut next = iteradpater.next();
        loop {
            match (cur, next) {
                (None, None) => {
                    println!("Finished parsing.");
                    break;
                },
                (Some(cur), Some(next)) => {
                    // NOTE(review): `or_insert(1)` followed by `+= 1` makes the
                    // first occurrence of each difference count as 2. That may be
                    // a deliberate shortcut for the outlet->first-adapter diff
                    // and the device's final +3 diff, but it is only correct when
                    // those diffs are exactly 1 and 3 — verify against the input.
                    let entry = differences.entry(next-cur).or_insert(1);
                    *entry += 1;
                },
                // (Some, None): last adapter reached; fall through and let the
                // next iteration hit the (None, None) arm.
                (_, _) => ()
            }
            cur = next;
            next = iteradpater.next();
        }
        println!("{:?}", differences);
        println!("{} * {} = {}", 1, 3, (&differences.get(&3)).unwrap()*(&differences.get(&1)).unwrap());
    } else {
        println!("Error");
    }
}
use super::utility;
// use std::cell::RefCell;
// use std::collections::VecDeque;
// use std::rc::Rc;
use terminal_size::{terminal_size, Height, Width};
pub mod template_engine;
pub mod text_processing;
pub mod printer;
pub mod components;
/// Terminal dimensions: `x` columns by `y` rows.
#[derive(Debug)]
pub struct TerminalSize {
    pub x: u16,
    pub y: u16,
}
impl TerminalSize {
    /// Build a `TerminalSize` from explicit column/row counts.
    pub fn create(x: u16, y: u16) -> Self {
        TerminalSize { x, y }
    }

    /// Query the current terminal dimensions.
    ///
    /// Panics when the size cannot be determined (e.g. no attached tty).
    pub fn retrieve() -> Self {
        match terminal_size() {
            Some((Width(cols), Height(rows))) => TerminalSize::create(cols, rows),
            None => panic!("Unable to get terminal Sizes!"),
        }
    }
}
/// Return a string of `width` spaces; non-positive widths yield "".
pub fn gen_whitespace(width: i32) -> String {
    // `str::repeat` replaces the manual counting loop; negatives clamp to
    // zero, exactly as the original loop (which never executed) behaved.
    " ".repeat(width.max(0) as usize)
}
/// Return a string of `height` newline characters; non-positive heights
/// yield "".
pub fn gen_newline(height: i32) -> std::string::String {
    // Same simplification as `gen_whitespace`: repeat instead of a loop.
    "\n".repeat(height.max(0) as usize)
}
// A rendered template: one String per output line.
// NOTE(review): the lowercase alias name is non-idiomatic (`Template`), but
// renaming would touch every use in this module.
type template = Vec<String>;

/// Center `content` horizontally in a region `x` columns wide, padding it
/// vertically with `y / 2` blank lines above and below.
pub fn rectangle_box(x: i32, y: i32, content: &str) -> template {
    let mut rendered_template = Vec::new();
    let len_cont = content.len() as i32;
    let t_size = x as i32;
    // Whitespace needed on each side to center the content.
    let numx = (t_size - len_cont) / 2;
    let each_space = gen_whitespace(numx);
    let zo = y / 2;
    rendered_template.push(gen_newline(zo));
    let render = format!("{}{}{}", each_space, content, each_space);
    rendered_template.push(render);
    rendered_template.push(gen_newline(zo));
    return rendered_template;
}
//center
//center
// Center each line of `content` within `x` columns, after first padding every
// line with trailing spaces plus a '+' marker so all lines reach the longest
// line's width. `y` is currently unused.
fn recto(x: i32, y: i32, mut content: template) -> template {
    let mut rendered_template = Vec::new();
    let longest = long_str(&content);
    let len_cont = content.get(longest).unwrap().len() as i32;
    let t_size = x as i32;
    // Columns of padding needed on each side to center the longest line.
    let numx = (t_size - len_cont) / 2;
    println!("{}", len_cont); // NOTE(review): looks like leftover debug output
    let each_space = gen_whitespace(numx - 1);
    content.iter_mut().for_each(|line| {
        // Pad shorter lines up to the longest line's length, ending in '+'.
        let cal_index = (len_cont as i32 - line.len() as i32) as i32;
        let newspace = gen_whitespace(cal_index);
        let formated = format!("{}+", newspace);
        line.push_str(&formated);
    });
    content
        .into_iter()
        .for_each(|x| rendered_template.push(format!("{}{}{}", each_space, x, each_space)));
    return rendered_template;
}
/// Index of the first longest string in `content`; 0 when `content` is empty.
///
/// Takes a slice instead of `&Vec<String>` (call sites coerce via deref);
/// only a strictly longer entry replaces the current best, matching the
/// original strict `>` comparison, so ties keep the earliest index.
fn long_str(content: &[String]) -> usize {
    let mut best_idx = 0;
    let mut best_len = content.first().map_or(0, |s| s.len());
    for (idx, s) in content.iter().enumerate().skip(1) {
        if s.len() > best_len {
            best_idx = idx;
            best_len = s.len();
        }
    }
    best_idx
}
/// Turn raw text into a numbered template: each line is trimmed, split into
/// 100-character chunks, and prefixed with "<line-number>| ".
pub fn parse_in_template(content: &str) -> template {
    // Threshold above which a line is chunked; 0 means every non-empty line
    // goes through `split_chunk` (empty lines take the else branch).
    let len = 0;
    // dot.write_x();
    let mut render_temp: template = Vec::new();
    // Formats a single output line as "<count>| <line>".
    let print_preset = |line: &str, count: i32| {
        let formated_str = format!("{}| {}", count, line);
        return format!("{}", formated_str);
    };
    let mut count_line = 0;
    for line in content.lines() {
        let line = line.trim();
        if line.len() > len as usize {
            let muti_lines = utility::split_chunk(line, 100);
            for line in muti_lines {
                render_temp.push(print_preset(&line, count_line));
                count_line += 1;
            }
        } else {
            render_temp.push(print_preset(&line, count_line));
            count_line += 1;
        }
    }
    // NOTE(review): `longest_len` is never used, and the println! below looks
    // like leftover debug output.
    let longest_len = long_str(&render_temp) as i32;
    println!("newspace {:?}", render_temp);
    return render_temp;
}
/// Manual smoke test: center a sample string in the current terminal and
/// print it ten times. Output is inspected by eye; nothing is asserted.
#[test]
fn test() {
    let content = "apsndjakddajdjadb";
    let len_cont = content.len() as i32;
    let t_size = TerminalSize::retrieve().x as i32;
    let numx = (t_size - len_cont) / 2;
    let each_space = gen_whitespace(numx);
    print!("{}", gen_newline(5));
    for _ in 0..10 {
        println!("{}{}{}", each_space, content, each_space);
    }
}
/// Exploratory test for console styling plus regex matching; prints the
/// results for manual inspection, nothing is asserted.
#[test]
fn test_highlighter() {
    let highlight = |x: &str| {
        let style = console::Style::new().on_red();
        let new = style.apply_to(x.to_string());
        return new;
    };
    let matcher = regex::Regex::new(r"\?").unwrap();
    let content = "bombomb?bombomobb sd\"as2311\" asdas asd?as a?sda da?sd as?d";
    let style = console::Style::new().green();
    let stylist_content = style.apply_to(content);
    let new = content
        .to_string()
        .replace("s", &highlight("x").to_string());
    // NOTE(review): the replaced value below is discarded — presumably
    // leftover experimentation.
    matcher.replace_all(&stylist_content.to_string(), "apple");
    println!("content: {}", new);
    let new_one = regex::Regex::new(r#""[a-zA-Z]+\d+""#).unwrap();
    println!("quote \" {:?}", new_one.find(content));
}
|
use std::collections::HashMap;
use crate::common::Op2;
use crate::lambdal::{Expr, Imm, Op};
/// Variable environment mapping names to runtime values.
type Closure = HashMap<String, Value>;

/// Runtime values produced by the interpreter.
#[derive(PartialEq, Eq, Debug, Clone)]
pub enum Value {
    VInt(i64),
    VBool(bool),
    // Captured environment, bound parameter name, and function body.
    VClosure(Box<Closure>, String, Box<Expr>),
    VIntArray(Vec<i64>),
}

/// Unwrap an int; panics on any other variant (typechecking is expected to
/// rule that out).
fn vint(v: Value) -> i64 {
    match v {
        Value::VInt(i) => i,
        _ => panic!("unreachable -- expected int not {:?}", v),
    }
}

/// Unwrap a bool; panics on any other variant.
fn vbool(v: Value) -> bool {
    match v {
        Value::VBool(b) => b,
        _ => panic!("unreachable -- expected bool not {:?}", v),
    }
}

/// Unwrap a closure into (captured env, parameter name, body).
fn vclosure(v: Value) -> (Box<Closure>, String, Box<Expr>) {
    match v {
        Value::VClosure(ctx, id, e) => (ctx, id, e),
        _ => panic!("unreachable -- expected closure not {:?}", v),
    }
}

/// Unwrap an int array; panics on any other variant.
fn vintarray(v: Value) -> Vec<i64> {
    match v {
        Value::VIntArray(a) => a,
        _ => panic!("unreachable -- expected intarray not {:?}", v),
    }
}
/// Evaluate a binary operation; the operands have already passed
/// typechecking, so numeric ops see ints and logic ops see bools.
fn eval_op2(ctx: &Closure, op: Op2, l: &Op, r: &Op) -> Value {
    use self::Value::*;
    use crate::common::Op2::*;
    match op {
        LT | LTE | GT | GTE | Eq | Add | Sub | Mul => {
            let vl = vint(eval_op(ctx, l));
            let vr = vint(eval_op(ctx, r));
            match op {
                LT => VBool(vl < vr),
                LTE => VBool(vl <= vr),
                GT => VBool(vl > vr),
                GTE => VBool(vl >= vr),
                Eq => VBool(vl == vr),
                Add => VInt(vl + vr),
                Sub => VInt(vl - vr),
                Mul => VInt(vl * vr),
                _ => panic!("unreachable numerical op {:?}", op),
            }
        }
        And | Or | Impl | Iff => {
            let vl = vbool(eval_op(ctx, l));
            let vr = vbool(eval_op(ctx, r));
            match op {
                And => VBool(vl && vr),
                Or => VBool(vl || vr),
                // Previously these panicked even though the outer arm routes
                // them here; evaluate them by their standard definitions.
                Impl => VBool(!vl || vr),
                Iff => VBool(vl == vr),
                _ => panic!("unreachable logic op {:?}", op),
            }
        }
    }
}
/// Substitute `fix` for free occurrences of `id` in the immediate term `i`.
fn subst_imm(ctx: &Closure, id: &str, fix: &Imm, i: &Imm) -> Imm {
    use crate::lambdal::Imm::*;
    match i {
        Bool(b) => Bool(*b),
        Int(n) => Int(*n),
        Var(x) => {
            if x == id {
                fix.clone()
            } else {
                Var(x.clone())
            }
        }
        // NOTE(review): substitution descends into binders without checking
        // whether `vid` shadows `id` — confirm earlier passes rename binders
        // so capture is impossible.
        Fun(vid, e) => {
            let e = Box::new(subst_expr(ctx, id, fix, e));
            Fun(vid.clone(), e)
        }
        Fix(vid, e) => {
            let e = Box::new(subst_expr(ctx, id, fix, e));
            Fix(vid.clone(), e)
        }
        V | Star => unreachable!("ν or ★ encountered during subst"),
    }
}
/// Substitute `fix` for `id` in every sub-term of the operation `o`.
fn subst_op(ctx: &Closure, id: &str, fix: &Imm, o: &Op) -> Op {
    use crate::lambdal::Op::*;
    match o {
        Op2(op, e1, e2) => {
            let e1 = Box::new(subst_op(ctx, id, fix, e1));
            let e2 = Box::new(subst_op(ctx, id, fix, e2));
            Op2(*op, e1, e2)
        }
        MkArray(sz, n) => {
            let sz = Box::new(subst_imm(ctx, id, fix, sz));
            let n = Box::new(subst_imm(ctx, id, fix, n));
            MkArray(sz, n)
        }
        GetArray(iid, idx) => {
            let iid = Box::new(subst_imm(ctx, id, fix, iid));
            let idx = Box::new(subst_imm(ctx, id, fix, idx));
            GetArray(iid, idx)
        }
        SetArray(iid, idx, v) => {
            let iid = Box::new(subst_imm(ctx, id, fix, iid));
            let idx = Box::new(subst_imm(ctx, id, fix, idx));
            let v = Box::new(subst_imm(ctx, id, fix, v));
            SetArray(iid, idx, v)
        }
        Imm(imm) => Imm(subst_imm(ctx, id, fix, imm)),
    }
}
// fixpoint substitution
/// Substitute `fix` for `id` throughout the expression `e`; used by
/// `eval_imm` to unroll `Fix` into a recursive closure.
fn subst_expr(ctx: &Closure, id: &str, fix: &Imm, e: &Expr) -> Expr {
    use crate::lambdal::Expr::*;
    match e {
        If(e1, e2, e3) => {
            let e1 = Box::new(subst_imm(ctx, id, fix, e1));
            let e2 = Box::new(subst_expr(ctx, id, fix, e2));
            let e3 = Box::new(subst_expr(ctx, id, fix, e3));
            If(e1, e2, e3)
        }
        Let(vid, e1, e2) => {
            let e1 = Box::new(subst_expr(ctx, id, fix, e1));
            let e2 = Box::new(subst_expr(ctx, id, fix, e2));
            Let(vid.clone(), e1, e2)
        }
        App(e1, e2) => {
            let e1 = Box::new(subst_imm(ctx, id, fix, e1));
            let e2 = Box::new(subst_imm(ctx, id, fix, e2));
            App(e1, e2)
        }
        Op(op) => Op(subst_op(ctx, id, fix, op)),
    }
}
/// Evaluate an immediate term to a runtime value.
///
/// Variables are looked up in `ctx`; `Fun` captures the current environment
/// as a closure; `Fix` evaluates its body to a closure and substitutes the
/// fixpoint back into it, implementing recursion by unrolling.
fn eval_imm(ctx: &Closure, i: &Imm) -> Value {
    use self::Value::*;
    use crate::lambdal::Imm::*;
    match i {
        Bool(b) => VBool(*b),
        Int(i) => VInt(*i),
        Var(id) => match ctx.get(id) {
            Some(v) => v.clone(),
            None => panic!("lookup {} in ctx failed: {:?}", id, ctx),
        },
        Fun(id, e) => VClosure(Box::new(ctx.clone()), id.clone(), e.clone()),
        Fix(id, e) => {
            let inner = eval(ctx, e);
            let (_, iid, ie) = vclosure(inner);
            let substituted_exp = Box::new(subst_expr(ctx, id, i, &ie));
            VClosure(Box::new(ctx.clone()), iid, substituted_exp)
        }
        // Fixed copy-pasted message: this is evaluation, not substitution.
        V | Star => unreachable!("ν or ★ encountered during eval"),
    }
}
/// Evaluate an operation term to a runtime value.
fn eval_op(ctx: &Closure, o: &Op) -> Value {
    use self::Value::*;
    use crate::lambdal::Op::*;
    match o {
        Op2(op, e1, e2) => eval_op2(ctx, *op, e1, e2),
        MkArray(sz, n) => {
            // Allocate an array of `sz` elements, each initialized to `n`.
            let sz = vint(eval_imm(ctx, sz));
            let n = vint(eval_imm(ctx, n));
            let mut vec = Vec::with_capacity(sz as usize);
            vec.resize(sz as usize, n);
            VIntArray(vec)
        }
        GetArray(iid, idx) => {
            // NOTE(review): out-of-range indices panic via slice indexing.
            let arr = vintarray(eval_imm(ctx, iid));
            let idx = vint(eval_imm(ctx, idx));
            VInt(arr[idx as usize])
        }
        SetArray(iid, idx, v) => {
            // Functional update: clone the array value, mutate the copy,
            // return it.
            let mut arr = vintarray(eval_imm(ctx, iid));
            let idx = vint(eval_imm(ctx, idx));
            let v = vint(eval_imm(ctx, v));
            arr[idx as usize] = v;
            VIntArray(arr)
        }
        Imm(imm) => eval_imm(ctx, imm),
    }
}
/// Evaluate an expression under the environment `ctx`.
fn eval(ctx: &Closure, expr: &Expr) -> Value {
    use crate::lambdal::Expr::*;
    match expr {
        If(cond, e1, e2) => {
            if vbool(eval_imm(ctx, cond)) {
                eval(ctx, e1)
            } else {
                eval(ctx, e2)
            }
        }
        App(e1, e2) => {
            // Call-by-value: evaluate the argument first, then run the
            // closure body in its captured environment extended with the
            // bound parameter.
            let v = eval_imm(ctx, e2);
            let (ctx, id, e) = vclosure(eval_imm(ctx, e1));
            let mut new_ctx = ctx;
            new_ctx.insert(id, v);
            eval(&new_ctx, &e)
        }
        Let(id, e1, e2) => {
            let v1 = eval(ctx, e1);
            let mut new_ctx = ctx.clone();
            new_ctx.insert(id.clone(), v1);
            eval(&new_ctx, e2)
        }
        Op(op) => eval_op(ctx, op),
    }
}
pub fn interpret(expr: &Expr) -> Value {
let ctx: Closure = HashMap::new();
eval(&ctx, expr)
}
#[cfg(test)]
// Parses `$s`, converts it to ANF, interprets it, and dies unless the result
// equals `$v`.
macro_rules! test_eval(
    ($s:expr, $v:expr) => { {
        use crate::implicit_parse::ProgramParser;
        use crate::tok::Tokenizer;
        use crate::lambdal;
        let input = $s;
        let lexer = Tokenizer::new(&input);
        let iexpr = ProgramParser::new().parse(input, lexer).unwrap();
        let (anf_expr, _) = lambdal::anf(&iexpr).unwrap();
        let r = interpret(&anf_expr);
        if r != $v {
            die!("mismatch {:?} != {:?}", r, $v);
        }
    } }
);

/// End-to-end checks: literals, higher-order functions, and recursion.
#[test]
fn eval_results() {
    use self::Value::*;
    test_eval!("-22", VInt(-22));
    test_eval!("let double = (fun n -> n*2) in double 8", VInt(16));
    test_eval!(
        "let rec factorial = fun x -> if x = 0 then 1 else x * (factorial (x - 1)) in factorial 5",
        VInt(120)
    );
}
|
/*
chapter 4
syntax and semantics
out-of-bounds access
*/
/// Demonstrates checked indexing: `Vec::get` returns an `Option` instead of
/// panicking on an out-of-bounds index.
fn main() {
    let a = vec![1, 2, 3];
    if let Some(b) = a.get(7) {
        println!("item 7 is {}", b);
    } else {
        println!("sorry, this vector is too short.");
    }
}
// output should be:
/*
sorry, this vector is too short.
*/
|
pub mod prelude;
pub mod block;
pub mod blockchain; |
/*
chapter 4
syntax and semantics
*/
/// A 2D point, generic over its coordinate type.
struct Point<T> {
    x: T,
    y: T,
}
/// Demonstrates destructuring a generic struct, once with integer and once
/// with floating-point coordinates.
fn main() {
    let int_origin = Point { x: 1, y: 2 };
    let float_origin = Point { x: 0.1, y: 0.2 };
    // A struct pattern in `let` is irrefutable, so no `match` is required.
    let Point { x, y } = int_origin;
    println!("({},{})", x, y);
    let Point { x, y } = float_origin;
    println!("({},{})", x, y);
}
// output should be:
/*
*/
|
use std::os::raw::c_void;
use std::path::Path;
use gl;
use image;
use image::DynamicImage::*;
use image::GenericImage;
/// Material description; currently only carries an optional name.
#[derive(Default)]
pub struct Material {
    // TODO!!
    name: Option<String>,
}
/// Load the image at `directory/path`, upload it as a 2D OpenGL texture with
/// mipmaps (repeat wrapping, linear filtering), and return the GL texture id.
///
/// # Safety
/// Presumably requires a current OpenGL context with loaded function
/// pointers — confirm at the call site. Panics if the image cannot be
/// opened or decoded.
unsafe fn texture_from_file(path: &str, directory: &str) -> u32 {
    let filename = format!("{}/{}", directory, path);
    let mut texture_id = 0;
    gl::GenTextures(1, &mut texture_id);
    let img = image::open(&Path::new(&filename)).expect("Texture failed to load");
    // Pick the GL pixel format matching the decoded channel layout.
    let format = match img {
        ImageLuma8(_) => gl::RED,
        ImageLumaA8(_) => gl::RG,
        ImageRgb8(_) => gl::RGB,
        ImageRgba8(_) => gl::RGBA,
    };
    let data = img.raw_pixels();
    gl::BindTexture(gl::TEXTURE_2D, texture_id);
    gl::TexImage2D(gl::TEXTURE_2D, 0, format as i32, img.width() as i32, img.height() as i32,
        0, format, gl::UNSIGNED_BYTE, &data[0] as *const u8 as *const c_void);
    gl::GenerateMipmap(gl::TEXTURE_2D);
    gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_S, gl::REPEAT as i32);
    gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_WRAP_T, gl::REPEAT as i32);
    gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR_MIPMAP_LINEAR as i32);
    gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32);
    texture_id
}
|
use token::{Token, TokenType, bad_token, tokenize};
use std::collections::HashMap;
use std::sync::Mutex;
lazy_static! {
static ref DEFINED: Mutex<HashMap<String, Vec<Token>>> = Mutex::new(HashMap::new());
}
/// A minimal C-style preprocessor pass over the token stream.
///
/// Supports two directives:
/// * `#define NAME tokens...` — records `tokens...` (up to the newline) as
///   the macro body for `NAME` in the global `DEFINED` table.
/// * `#include "path"` — tokenizes the named file and splices its tokens in.
///
/// Any identifier matching a defined macro is replaced by a copy of the
/// macro's body; all other tokens pass through unchanged.
pub fn preprocess(tokens: Vec<Token>) -> Vec<Token> {
    use self::TokenType::*;
    let mut v: Vec<Token> = vec![];
    let mut tokens = tokens.into_iter();
    while tokens.len() != 0 {
        let mut t = tokens.next().unwrap();
        // The borrow of `t.ty` must end before `t` is moved below, hence the
        // two-step clone into `macro_name`.
        let macro_name;
        if let Ident(ref name) = t.ty {
            macro_name = Some(name.clone());
        } else {
            macro_name = None;
        }
        if let Some(name) = macro_name {
            if let Some(macro_) = DEFINED.lock().unwrap().get(&name) {
                // Macro expansion: splice in a copy of the recorded body.
                v.append(&mut macro_.clone());
            } else {
                v.push(t);
            }
            continue;
        }
        if t.ty != HashMark {
            v.push(t);
            continue;
        }
        // `t` was '#': the next token selects the directive.
        // NOTE(review): `tokens.next().unwrap()` panics on a trailing '#'.
        if let Ident(ident) = tokens.next().unwrap().ty {
            if &*ident == "define" {
                t = tokens.next().unwrap();
                if let TokenType::Ident(name) = t.ty {
                    // Collect the macro body up to (not including) the newline.
                    let mut v2: Vec<Token> = vec![];
                    while let Some(t) = tokens.next() {
                        if t.ty == TokenType::NewLine {
                            break;
                        }
                        v2.push(t);
                    }
                    DEFINED.lock().unwrap().insert(name, v2);
                } else {
                    bad_token(&t, "macro name expected");
                }
                continue;
            }
            if &*ident == "include" {
                t = tokens.next().unwrap();
                // `bad_token` must diverge for `path` to be definitely
                // initialized on the error branch.
                let path;
                if let Str(ref s, _) = t.ty {
                    path = s.clone();
                } else {
                    bad_token(&t, "string expected");
                }
                t = tokens.next().unwrap();
                if t.ty != NewLine {
                    bad_token(&t, "newline expected");
                }
                // Splice the included file's tokens into the output.
                let mut nv = tokenize(path);
                v.append(&mut nv);
            }
        } else {
            bad_token(&t, "identifier expected");
        }
    }
    v
}
|
//! # 188. 买卖股票的最佳时机 IV
//! https://leetcode-cn.com/problems/best-time-to-buy-and-sell-stock-iv/
pub struct Solution;

impl Solution {
    /// Maximum profit from at most `k` buy/sell transactions over `prices`.
    ///
    /// Standard DP: `buys[i][j]` is the best balance after day `i` while
    /// holding stock with `j` completed transactions; `sells[i][j]` is the
    /// best balance while not holding. A transaction completes on sell.
    pub fn max_profit(k: i32, prices: Vec<i32>) -> i32 {
        if prices.is_empty() {
            return 0;
        }
        // A profitable transaction needs at least two days, so more than
        // len/2 transactions can never help; also clamp negative k to 0
        // instead of wrapping when cast to usize.
        let k = k.max(0).min((prices.len() / 2) as i32) as usize;
        let days = prices.len();
        let mut buys = vec![vec![0; k + 1]; days];
        let mut sells = vec![vec![0; k + 1]; days];
        buys[0][0] = -prices[0];
        sells[0][0] = 0;
        // States unreachable on day 0 get a "minus infinity" sentinel;
        // MIN / 2 avoids overflow when prices are later added to it.
        for j in 1..=k {
            buys[0][j] = std::i32::MIN / 2;
            sells[0][j] = std::i32::MIN / 2;
        }
        for i in 1..days {
            buys[i][0] = std::cmp::max(buys[i - 1][0], sells[i - 1][0] - prices[i]);
            for j in 1..=k {
                buys[i][j] = std::cmp::max(buys[i - 1][j], sells[i - 1][j] - prices[i]);
                sells[i][j] = std::cmp::max(sells[i - 1][j], buys[i - 1][j - 1] + prices[i]);
            }
        }
        // The answer is the best "not holding" state on the last day; the
        // manual max loop is replaced by the iterator equivalent.
        *sells[days - 1]
            .iter()
            .max()
            .expect("row has k + 1 >= 1 entries")
    }
}
#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        // Two transactions allowed, but a single buy-at-1/sell-at-7 is best.
        assert_eq!(super::Solution::max_profit(2, vec![4, 2, 1, 7]), 6);
    }
}
|
// Auto-generated (svd2rust-style) register accessor aliases for LCCR;
// prefer regenerating from the SVD over hand-editing.
#[doc = "Register `LCCR` reader"]
pub type R = crate::R<LCCR_SPEC>;
#[doc = "Register `LCCR` writer"]
pub type W = crate::W<LCCR_SPEC>;
#[doc = "Field `CMDSIZE` reader - CMDSIZE"]
pub type CMDSIZE_R = crate::FieldReader<u16>;
#[doc = "Field `CMDSIZE` writer - CMDSIZE"]
pub type CMDSIZE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:15 - CMDSIZE"]
    #[inline(always)]
    pub fn cmdsize(&self) -> CMDSIZE_R {
        // Extract the low 16 bits holding CMDSIZE.
        CMDSIZE_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - CMDSIZE"]
    #[inline(always)]
    #[must_use]
    pub fn cmdsize(&mut self) -> CMDSIZE_W<LCCR_SPEC, 0> {
        CMDSIZE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: raw writes bypass the field API and can set reserved bits.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DSI Host LTDC command configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`lccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`lccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type tying the LCCR register to its 32-bit layout and reset value.
pub struct LCCR_SPEC;
impl crate::RegisterSpec for LCCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`lccr::R`](R) reader structure"]
impl crate::Readable for LCCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`lccr::W`](W) writer structure"]
impl crate::Writable for LCCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets LCCR to value 0"]
impl crate::Resettable for LCCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use plotters::prelude::*;
use stats::{mean, stddev};
use std::env;
use std::fs::File;
use std::io::BufReader;
mod bench_data;
/// Entry point.
///
/// Usage: `<prog> <plot-type> <data-file>` where plot-type is `p`
/// (probability), `n` (integer parameter), or `d` (density). Unknown plot
/// types are silently ignored, matching the original behaviour.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args: Vec<_> = env::args().collect();
    // The original indexed args[1]/args[2] unconditionally and panicked when
    // arguments were missing; return a usage error instead.
    if args.len() < 3 {
        return Err("usage: <plot-type p|n|d> <data-file>".into());
    }
    let plot_type = &args[1];
    let plot_file = &args[2];
    match plot_type.as_str() {
        "p" => probability_plot(&plot_file[..]),
        "n" => n_plot(&plot_file[..]),
        "d" => density_plot(&plot_file[..]),
        _ => Ok(()),
    }
}
/// Plot mean runtime (with ±1σ error bars) against the integer parameter `n`
/// for every benchmark id in the group file, writing
/// `plots/<group_name>.png`.
fn n_plot(filename: &str) -> Result<(), Box<dyn std::error::Error>> {
    let colours = vec![&BLUE, &GREEN, &MAGENTA, &RED, &CYAN, &BLACK];
    let mut colour_iter = colours.iter().cycle();
    print!("{}", filename); // NOTE(review): looks like leftover debug output
    let data: bench_data::BenchGroup = read_data(filename)?;
    // Per id: (label, [(n, mean - σ, mean, mean + σ), ...]).
    let plotting_data: Vec<(String, Vec<_>)> = data
        .ids
        .iter()
        .map(|id| {
            (
                id.id.clone(),
                id.entries
                    .iter()
                    .map(|ent| {
                        let mean = mean(ent.times.iter().cloned());
                        let devs = stddev(ent.times.iter().cloned());
                        (ent.val, mean - devs, mean, mean + devs)
                    })
                    .collect(),
            )
        })
        .collect();
    //Find max values for plotting range.
    let y_max = plotting_data
        .iter()
        .flat_map(|x| x.1.iter().map(|(_x, _min, _mean, max)| *max))
        .fold(f64::NEG_INFINITY, |a, b| a.max(b));
    let x_max = plotting_data
        .iter()
        .flat_map(|x| x.1.iter().map(|(x, _min, _mean, _max)| *x as i32))
        .max()
        .expect("Must be at least one point");
    let x_min = plotting_data
        .iter()
        .flat_map(|x| x.1.iter().map(|(x, _min, _mean, _max)| *x as i32))
        .min()
        .expect("Must be at least one point");
    let plotname = format!("plots/{}.png", data.group_name);
    let root = BitMapBackend::new(&plotname, (1024, 768)).into_drawing_area();
    root.fill(&WHITE)?;
    let mut chart = ChartBuilder::on(&root)
        // .caption(data.group_name, ("sans-serif", 40).into_font())
        .margin(40)
        .x_label_area_size(20)
        .y_label_area_size(30)
        .set_label_area_size(LabelAreaPosition::Left, 60)
        .set_label_area_size(LabelAreaPosition::Bottom, 40)
        .build_cartesian_2d(x_min..(x_max + 1), 0f64..y_max)?;
    chart
        .configure_mesh()
        .x_labels(10)
        .x_desc("n")
        .y_labels(5)
        .y_label_formatter(&|y| format!("{:+e}", y.round()))
        .y_desc("Mean Time (ns)")
        .draw()?;
    for (name, id) in &plotting_data {
        let colour = colour_iter.next().expect("cycle iterator").clone();
        // Line Series
        chart.draw_series(LineSeries::new(
            id.iter().map(|(x, _min, mean, _max)| (*x as i32, *mean)),
            colour.clone(),
        ))?;
        // Point Series
        chart
            .draw_series(id.iter().map(|&(x, ymin, ymean, ymax)| {
                ErrorBar::new_vertical(x as i32, ymin, ymean, ymax, colour.clone().filled(), 5)
            }))?
            .label(name)
            .legend(move |(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], colour.filled()));
    }
    chart
        .configure_series_labels()
        .background_style(WHITE.filled())
        .border_style(&BLACK)
        .draw()?;
    Ok(())
}
/// Plot mean runtime (with ±1σ error bars) against the probability
/// parameter `p` (x axis fixed to [0, 1]) for every benchmark id in the
/// group file, writing `plots/<group_name>.png`.
fn probability_plot(filename: &str) -> Result<(), Box<dyn std::error::Error>> {
    let colours = vec![&BLUE, &GREEN, &MAGENTA, &RED, &CYAN, &BLACK];
    let mut colour_iter = colours.iter().cycle();
    let data: bench_data::BenchGroup = read_data(filename)?;
    // Per id: (label, [(p, mean - σ, mean, mean + σ), ...]).
    let plotting_data: Vec<(String, Vec<_>)> = data
        .ids
        .iter()
        .map(|id| {
            (
                id.id.clone(),
                id.entries
                    .iter()
                    .map(|ent| {
                        let mean = mean(ent.times.iter().cloned());
                        let devs = stddev(ent.times.iter().cloned());
                        (ent.val, mean - devs, mean, mean + devs)
                    })
                    .collect(),
            )
        })
        .collect();
    //Find max values for plotting range.
    let y_max = plotting_data
        .iter()
        .flat_map(|x| x.1.iter().map(|(_x, _min, _mean, max)| *max))
        .fold(f64::NEG_INFINITY, |a, b| a.max(b));
    // NOTE(review): `x_min` is computed but the x range below is fixed to
    // 0..1 — confirm whether this was meant to be used.
    let x_min = plotting_data
        .iter()
        .flat_map(|x| x.1.iter().map(|(x, _min, _mean, _max)| *x))
        .fold(f32::INFINITY, |a, b| a.min(b));
    let plotname = format!("plots/{}.png", data.group_name);
    let root = BitMapBackend::new(&plotname, (1024, 768)).into_drawing_area();
    root.fill(&WHITE)?;
    let mut chart = ChartBuilder::on(&root)
        // .caption(data.group_name, ("sans-serif", 40).into_font())
        .margin(40)
        .x_label_area_size(20)
        .y_label_area_size(30)
        .set_label_area_size(LabelAreaPosition::Left, 60)
        .set_label_area_size(LabelAreaPosition::Bottom, 40)
        .build_cartesian_2d(0f32..1.0, 0f64..y_max)?;
    chart
        .configure_mesh()
        .x_labels(10)
        .x_desc("p")
        .y_labels(5)
        .y_label_formatter(&|y| format!("{:+e}", y.round()))
        .y_desc("Mean Time (ns)")
        .draw()?;
    for (name, id) in &plotting_data {
        let colour = colour_iter.next().expect("cycle iterator").clone();
        // Line Series
        chart.draw_series(LineSeries::new(
            id.iter().map(|(x, _min, mean, _max)| (*x, *mean)),
            colour.clone(),
        ))?;
        // Point Series
        chart
            .draw_series(id.iter().map(|&(x, ymin, ymean, ymax)| {
                ErrorBar::new_vertical(x, ymin, ymean, ymax, colour.clone().filled(), 5)
            }))?
            .label(name)
            .legend(move |(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], colour.filled()));
    }
    chart
        .configure_series_labels()
        .background_style(WHITE.filled())
        .border_style(&BLACK)
        .draw()?;
    Ok(())
}
/// Scatter-plot bound values against edge density for every id in the
/// evaluation group file, writing `plots/<group_name>.png`.
fn density_plot(filename: &str) -> Result<(), Box<dyn std::error::Error>> {
    let colours = vec![&BLUE, &GREEN, &MAGENTA, &RED, &CYAN, &BLACK];
    let mut colour_iter = colours.iter().cycle();
    let data: bench_data::EvalGroup = read_eval_data(filename)?;
    // Per id: (label, [(x, y), ...]).
    let plotting_data: Vec<(String, Vec<_>)> = data
        .ids
        .iter()
        .map(|id| {
            (
                id.id.clone(),
                id.entries
                    .iter()
                    .map(|ent| (ent.x.clone(), ent.y.clone()))
                    .collect(),
            )
        })
        .collect();
    //Find max values for plotting range.
    let y_max = plotting_data
        .iter()
        .flat_map(|x| x.1.iter().map(|(_x, y)| *y))
        .max()
        .expect("Must be at least one data point");
    let plotname = format!("plots/{}.png", data.group_name);
    let root = BitMapBackend::new(&plotname, (1024, 768)).into_drawing_area();
    root.fill(&WHITE)?;
    let mut chart = ChartBuilder::on(&root)
        // .caption(data.group_name, ("sans-serif", 40).into_font())
        .margin(40)
        .x_label_area_size(20)
        .y_label_area_size(30)
        .set_label_area_size(LabelAreaPosition::Left, 60)
        .set_label_area_size(LabelAreaPosition::Bottom, 40)
        .build_cartesian_2d(0f32..1.0, 0u32..(y_max + 1))?;
    chart
        .configure_mesh()
        .x_labels(10)
        .x_desc("Edge Density")
        .y_labels(5)
        .y_desc("Bound Value")
        .draw()?;
    for (name, id) in &plotting_data {
        let colour = colour_iter.next().expect("cycle iterator").clone();
        // Point Series
        chart
            .draw_series(
                id.iter()
                    .map(|&(x, y)| Circle::new((x, y), 1, colour.clone().filled())),
            )?
            .label(name)
            .legend(move |(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], colour.filled()));
    }
    chart
        .configure_series_labels()
        .position(SeriesLabelPosition::MiddleLeft)
        .background_style(WHITE.filled())
        .border_style(&BLACK)
        .draw()?;
    Ok(())
}
/// Deserialize a `BenchGroup` from the JSON file at `filename`.
fn read_data(filename: &str) -> Result<bench_data::BenchGroup, Box<dyn std::error::Error>> {
    let file = File::open(filename)?;
    let reader = BufReader::new(file);
    let data: bench_data::BenchGroup = serde_json::from_reader(reader)?;
    Ok(data)
}
/// Deserialize an `EvalGroup` from the JSON file at `filename`.
fn read_eval_data(filename: &str) -> Result<bench_data::EvalGroup, Box<dyn std::error::Error>> {
    let file = File::open(filename)?;
    let reader = BufReader::new(file);
    let data: bench_data::EvalGroup = serde_json::from_reader(reader)?;
    Ok(data)
}
|
/// To check if the widget's label matches the given string.
///
/// Example:
///
/// ```
/// extern crate gtk;
/// #[macro_use]
/// extern crate gtk_test;
///
/// use gtk::{Button, ButtonExt, LabelExt};
///
/// # fn main() {
/// gtk::init().expect("GTK init failed");
/// let but = Button::new();
/// but.set_label("text");
/// assert_label!(but, "text");
/// # }
/// ```
#[macro_export]
macro_rules! assert_label {
    ($widget:expr, $string:expr) => {
        // `to_string()` lets callers pass either `&str` or `String`.
        assert_eq!($widget.get_label().expect("get label"), $string.to_string());
    };
}
/// To check if the widget's text matches the given string.
///
/// Example:
///
/// ```
/// extern crate gtk;
/// #[macro_use]
/// extern crate gtk_test;
///
/// use gtk::{Label, LabelExt};
///
/// # fn main() {
/// gtk::init().expect("GTK init failed");
/// let label = Label::new("I'm a label!");
/// assert_text!(label, "I'm a label!");
/// # }
/// ```
#[macro_export]
macro_rules! assert_text {
    ($widget:expr, $string:expr) => {
        // `to_string()` lets callers pass either `&str` or `String`.
        assert_eq!($widget.get_text().expect("get text"), $string.to_string());
    };
}
/// To check if the widget's title matches the given string.
///
/// Example:
///
/// ```
/// extern crate gtk;
/// #[macro_use]
/// extern crate gtk_test;
///
/// use gtk::{GtkWindowExt, Window, WindowType};
///
/// # fn main() {
/// gtk::init().expect("GTK init failed");
/// let window = Window::new(WindowType::Toplevel);
/// window.set_title("Fromage ?");
/// assert_title!(window, "Fromage ?");
/// # }
/// ```
#[macro_export]
macro_rules! assert_title {
    ($widget:expr, $string:expr) => {
        // Fixed copy-pasted expect message ("get text" -> "get title") so a
        // failure names the accessor that actually returned `None`.
        assert_eq!($widget.get_title().expect("get title"), $string.to_string());
    };
}
/// To check if the widget's name matches the given string.
///
/// Example:
///
/// ```
/// extern crate gtk;
/// #[macro_use]
/// extern crate gtk_test;
///
/// use gtk::{WidgetExt, Button};
///
/// # fn main() {
/// gtk::init().expect("GTK init failed");
/// let button = Button::new();
/// button.set_name("Omelette");
/// assert_name!(button, "Omelette");
/// # }
/// ```
#[macro_export]
macro_rules! assert_name {
    ($widget:expr, $string:expr) => {
        // expect message fixed: it previously said "get text" (copy-paste
        // from assert_text!), which made panics misleading.
        assert_eq!($widget.get_name().expect("get name"), $string.to_string());
    };
}
/// Create a new observer for signals.
///
/// Example:
///
/// ```
/// extern crate gtk;
/// #[macro_use]
/// extern crate gtk_test;
///
/// use gtk::GtkWindowExt;
///
/// # fn main() {
/// gtk::init().expect("initialization failed");
/// let window = gtk::Window::new(gtk::WindowType::Toplevel);
///
/// let observer = observer_new!(window, connect_activate_focus, |_|);
/// window.emit_activate_focus();
/// observer.wait();
/// # }
/// ```
///
/// You can also give a block to the macro that will be called when the signal is triggered:
///
/// ```
/// extern crate gtk;
/// #[macro_use]
/// extern crate gtk_test;
///
/// use gtk::GtkWindowExt;
///
/// # fn main() {
/// gtk::init().expect("initialization failed");
/// let window = gtk::Window::new(gtk::WindowType::Toplevel);
///
/// let observer = observer_new!(window, connect_activate_focus, |w| {
///     w.set_title("Caribou !");
/// });
/// window.emit_activate_focus();
/// observer.wait();
/// assert_title!(window, "Caribou !");
/// # }
/// ```
#[macro_export]
macro_rules! observer_new {
    ($widget:expr, $signal_name:ident, |$e1:pat $(,$e:pat)*|) => {{
        let observer = $crate::Observer::new();
        let res = (*observer.get_inner()).clone();
        // Fixed: the transcriber previously wrote `$e:expr`, but fragment
        // specifiers are only valid in matchers; the expansion emitted a
        // literal `: expr` type annotation on every extra closure argument,
        // which failed to compile for multi-argument signals.
        $widget.$signal_name(move |$e1 $(,$e)*| {
            *res.borrow_mut() = true;
        });
        observer
    }};
    ($widget:expr, $signal_name:ident, |$e1:pat $(,$e:pat)*| $block:block) => {{
        let observer = $crate::Observer::new();
        let res = (*observer.get_inner()).clone();
        $widget.$signal_name(move |$e1 $(,$e)*| {
            // Run the user block first, then flag the signal as seen.
            let _ = $block;
            *res.borrow_mut() = true;
        });
        observer
    }}
}
|
mod util;
use std::io::BufRead;
use util::error_exit;
/// Fuel required for a module of the given mass: `mass / 3 - 2`,
/// clamped so it never goes below zero.
fn get_fuel(mass: i64) -> i64 {
    let raw = mass / 3 - 2;
    util::clip_min(raw, 0)
}
/// Total fuel for `mass`, also accounting for the fuel's own mass:
/// keep adding fuel-for-the-fuel until the increment reaches zero.
fn get_fuel_recursive(mass: i64) -> i64 {
    let fuel = get_fuel(mass);
    if fuel == 0 {
        0
    } else {
        fuel + get_fuel_recursive(fuel)
    }
}
/// Entry point: pick part 1 or 2 from the CLI, then sum the fuel
/// requirement of every mass read from stdin (one integer per line).
fn main() {
    // Select which solver to run based on the part id parsed from argv.
    let solver = match util::part_id_from_cli() {
        util::PartID::One => get_fuel,
        util::PartID::Two => get_fuel_recursive,
    };
    // NOTE(review): `or_abort!` presumably unwraps the Result or exits the
    // process with the given message (via util::error_exit) — confirm in util.
    let result: i64 = std::io::stdin()
        .lock()
        .lines()
        .map(or_abort!("Failed to read line"))
        .map(|line| line.parse::<i64>())
        .map(or_abort!("Failed to parse integer."))
        .map(solver)
        .sum();
    println!("{}", result)
}
|
use super::statement::Statement;
/// A schedulable job: an ordered list of CPU/I-O statements together with
/// precomputed duration totals used by the scheduler.
#[derive(Debug, Clone)]
pub struct Job {
    // The statements executed in order when the job runs.
    pub statements: Vec<Statement>,
    // Total wall-clock duration (CPU + I/O).
    pub total_duration: u64,
    // Portion of total_duration spent on CPU work.
    pub total_cpu_duration: u64,
    // Portion of total_duration spent waiting on I/O.
    pub total_io_duration: u64,
    // True when the job was built via `io_bound`.
    pub is_io_bound: bool,
}
impl Job {
    /// Build a purely CPU-bound job: one CPU statement covering the whole
    /// duration, with no I/O time.
    pub fn cpu_bound(total_duration: u64) -> Self {
        Self {
            statements: vec![Statement::cpu_bound(total_duration)],
            total_duration,
            total_cpu_duration: total_duration,
            total_io_duration: 0,
            is_io_bound: false,
        }
    }
    /// ios: I/O statements count
    ///
    /// Build an I/O-bound job: 20% of `total_duration` is CPU time and 80%
    /// I/O time, interleaved as `ios` pairs of (CPU, I/O) statements.
    /// Durations use integer division, so per-statement remainders are
    /// dropped and the statements may sum to slightly less than
    /// `total_duration`.
    ///
    /// # Panics
    /// Panics if `ios` is zero (previously this surfaced as a bare
    /// divide-by-zero panic; now the message states the precondition).
    pub fn io_bound(total_duration: u64, ios: u64) -> Self {
        assert!(ios > 0, "io_bound job requires at least one I/O statement");
        let total_cpu_duration = total_duration * 2 / 10;
        let total_io_duration = total_duration - total_cpu_duration;
        let cpu_duration = total_cpu_duration / ios;
        let io_duration = total_io_duration / ios;
        let mut statements = vec![];
        for _ in 0..ios {
            statements.push(Statement::cpu_bound(cpu_duration));
            statements.push(Statement::io_bound(io_duration));
        }
        Self {
            statements,
            total_duration,
            total_cpu_duration,
            total_io_duration,
            is_io_bound: true,
        }
    }
    /// Human-readable label for the job's kind.
    pub fn type_hint(&self) -> &str {
        if self.is_io_bound {
            "I/O Bound"
        } else {
            "CPU Bound"
        }
    }
    /// `Response Ratio = (Waiting Time + Burst time) / Burst time`
    ///
    /// Computed as integer division of total duration by CPU duration;
    /// falls back to 1 when the CPU duration is zero.
    pub fn response_ratio(&self) -> u64 {
        self.total_duration.checked_div(self.total_cpu_duration).unwrap_or(1)
    }
}
|
#[cfg(any(target_os = "android"))]
use backtrace::Backtrace;
#[cfg(any(target_os = "android"))]
use log::trace;
use sdl2::filesystem::pref_path;
use sdl2::keyboard::Keycode;
use sdl2::mouse::MouseButton;
use sdl2::rwops::RWops;
use shrev::EventChannel;
use specs::prelude::*;
use specs::World as SpecsWorld;
#[cfg(any(target_os = "android"))]
use std::panic;
use std::sync::{Arc, Mutex};
// use rand::prelude::*;
use crate::gui::{Primitive, UI};
use crate::setup::*;
use crate::systems::{
AISystem, CollisionSystem, CommonRespawn, ControlSystem, ControllingSystem,
DeadScreen, DestroySync, GUISystem, GamePlaySystem, InsertSystem,
KinematicSystem, MenuRenderingSystem, RenderingSystem, ScoreTableRendering,
SoundSystem, UpgradeGUI, UpgradeControlSystem, Upgrader
};
use common::*;
use components::*;
use gfx_h::{effects::MenuParticles, Canvas, MovementParticles, ParticlesData};
// #[cfg(not(any(target_os = "ios", target_os = "android", target_os = "emscripten"))]
use log::info;
use physics::safe_maintain;
use physics_system::PhysicsSystem;
use red::glow::RenderLoop;
use sound::init_sound;
use std::fs::File;
use std::path::Path;
use telemetry::TimeSpans;
/// Boot the game and run its main loop.
///
/// Sets up the ECS world, graphics/sound/physics, every dispatcher, and the
/// shared resources, then enters a render loop that pumps SDL input, runs
/// the dispatcher matching the current `AppState`, and handles window/quit
/// events (persisting `MacroGame` to a RON file on exit).
///
/// Returns `Err` if graphics or sound initialization fails.
pub fn run() -> Result<(), String> {
    // --- platform + logging setup ---
    #[cfg(any(target_os = "android"))]
    setup_android();
    #[cfg(any(target_os = "android"))]
    trace!(
        "hello androigeni4!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
    );
    let mut specs_world = SpecsWorld::new();
    data_setup(&mut specs_world);
    #[cfg(not(any(target_os = "android")))]
    let _guard = setup_logging();
    let telegraph = setup_telegraph();
    let time_spans = TimeSpans::new();
    // NOTE(review): the cfg attribute below is duplicated; one copy is
    // redundant (harmless, both gate the same trace! statement).
    #[cfg(any(target_os = "android"))]
    #[cfg(any(target_os = "android"))]
    trace!("opa 1");
    setup_physics(&mut specs_world);
    // We need to own _gl_context to avoid RAII crazyness
    let (context, sdl_context, render_loop, _gl_context, hdpi, canvas) =
        setup_gfx(&mut specs_world)?;
    #[cfg(any(target_os = "android"))]
    trace!("opa 2");
    // Hide the cursor
    sdl_context.mouse().show_cursor(false);
    setup_text(&context, &mut specs_world);
    let atlas = read_atlas("assets/out.ron");
    #[cfg(any(target_os = "android"))]
    trace!("opa 3");
    let name_to_atlas = setup_images(&atlas);
    // --- event channels shared between systems ---
    let mut asteroids_spawn_channel: EventChannel<InsertEvent> =
        EventChannel::with_capacity(100);
    let mut keys_channel: EventChannel<Keycode> =
        EventChannel::with_capacity(100);
    let mut sounds_channel: EventChannel<Sound> =
        EventChannel::with_capacity(30);
    let mut insert_channel: EventChannel<InsertEvent> =
        EventChannel::with_capacity(100);
    let mut primitives_channel: EventChannel<Primitive> =
        EventChannel::with_capacity(100);
    let name_to_animation = load_animations(&atlas);
    load_description(&mut specs_world, &name_to_atlas);
    let preloaded_images = preloaded_images(&name_to_atlas, &name_to_animation);
    // --- particle resources ---
    let size = 10f32;
    let movement_particles = ThreadPin::new(ParticlesData::MovementParticles(
        MovementParticles::new_quad(&context, -size, -size, size, size, 100),
    ));
    // let engine_particles = ThreadPin::new(ParticlesData::Engine(
    //     Engine::new(&display, )
    // ))
    let movement_particles_entity =
        specs_world.create_entity().with(movement_particles).build();
    let preloaded_particles = PreloadedParticles {
        movement: movement_particles_entity,
    };
    // --- systems and dispatchers; readers must be registered before the
    //     channel is moved into the world as a resource ---
    let destroy_sync =
        DestroySync::new(asteroids_spawn_channel.register_reader());
    specs_world.add_resource(Arc::new(Mutex::new(asteroids_spawn_channel)));
    let physics_system = PhysicsSystem::default();
    let insert_system = InsertSystem::new(insert_channel.register_reader());
    let rendering_system =
        RenderingSystem::new(primitives_channel.register_reader());
    // Second rendering system with its own reader, used by the menu dispatcher.
    let rendering_system2 =
        RenderingSystem::new(primitives_channel.register_reader());
    let menu_rendering_system = MenuRenderingSystem;
    let dead_screen_system = DeadScreen::default();
    let common_respawn = CommonRespawn::default();
    let mut dead_screen_dispatcher = DispatcherBuilder::new()
        .with(common_respawn.clone(), "common_respawn", &[])
        .with_thread_local(physics_system.clone())
        .with_thread_local(dead_screen_system)
        .build();
    let mut menu_dispatcher = DispatcherBuilder::new()
        .with(common_respawn.clone(), "common_respawn", &[])
        .with_thread_local(menu_rendering_system)
        .with_thread_local(rendering_system2)
        .with_thread_local(physics_system.clone())
        .build();
    let score_table_system =
        ScoreTableRendering::new(primitives_channel.register_reader());
    let mut score_table_dispatcher = DispatcherBuilder::new()
        .with_thread_local(score_table_system)
        .build();
    let sound_system = SoundSystem::new(sounds_channel.register_reader());
    let control_system = ControlSystem::new(keys_channel.register_reader());
    let upgrade_control_system = UpgradeControlSystem::default();
    let upgrader = Upgrader::default();
    let gameplay_sytem = GamePlaySystem::default();
    let collision_system = CollisionSystem::default();
    let ai_system = AISystem::default();
    let gui_system = GUISystem::default();
    let controlling_system = ControllingSystem::default();
    let (preloaded_sounds, music_data, _audio, _mixer, timer) =
        init_sound(&sdl_context, &mut specs_world)?;
    // --- world resources (grids, audio, preloaded assets) ---
    specs_world.add_resource(NebulaGrid::new(1, 100f32, 100f32, 50f32, 50f32));
    specs_world.add_resource(PlanetGrid::new(1, 60f32, 60f32, 30f32, 30f32));
    specs_world.add_resource(StarsGrid::new(3, 40f32, 40f32, 4f32, 4f32));
    specs_world.add_resource(FogGrid::new(2, 50f32, 50f32, 5f32, 5f32));
    // specs_world.add_resource(MacroGame{coins: 0, score_table: 0});
    // specs_world.add_resource(TimeTracker::new());
    specs_world.add_resource(name_to_atlas);
    specs_world.add_resource(ThreadPin::new(music_data));
    specs_world.add_resource(Music::default());
    specs_world.add_resource(LoopSound::default());
    specs_world.add_resource(preloaded_sounds);
    specs_world.add_resource(preloaded_particles);
    specs_world.add_resource(ThreadPin::new(timer));
    specs_world.add_resource(ThreadPin::new(MenuParticles::new_quad(
        &context,
        (-size, size),
        (-size, size),
        (-20.0, 20.0),
        200,
    )));
    specs_world.add_resource(GlobalParams::default());
    let mut sound_dispatcher = DispatcherBuilder::new()
        .with_thread_local(sound_system)
        .build();
    let mut rendering_dispatcher = DispatcherBuilder::new()
        .with_thread_local(rendering_system)
        .build();
    // Main gameplay dispatcher: physics depends on gameplay + collision.
    let mut dispatcher = DispatcherBuilder::new()
        // .with(control_system, "control_system", &[])
        .with_thread_local(control_system)
        .with(gameplay_sytem, "gameplay_system", &[])
        .with(common_respawn, "common_respawn", &[])
        .with(ai_system, "ai_system", &[])
        .with(collision_system, "collision_system", &["ai_system"])
        .with(
            physics_system,
            "physics_system",
            &[
                // "kinematic_system",
                // "control_system",
                "gameplay_system",
                "collision_system",
            ],
        )
        .with(destroy_sync, "destroy_sync", &[])
        .with(KinematicSystem {}, "kinematic_system", &["physics_system"])
        // .with_thread_local(insert_system)
        .build();
    let mut insert_dispatcher = DispatcherBuilder::new()
        .with_thread_local(insert_system)
        .build();
    let mut gui_dispatcher = DispatcherBuilder::new()
        .with(controlling_system.clone(), "controlling", &[])
        .with_thread_local(gui_system)
        .build();
    let upgrade_gui_system = UpgradeGUI::default();
    let mut upgrade_gui_dispatcher = DispatcherBuilder::new()
        .with(upgrade_control_system, "upgrade_control_system", &[])
        .with(upgrader, "upgrader", &["upgrade_control_system"])
        .with(controlling_system, "controlling", &[])
        .with_thread_local(upgrade_gui_system)
        .build();
    // --- remaining input/render resources ---
    let keys_vec: Vec<Keycode> = vec![];
    let upgrades_vec: Vec<UpgradeType> = vec![];
    specs_world.add_resource(upgrades_vec);
    specs_world.add_resource(keys_vec);
    specs_world.add_resource(keys_channel);
    specs_world.add_resource(sounds_channel);
    specs_world.add_resource(insert_channel);
    specs_world.add_resource(ThreadPin::new(context));
    specs_world.add_resource(Mouse {
        wdpi: hdpi,
        hdpi: hdpi,
        ..Mouse::default()
    });
    specs_world.add_resource(ThreadPin::new(canvas));
    specs_world.add_resource(preloaded_images);
    specs_world.add_resource(AppState::Menu);
    specs_world.add_resource(UI::default());
    specs_world.add_resource(primitives_channel);
    specs_world.add_resource(Progress::default());
    specs_world.add_resource(telegraph);
    specs_world.add_resource(time_spans);
    // ------------------------------
    let mut events_loop = sdl_context.event_pump().unwrap();
    // Run one insert pass so initial entities exist before the loop starts.
    insert_dispatcher.dispatch(&specs_world.res);
    safe_maintain(&mut specs_world);
    render_loop.run(move |running: &mut bool| {
        flame::start("loop");
        info!("asteroids: start loop");
        specs_world.write_resource::<DevInfo>().update();
        // Snapshot currently pressed keys and publish them both as an event
        // stream and as the Vec<Keycode> resource.
        let keys_iter: Vec<Keycode> = events_loop
            .keyboard_state()
            .pressed_scancodes()
            .filter_map(Keycode::from_scancode)
            .collect();
        specs_world
            .write_resource::<EventChannel<Keycode>>()
            .iter_write(keys_iter.clone());
        *specs_world
            .write_resource::<Vec<Keycode>>()
            = keys_iter;
        // Create a set of pressed Keys.
        flame::start("control crazyness");
        info!("asteroids: control crazyness");
        {
            // Mirror SDL mouse state into the Mouse resource.
            let state = events_loop.mouse_state();
            let buttons: Vec<_> = state.pressed_mouse_buttons().collect();
            let mut mouse_state = specs_world.write_resource::<Mouse>();
            mouse_state.set_left(buttons.contains(&MouseButton::Left));
            mouse_state.set_right(buttons.contains(&MouseButton::Right));
            let dims =
                specs_world.read_resource::<red::Viewport>().dimensions();
            #[cfg(target_os = "android")]
            trace!("wat1");
            mouse_state.set_position(
                state.x(),
                state.y(),
                specs_world.read_resource::<ThreadPin<Canvas>>().observer(),
                dims.0 as u32,
                dims.1 as u32,
                specs_world.read_resource::<ThreadPin<Canvas>>().z_far,
            );
            #[cfg(target_os = "android")]
            trace!("wat2");
            // fingers
            {
                // On desktop, synthesize touch 0 from the left mouse button.
                #[cfg(not(target_os = "android"))]
                {
                    let mut touches = specs_world.write_resource::<Touches>();
                    touches[0] = if mouse_state.left {
                        Some(Finger::new(
                            0,
                            state.x() as f32,
                            state.y() as f32,
                            specs_world
                                .read_resource::<ThreadPin<Canvas>>()
                                .observer(),
                            0f32,
                            dims.0 as u32,
                            dims.1 as u32,
                            specs_world
                                .read_resource::<ThreadPin<Canvas>>()
                                .z_far,
                        ))
                    } else {
                        None
                    };
                }
                // On Android, read real fingers from the first touch device.
                #[cfg(target_os = "android")]
                {
                    let mut touches = specs_world.write_resource::<Touches>();
                    // TODO add multy touch here
                    if sdl2::touch::num_touch_devices() > 0 {
                        let device = sdl2::touch::touch_device(0);
                        for i in 0..FINGER_NUMBER as i32 {
                            trace!("iterating over touch {}", i);
                            if let Some(finger) =
                                sdl2::touch::touch_finger(device, i)
                            {
                                touches[i as usize] = Some(Finger::new(
                                    finger.id as usize,
                                    finger.x * dims.0 as f32,
                                    finger.y * dims.1 as f32,
                                    specs_world
                                        .read_resource::<ThreadPin<Canvas>>()
                                        .observer(),
                                    finger.pressure,
                                    dims.0 as u32,
                                    dims.1 as u32,
                                    specs_world
                                        .read_resource::<ThreadPin<Canvas>>()
                                        .z_far,
                                ));
                            } else {
                                touches[i as usize] = None
                            }
                        }
                    }
                }
            }
        }
        flame::end("control crazyness");
        // Dispatch the system set matching the current application state.
        let app_state = *specs_world.read_resource::<AppState>();
        match app_state {
            AppState::Menu => menu_dispatcher.dispatch(&specs_world.res),
            AppState::Play(play_state) => {
                if let PlayState::Action = play_state {
                    flame::start("dispatch");
                    info!("asteroids: main dispatcher");
                    dispatcher.dispatch_seq(&specs_world.res);
                    dispatcher.dispatch_thread_local(&specs_world.res);
                    info!("asteroids: gui dispatcher");
                    gui_dispatcher.dispatch(&specs_world.res);
                    flame::end("dispatch");
                } else {
                    info!("asteroids: upgrade dispatcher");
                    upgrade_gui_dispatcher.dispatch(&specs_world.res);
                }
                // specs_world.write_resource::<TimeSpans>().begin("rendering".to_string());
                info!("asteroids: rendering dispatcher");
                rendering_dispatcher.dispatch(&specs_world.res);
                // specs_world.write_resource::<TimeSpans>().end("rendering".to_string())
            }
            AppState::ScoreTable => {
                score_table_dispatcher.dispatch(&specs_world.res);
            }
            AppState::DeadScreen => {
                info!("dead screen");
                dead_screen_dispatcher.dispatch(&specs_world.res);
                rendering_dispatcher.dispatch(&specs_world.res);
            }
        }
        info!("asteroids: insert dispatcher");
        flame::start("insert");
        insert_dispatcher.dispatch(&specs_world.res);
        flame::end("insert");
        info!("asteroids: sounds dispatcher");
        flame::start("sounds");
        sound_dispatcher.dispatch(&specs_world.res);
        flame::end("sounds");
        flame::start("maintain");
        info!("asteroids: maintain");
        safe_maintain(&mut specs_world);
        flame::end("maintain");
        flame::start("events loop");
        info!("asteroids: events loop");
        // Drain SDL events: quit/escape saves state and stops the loop;
        // window resize updates the viewport.
        for event in events_loop.poll_iter() {
            use sdl2::event::Event;
            match event {
                Event::Quit { .. }
                | Event::KeyDown {
                    keycode: Some(Keycode::Escape),
                    ..
                } => {
                    *running = false;
                    use ron::ser::{to_string_pretty, PrettyConfig};
                    use std::io::Write;
                    // use serde::Serialize;
                    let pretty = PrettyConfig {
                        depth_limit: 2,
                        separate_tuple_members: true,
                        enumerate_arrays: true,
                        ..PrettyConfig::default()
                    };
                    // Persist MacroGame progress as pretty-printed RON.
                    let s = to_string_pretty(
                        &*specs_world.write_resource::<MacroGame>(),
                        pretty,
                    )
                    .expect("Serialization failed");
                    #[cfg(any(target_os = "android"))]
                    let pref = pref_path("vlad", "twenty_ateroids")
                        .expect("failed to get pref path");
                    let file = "rons/macro_game.ron";
                    // On Android the save file lives under the SDL pref path.
                    #[cfg(any(target_os = "android"))]
                    let file = format!("{}/{}", pref, file);
                    // let mut rw = RWops::from_file(Path::new(&file), "r+").expect("failed to load macro game");
                    #[cfg(any(target_os = "android"))]
                    trace!("starting opening");
                    eprintln!("{}", s);
                    // Try read-write first; fall back to write-only creation.
                    if let Ok(mut rw) = RWops::from_file(Path::new(&file), "w+")
                    {
                        rw.write(s.as_bytes())
                            .expect("failed to load macro game");
                    } else {
                        let mut rw = RWops::from_file(Path::new(&file), "w")
                            .expect("failed to load macro game");
                        rw.write(s.as_bytes()).expect("failed to write");
                    }
                    // Dump profiling data collected via flame.
                    flame::dump_html(
                        &mut File::create("flame-graph.html").unwrap(),
                    )
                    .unwrap();
                }
                sdl2::event::Event::Window {
                    win_event: sdl2::event::WindowEvent::Resized(w, h),
                    ..
                } => {
                    let mut viewport =
                        specs_world.write_resource::<red::Viewport>();
                    viewport.update_size(w, h);
                    let context =
                        specs_world.read_resource::<ThreadPin<red::GL>>();
                    viewport.set_used(&*context);
                }
                _ => (),
            }
        }
        flame::end("events loop");
        // ::std::thread::sleep(Duration::new(0, 1_000_000_000u32 / 60));
        flame::end("loop");
        // Keep the flame span buffer bounded between dumps.
        if flame::spans().len() > 10 {
            flame::clear();
        }
    });
    Ok(())
}
|
use bellman::multicore::Worker;
use bellman::SynthesisError;
use bellman::domain::{EvaluationDomain, Scalar};
use pairing::{Engine, Field};
use clear_on_drop::clear::Clear;
/// In-place coefficient-wise addition: `a[i] += b[i]` for every `i`,
/// parallelised across worker threads in contiguous chunks.
///
/// # Panics
/// Panics if the two slices differ in length.
pub fn add_polynomials<E: Engine>(a: &mut [E::Fr], b: &[E::Fr]) {
    assert_eq!(a.len(), b.len());
    let worker = Worker::new();
    worker.scope(a.len(), |scope, chunk| {
        // Pair up equal-sized chunks of both slices; each pair is summed
        // on its own spawned task.
        for (lhs_chunk, rhs_chunk) in a.chunks_mut(chunk).zip(b.chunks(chunk)) {
            scope.spawn(move |_| {
                for (lhs, rhs) in lhs_chunk.iter_mut().zip(rhs_chunk.iter()) {
                    lhs.add_assign(rhs);
                }
            });
        }
    });
}
/// Multiply two coefficient-form polynomials via FFT, returning the
/// product's coefficients (length `a.len() + b.len() - 1`).
pub fn mul_polynomials<E: Engine>(a: &[E::Fr], b: &[E::Fr]) -> Result<Vec<E::Fr>, SynthesisError> {
    // Degree bound of the product determines the evaluation-domain size.
    let res_len = a.len() + b.len() - 1;
    let worker = Worker::new();
    let wrapped_a: Vec<Scalar<E>> = a.iter().map(|coeff| Scalar::<E>(*coeff)).collect();
    // the size of evaluation domain is polynomial's multiplied by other.
    let mut domain_a = EvaluationDomain::from_coeffs_into_sized(wrapped_a, res_len)?;
    let wrapped_b: Vec<Scalar<E>> = b.iter().map(|coeff| Scalar::<E>(*coeff)).collect();
    let mut domain_b = EvaluationDomain::from_coeffs_into_sized(wrapped_b, res_len)?;
    // Convert to point-value representations
    domain_a.fft(&worker);
    domain_b.fft(&worker);
    // Perform O(n) multiplication of two polynomials in the domain.
    domain_a.mul_assign(&worker, &domain_b);
    drop(domain_b);
    // Convert back to point-value representations
    domain_a.ifft(&worker);
    let mut product: Vec<E::Fr> = domain_a.into_coeffs().iter().map(|scalar| scalar.0).collect();
    // The domain may be padded beyond the product degree; trim the excess.
    product.truncate(res_len);
    Ok(product)
}
|
//
// Copyright 2019 The Project Oak Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! Helper library for accessing Oak storage services.
extern crate protobuf;
use crate::grpc;
use crate::{ReadHandle, WriteHandle};
use proto::storage_channel::{
StorageChannelDeleteRequest, StorageChannelReadRequest, StorageChannelRequest,
StorageChannelResponse, StorageChannelWriteRequest,
};
use protobuf::Message;
/// Local representation of the connection to an external storage service.
pub struct Storage {
write_channel: crate::io::Channel,
wait_space: Vec<u8>,
read_channel: crate::ReadHandle,
}
impl Default for Storage {
/// Create a default `Storage` instance assuming the standard port names
/// (`"storage_in"`, `"storage_out"`) for pre-defined channels for storage
/// communication.
fn default() -> Storage {
Storage::new("storage_in", "storage_out")
}
}
impl Storage {
    /// Create a `Storage` instance using the given port names for pre-defined
    /// channels for storage communication.
    ///
    /// `in_port_name` names the channel responses arrive on;
    /// `out_port_name` names the channel requests are written to.
    pub fn new(in_port_name: &str, out_port_name: &str) -> Storage {
        let read_handle = ReadHandle {
            handle: crate::channel_find(in_port_name),
        };
        let write_handle = WriteHandle {
            handle: crate::channel_find(out_port_name),
        };
        // Precompute the wait-space for the single read handle so
        // execute_operation can block on it without rebuilding it each call.
        let handles = vec![read_handle];
        Storage {
            write_channel: crate::io::Channel::new(write_handle),
            wait_space: crate::new_handle_space(&handles),
            read_channel: read_handle,
        }
    }
    /// Send `operation_request` to the storage service and block until its
    /// response arrives, returning the parsed response.
    ///
    /// Panics if serialization, parsing, or the channel read fails, or if
    /// the service unexpectedly sends channel handles with the response.
    fn execute_operation(
        &mut self,
        operation_request: &StorageChannelRequest,
    ) -> StorageChannelResponse {
        info!(
            "StorageChannelRequest: {}",
            protobuf::text_format::print_to_string(operation_request)
        );
        operation_request
            .write_to_writer(&mut self.write_channel)
            .unwrap();
        // Block until there is a response available.
        // SAFETY assumption: wait_space was built by new_handle_space for
        // exactly one handle, matching the count argument of 1 — presumably
        // the contract wait_on_channels requires; confirm against the Oak ABI.
        unsafe {
            crate::wasm::wait_on_channels(self.wait_space.as_mut_ptr(), 1);
        }
        let mut buffer = Vec::<u8>::with_capacity(256);
        let mut handles = Vec::<crate::Handle>::with_capacity(1);
        crate::channel_read(self.read_channel, &mut buffer, &mut handles);
        // The storage protocol is data-only; any accompanying handle means
        // something has gone wrong on the other end.
        if !handles.is_empty() {
            panic!("unexpected handles received alongside storage request")
        }
        let response: StorageChannelResponse =
            protobuf::parse_from_reader(&mut &buffer[..]).unwrap();
        info!(
            "StorageChannelResponse: {}",
            protobuf::text_format::print_to_string(&response)
        );
        response
    }
    /// Read the value associated with the given `name` from the storage
    /// instance identified by `name`.
    ///
    /// Returns the stored bytes, or the service's error status if the
    /// response status code is non-zero.
    pub fn read(&mut self, storage_name: &[u8], name: &[u8]) -> grpc::Result<Vec<u8>> {
        let mut read_request = StorageChannelReadRequest::new();
        read_request.datum_name = name.to_owned();
        let mut operation_request = StorageChannelRequest::new();
        operation_request.storage_name = storage_name.to_owned();
        operation_request.set_read_request(read_request);
        let mut operation_response = self.execute_operation(&operation_request);
        let status = operation_response.take_status();
        // Status code 0 means success; anything else is surfaced as an error.
        if status.code != 0 {
            Err(status)
        } else {
            Ok(operation_response
                .get_read_response()
                .get_datum_value()
                .to_vec())
        }
    }
    /// Set the value associated with the given `name` from the storage instance
    /// identified by `name`.
    ///
    /// Returns `Ok(())` on success, or the service's error status otherwise.
    pub fn write(&mut self, storage_name: &[u8], name: &[u8], value: &[u8]) -> grpc::Result<()> {
        let mut write_request = StorageChannelWriteRequest::new();
        write_request.datum_name = name.to_owned();
        write_request.datum_value = value.to_owned();
        let mut operation_request = StorageChannelRequest::new();
        operation_request.storage_name = storage_name.to_owned();
        operation_request.set_write_request(write_request);
        let mut operation_response = self.execute_operation(&operation_request);
        let status = operation_response.take_status();
        if status.code != 0 {
            Err(status)
        } else {
            Ok(())
        }
    }
    /// Delete the value associated with the given `name` from the storage
    /// instance identified by `name`.
    ///
    /// Returns `Ok(())` on success, or the service's error status otherwise.
    pub fn delete(&mut self, storage_name: &[u8], name: &[u8]) -> grpc::Result<()> {
        let mut delete_request = StorageChannelDeleteRequest::new();
        delete_request.datum_name = name.to_owned();
        let mut operation_request = StorageChannelRequest::new();
        operation_request.storage_name = storage_name.to_owned();
        operation_request.set_delete_request(delete_request);
        let mut operation_response = self.execute_operation(&operation_request);
        let status = operation_response.take_status();
        if status.code != 0 {
            Err(status)
        } else {
            Ok(())
        }
    }
}
|
/// Print the 2^count - 1 moves that transfer a Towers-of-Hanoi stack of
/// `count` disks from peg `current` to peg `target`, using peg `other`
/// as scratch space. Each move is printed as "from -> to".
fn hanoi(count: i32, current: i32, target: i32, other: i32) {
    if count > 0 {
        // Move the top count-1 disks out of the way, move the largest
        // disk, then move the smaller stack onto it.
        hanoi(count - 1, current, other, target);
        println!("{} -> {}", current, target);
        hanoi(count - 1, other, target, current);
    }
}
fn main() {
    // Solve a 6-disk puzzle, moving from peg 1 to peg 3 via peg 2.
    hanoi(6, 1, 3, 2);
}
|
use crate::sidebar::make_section;
use crate::{
gui::{BuildContext, Ui, UiMessage, UiNode},
physics::Joint,
scene::commands::{
physics::{
SetPrismaticJointAnchor1Command, SetPrismaticJointAnchor2Command,
SetPrismaticJointAxis1Command, SetPrismaticJointAxis2Command,
},
SceneCommand,
},
send_sync_message,
sidebar::{make_text_mark, make_vec3_input_field, COLUMN_WIDTH, ROW_HEIGHT},
Message,
};
use rg3d::{
core::pool::Handle,
gui::{
grid::{Column, GridBuilder, Row},
message::{MessageDirection, UiMessageData, Vec3EditorMessage},
widget::WidgetBuilder,
},
physics3d::desc::PrismaticJointDesc,
};
use std::sync::mpsc::Sender;
/// Sidebar section exposing the editable properties of a prismatic joint:
/// local anchor/axis for both bodies it connects.
pub struct PrismaticJointSection {
    // Root widget of the whole section, added to the sidebar by the caller.
    pub section: Handle<UiNode>,
    // Vec3 editor for the first body's local anchor.
    joint_anchor: Handle<UiNode>,
    // Vec3 editor for the first body's local axis.
    joint_axis: Handle<UiNode>,
    // Vec3 editor for the second (connected) body's local anchor.
    connected_anchor: Handle<UiNode>,
    // Vec3 editor for the second (connected) body's local axis.
    connected_axis: Handle<UiNode>,
    // Channel used to emit scene commands when the user edits a value.
    sender: Sender<Message>,
}
impl PrismaticJointSection {
    /// Build the section UI: a 2-column grid of labeled Vec3 editors for
    /// the joint anchor/axis and the connected body's anchor/axis.
    pub fn new(ctx: &mut BuildContext, sender: Sender<Message>) -> Self {
        let joint_anchor;
        let joint_axis;
        let connected_anchor;
        let connected_axis;
        let section = make_section(
            "Prismatic Joint Properties",
            GridBuilder::new(
                WidgetBuilder::new()
                    .with_child(make_text_mark(ctx, "Joint Anchor", 0))
                    .with_child({
                        joint_anchor = make_vec3_input_field(ctx, 0);
                        joint_anchor
                    })
                    .with_child(make_text_mark(ctx, "Joint Axis", 1))
                    .with_child({
                        joint_axis = make_vec3_input_field(ctx, 1);
                        joint_axis
                    })
                    .with_child(make_text_mark(ctx, "Connected Anchor", 2))
                    .with_child({
                        connected_anchor = make_vec3_input_field(ctx, 2);
                        connected_anchor
                    })
                    .with_child(make_text_mark(ctx, "Connected Axis", 3))
                    .with_child({
                        connected_axis = make_vec3_input_field(ctx, 3);
                        connected_axis
                    }),
            )
            .add_column(Column::strict(COLUMN_WIDTH))
            .add_column(Column::stretch())
            .add_row(Row::strict(ROW_HEIGHT))
            .add_row(Row::strict(ROW_HEIGHT))
            .add_row(Row::strict(ROW_HEIGHT))
            .add_row(Row::strict(ROW_HEIGHT))
            .build(ctx),
            ctx,
        );
        Self {
            section,
            sender,
            joint_anchor,
            joint_axis,
            connected_anchor,
            connected_axis,
        }
    }
    /// Push the joint's current values into the four editors.
    /// Uses send_sync_message so these updates don't echo back as edits.
    pub fn sync_to_model(&mut self, prismatic: &PrismaticJointDesc, ui: &mut Ui) {
        send_sync_message(
            ui,
            Vec3EditorMessage::value(
                self.joint_anchor,
                MessageDirection::ToWidget,
                prismatic.local_anchor1,
            ),
        );
        send_sync_message(
            ui,
            Vec3EditorMessage::value(
                self.joint_axis,
                MessageDirection::ToWidget,
                prismatic.local_axis1,
            ),
        );
        send_sync_message(
            ui,
            Vec3EditorMessage::value(
                self.connected_anchor,
                MessageDirection::ToWidget,
                prismatic.local_anchor2,
            ),
        );
        send_sync_message(
            ui,
            Vec3EditorMessage::value(
                self.connected_axis,
                MessageDirection::ToWidget,
                prismatic.local_axis2,
            ),
        );
    }
    /// React to a Vec3 editor change: when the new value differs from the
    /// model, emit the matching SetPrismaticJoint* scene command for
    /// `handle`. Only FromWidget messages (user edits) are handled, so
    /// sync_to_model updates don't loop back into commands.
    pub fn handle_message(
        &mut self,
        message: &UiMessage,
        prismatic: &PrismaticJointDesc,
        handle: Handle<Joint>,
    ) {
        if let UiMessageData::Vec3Editor(Vec3EditorMessage::Value(value)) = *message.data() {
            if message.direction() == MessageDirection::FromWidget {
                if message.destination() == self.joint_anchor && prismatic.local_anchor1.ne(&value)
                {
                    self.sender
                        .send(Message::DoSceneCommand(
                            SceneCommand::SetPrismaticJointAnchor1(
                                SetPrismaticJointAnchor1Command::new(handle, value),
                            ),
                        ))
                        .unwrap();
                } else if message.destination() == self.joint_axis
                    && prismatic.local_axis1.ne(&value)
                {
                    self.sender
                        .send(Message::DoSceneCommand(
                            SceneCommand::SetPrismaticJointAxis1(
                                SetPrismaticJointAxis1Command::new(handle, value),
                            ),
                        ))
                        .unwrap();
                } else if message.destination() == self.connected_anchor
                    && prismatic.local_anchor2.ne(&value)
                {
                    self.sender
                        .send(Message::DoSceneCommand(
                            SceneCommand::SetPrismaticJointAnchor2(
                                SetPrismaticJointAnchor2Command::new(handle, value),
                            ),
                        ))
                        .unwrap();
                } else if message.destination() == self.connected_axis
                    && prismatic.local_axis2.ne(&value)
                {
                    self.sender
                        .send(Message::DoSceneCommand(
                            SceneCommand::SetPrismaticJointAxis2(
                                SetPrismaticJointAxis2Command::new(handle, value),
                            ),
                        ))
                        .unwrap();
                }
            }
        }
    }
}
|
impl Solution {
    /// Sum of Hamming distances between every pair of numbers in `nums`.
    ///
    /// For each bit position, if `c` of the `n` numbers have the bit clear
    /// then the remaining `n - c` have it set, and that position contributes
    /// `c * (n - c)` to the pairwise total. Runs in O(32 * n).
    ///
    /// Generalized: the loop now covers all 32 bits of `i32` (it previously
    /// stopped at bit 29, which silently undercounted for values using bits
    /// 30 or 31, e.g. negatives). For inputs whose high bits are all zero
    /// the extra positions contribute `n * 0 = 0`, so existing results are
    /// unchanged.
    pub fn total_hamming_distance(nums: Vec<i32>) -> i32 {
        let n = nums.len() as i32;
        let mut res = 0;
        for i in 0..32 {
            // Count how many numbers have bit `i` clear.
            let c = nums.iter().filter(|&&num| num >> i & 1 == 0).count() as i32;
            res += c * (n - c);
        }
        res
    }
}
|
mod abort_transaction;
mod aggregate;
mod commit_transaction;
mod count;
mod count_documents;
mod create;
mod create_indexes;
mod delete;
mod distinct;
mod drop_collection;
mod drop_database;
mod drop_indexes;
mod find;
mod find_and_modify;
mod get_more;
mod insert;
mod list_collections;
mod list_databases;
mod list_indexes;
mod raw_output;
mod run_command;
mod run_cursor_command;
mod update;
#[cfg(test)]
mod test;
use std::{collections::VecDeque, fmt::Debug, ops::Deref};
use bson::{RawBsonRef, RawDocument, RawDocumentBuf, Timestamp};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::{
bson::{self, Bson, Document},
bson_util::{self, extend_raw_document_buf},
client::{ClusterTime, HELLO_COMMAND_NAMES, REDACTED_COMMANDS},
cmap::{conn::PinnedConnectionHandle, Command, RawCommandResponse, StreamDescription},
error::{
BulkWriteError,
BulkWriteFailure,
CommandError,
Error,
ErrorKind,
Result,
WriteConcernError,
WriteFailure,
},
options::WriteConcern,
selection_criteria::SelectionCriteria,
Namespace,
};
pub(crate) use abort_transaction::AbortTransaction;
pub(crate) use aggregate::{Aggregate, AggregateTarget, ChangeStreamAggregate};
pub(crate) use commit_transaction::CommitTransaction;
pub(crate) use count::Count;
pub(crate) use count_documents::CountDocuments;
pub(crate) use create::Create;
pub(crate) use create_indexes::CreateIndexes;
pub(crate) use delete::Delete;
pub(crate) use distinct::Distinct;
pub(crate) use drop_collection::DropCollection;
pub(crate) use drop_database::DropDatabase;
pub(crate) use drop_indexes::DropIndexes;
pub(crate) use find::Find;
pub(crate) use find_and_modify::FindAndModify;
pub(crate) use get_more::GetMore;
pub(crate) use insert::Insert;
pub(crate) use list_collections::ListCollections;
pub(crate) use list_databases::ListDatabases;
pub(crate) use list_indexes::ListIndexes;
#[cfg(feature = "in-use-encryption-unstable")]
pub(crate) use raw_output::RawOutput;
pub(crate) use run_command::RunCommand;
pub(crate) use run_cursor_command::RunCursorCommand;
pub(crate) use update::{Update, UpdateOrReplace};
// Wire protocol version introduced by MongoDB server 4.2.0.
const SERVER_4_2_0_WIRE_VERSION: i32 = 8;
// Wire protocol version introduced by MongoDB server 4.4.0.
const SERVER_4_4_0_WIRE_VERSION: i32 = 9;
/// A trait modeling the behavior of a server side operation.
///
/// No methods in this trait should have default behaviors to ensure that wrapper operations
/// replicate all behavior. Default behavior is provided by the `OperationDefault` trait.
pub(crate) trait Operation {
    /// The output type of this operation.
    type O;
    /// The format of the command body constructed in `build`.
    type Command: CommandBody;
    /// The name of the server side command associated with this operation.
    const NAME: &'static str;
    /// Returns the command that should be sent to the server as part of this operation.
    /// The operation may store some additional state that is required for handling the response.
    fn build(&mut self, description: &StreamDescription) -> Result<Command<Self::Command>>;
    /// Perform custom serialization of the built command.
    /// By default, this will just call through to the `Serialize` implementation of the command.
    fn serialize_command(&mut self, cmd: Command<Self::Command>) -> Result<Vec<u8>>;
    /// Parse the response for the atClusterTime field.
    /// Depending on the operation, this may be found in different locations.
    fn extract_at_cluster_time(&self, _response: &RawDocument) -> Result<Option<Timestamp>>;
    /// Interprets the server response to the command.
    fn handle_response(
        &self,
        response: RawCommandResponse,
        description: &StreamDescription,
    ) -> Result<Self::O>;
    /// Interpret an error encountered while sending the built command to the server, potentially
    /// recovering.
    fn handle_error(&self, error: Error) -> Result<Self::O>;
    /// Criteria to use for selecting the server that this operation will be executed on.
    fn selection_criteria(&self) -> Option<&SelectionCriteria>;
    /// Whether or not this operation will request acknowledgment from the server.
    fn is_acknowledged(&self) -> bool;
    /// The write concern to use for this operation, if any.
    fn write_concern(&self) -> Option<&WriteConcern>;
    /// Returns whether or not this command supports the `readConcern` field.
    fn supports_read_concern(&self, _description: &StreamDescription) -> bool;
    /// Whether this operation supports sessions or not.
    fn supports_sessions(&self) -> bool;
    /// The level of retryability the operation supports.
    fn retryability(&self) -> Retryability;
    /// Updates this operation as needed for a retry.
    fn update_for_retry(&mut self);
    /// The connection this operation is pinned to, if any.
    /// NOTE(review): presumably used so retries/continuations reuse the same
    /// connection (e.g. transactions or cursors) — confirm at the call sites.
    fn pinned_connection(&self) -> Option<&PinnedConnectionHandle>;
    /// The runtime name of the command this operation sends.
    /// NOTE(review): presumably `Self::NAME` for most operations, overridden
    /// where the wire-level name differs — confirm in the implementations.
    fn name(&self) -> &str;
}
/// A serializable command body that can report whether it contains sensitive
/// data that should be redacted.
pub(crate) trait CommandBody: Serialize {
    /// Whether this command body should be redacted. Defaults to `false`.
    fn should_redact(&self) -> bool {
        false
    }
}
impl CommandBody for Document {
    /// Redact hello-family commands that carry a speculative-authentication
    /// payload, since that payload contains credentials.
    fn should_redact(&self) -> bool {
        match bson_util::first_key(self) {
            Some(command_name) => {
                HELLO_COMMAND_NAMES.contains(command_name.to_lowercase().as_str())
                    && self.contains_key("speculativeAuthenticate")
            }
            None => false,
        }
    }
}
impl CommandBody for RawDocumentBuf {
    /// Same redaction rule as the `Document` implementation, operating on the
    /// raw BSON bytes instead.
    fn should_redact(&self) -> bool {
        // Take the first key of the raw document; bail out on an empty or
        // unparseable document.
        let first_key = match self.into_iter().next() {
            Some(Ok((name, _))) => name,
            _ => return false,
        };
        HELLO_COMMAND_NAMES.contains(first_key.to_lowercase().as_str())
            && matches!(self.get("speculativeAuthenticate"), Ok(Some(_)))
    }
}
impl<T: CommandBody> Command<T> {
    /// Whether this command should be redacted, either because its name is in
    /// the redacted-commands list or because its body asks for redaction.
    pub(crate) fn should_redact(&self) -> bool {
        let lowered = self.name.to_lowercase();
        if REDACTED_COMMANDS.contains(lowered.as_str()) {
            return true;
        }
        self.body.should_redact()
    }
    /// Whether this command is eligible for wire compression; redacted and
    /// hello-family commands are exempt.
    pub(crate) fn should_compress(&self) -> bool {
        let lowered = self.name.to_lowercase();
        let exempt = REDACTED_COMMANDS.contains(lowered.as_str())
            || HELLO_COMMAND_NAMES.contains(lowered.as_str());
        !exempt
    }
}
/// A response to a command with a body shaped deserialized to a `T`.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub(crate) struct CommandResponse<T> {
    /// The server's "ok" field; a numeric value where 1 indicates success.
    pub(crate) ok: Bson,
    /// The cluster time reported by the server, if any.
    #[serde(rename = "$clusterTime")]
    pub(crate) cluster_time: Option<ClusterTime>,
    /// The remainder of the response, deserialized as `T`.
    #[serde(flatten)]
    pub(crate) body: T,
}
impl<T: DeserializeOwned> CommandResponse<T> {
    /// Whether the command succeeded or not (i.e. if this response is ok: 1).
    pub(crate) fn is_success(&self) -> bool {
        bson_util::get_int(&self.ok) == Some(1)
    }
    /// The cluster time reported by the server in this response, if any.
    pub(crate) fn cluster_time(&self) -> Option<&ClusterTime> {
        self.cluster_time.as_ref()
    }
}
/// A response body useful for deserializing command errors.
#[derive(Deserialize, Debug)]
pub(crate) struct CommandErrorBody {
    /// Error labels attached by the server (e.g. for retryability decisions).
    #[serde(rename = "errorLabels")]
    pub(crate) error_labels: Option<Vec<String>>,
    /// The error details themselves, flattened from the response body.
    #[serde(flatten)]
    pub(crate) command_error: CommandError,
}
impl From<CommandErrorBody> for Error {
fn from(command_error_response: CommandErrorBody) -> Error {
Error::new(
ErrorKind::Command(command_error_response.command_error),
command_error_response.error_labels,
)
}
}
/// Appends a serializable struct to the input document. The serializable struct MUST serialize to a
/// Document; otherwise, an error will be thrown.
pub(crate) fn append_options<T: Serialize + Debug>(
    doc: &mut Document,
    options: Option<&T>,
) -> Result<()> {
    // Nothing to append when no options were supplied.
    let opts = match options {
        Some(o) => o,
        None => return Ok(()),
    };
    doc.extend(bson::to_document(opts)?);
    Ok(())
}
/// Serializes `options` (when present) and merges the resulting raw document
/// into `doc`.
pub(crate) fn append_options_to_raw_document<T: Serialize>(
    doc: &mut RawDocumentBuf,
    options: Option<&T>,
) -> Result<()> {
    let opts = match options {
        Some(o) => o,
        None => return Ok(()),
    };
    let serialized = bson::to_raw_document_buf(opts)?;
    extend_raw_document_buf(doc, serialized)?;
    Ok(())
}
/// A response body with no expected fields.
#[derive(Deserialize, Debug)]
pub(crate) struct EmptyBody {}
/// Body of a write response that could possibly have a write concern error but not write errors.
#[derive(Debug, Deserialize, Default, Clone)]
pub(crate) struct WriteConcernOnlyBody {
    /// A write concern error reported by the server, if any.
    #[serde(rename = "writeConcernError")]
    write_concern_error: Option<WriteConcernError>,
    /// Error labels attached by the server, if any.
    #[serde(rename = "errorLabels")]
    labels: Option<Vec<String>>,
}
impl WriteConcernOnlyBody {
fn validate(&self) -> Result<()> {
match self.write_concern_error {
Some(ref wc_error) => Err(Error::new(
ErrorKind::Write(WriteFailure::WriteConcernError(wc_error.clone())),
self.labels.clone(),
)),
None => Ok(()),
}
}
}
/// Body of a write response that may contain write errors and/or a write
/// concern error in addition to an operation-specific body `T`.
#[derive(Deserialize, Debug)]
pub(crate) struct WriteResponseBody<T = EmptyBody> {
    /// Operation-specific fields, flattened into the response.
    #[serde(flatten)]
    body: T,
    /// The number of documents affected — TODO confirm exact semantics per command.
    n: u64,
    /// Per-document write errors reported by the server, if any.
    #[serde(rename = "writeErrors")]
    write_errors: Option<Vec<BulkWriteError>>,
    /// A write concern error reported by the server, if any.
    #[serde(rename = "writeConcernError")]
    write_concern_error: Option<WriteConcernError>,
    /// Error labels attached by the server, if any.
    #[serde(rename = "errorLabels")]
    labels: Option<Vec<String>>,
}
impl<T> WriteResponseBody<T> {
fn validate(&self) -> Result<()> {
if self.write_errors.is_none() && self.write_concern_error.is_none() {
return Ok(());
};
let failure = BulkWriteFailure {
write_errors: self.write_errors.clone(),
write_concern_error: self.write_concern_error.clone(),
inserted_ids: Default::default(),
};
Err(Error::new(
ErrorKind::BulkWrite(failure),
self.labels.clone(),
))
}
}
// Transparent access to the operation-specific `body` fields.
impl<T> Deref for WriteResponseBody<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.body
    }
}
/// A response body containing a `cursor` field.
#[derive(Debug, Deserialize)]
pub(crate) struct CursorBody {
    // The cursor metadata returned by the server.
    cursor: CursorInfo,
}
impl CursorBody {
    /// Reads the `cursor.atClusterTime` timestamp out of a raw server
    /// response, returning `None` when either level is absent.
    fn extract_at_cluster_time(response: &RawDocument) -> Result<Option<Timestamp>> {
        // Locate the `cursor` subdocument; without it there is nothing to read.
        let cursor_doc = match response.get("cursor")?.and_then(RawBsonRef::as_document) {
            Some(doc) => doc,
            None => return Ok(None),
        };
        let at_cluster_time = cursor_doc.get("atClusterTime")?;
        Ok(at_cluster_time.and_then(RawBsonRef::as_timestamp))
    }
}
/// Cursor metadata as returned in the `cursor` field of a command response.
#[derive(Debug, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub(crate) struct CursorInfo {
    /// The server-assigned cursor id.
    pub(crate) id: i64,
    /// The namespace the cursor iterates over.
    pub(crate) ns: Namespace,
    /// The initial batch of documents returned with the cursor.
    pub(crate) first_batch: VecDeque<RawDocumentBuf>,
    /// Resume token returned after the batch, if any.
    pub(crate) post_batch_resume_token: Option<RawDocumentBuf>,
}
/// Type used to deserialize just the first result from a cursor, if any.
#[derive(Debug, Clone)]
// Inner value is `None` when the cursor's first batch was empty.
pub(crate) struct SingleCursorResult<T>(Option<T>);
impl<'de, T> Deserialize<'de> for SingleCursorResult<T>
where
    T: Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // Local shapes mirroring the `{ cursor: { firstBatch: [...] } }`
        // layout of a cursor response.
        #[derive(Deserialize)]
        struct OuterBody<T> {
            cursor: BatchBody<T>,
        }
        #[derive(Deserialize)]
        struct BatchBody<T> {
            #[serde(rename = "firstBatch")]
            first_batch: Vec<T>,
        }
        let mut outer = OuterBody::deserialize(deserializer)?;
        // NOTE: `pop` removes the *last* element of the batch; callers are
        // expected to use this only on responses with at most one result.
        let result = outer.cursor.first_batch.pop();
        Ok(SingleCursorResult(result))
    }
}
/// The level of retryability an operation supports.
#[derive(Debug, PartialEq, Clone, Copy)]
pub(crate) enum Retryability {
    /// The operation may be retried as a retryable write.
    Write,
    /// The operation may be retried as a retryable read.
    Read,
    /// The operation must not be retried.
    None,
}
/// Clears an options struct's `write_concern` field when it is set but empty,
/// so that an empty write concern is not serialized into the command.
macro_rules! remove_empty_write_concern {
    ($opts:expr) => {
        if let Some(ref mut options) = $opts {
            if let Some(ref write_concern) = options.write_concern {
                if write_concern.is_empty() {
                    options.write_concern = None;
                }
            }
        }
    };
}
pub(crate) use remove_empty_write_concern;
// A mirror of the `Operation` trait, with default behavior where appropriate. Should only be
// implemented by operation types that do not delegate to other operations.
pub(crate) trait OperationWithDefaults {
    /// The output type of this operation.
    type O;
    /// The format of the command body constructed in `build`.
    type Command: CommandBody;
    /// The name of the server side command associated with this operation.
    const NAME: &'static str;
    /// Returns the command that should be sent to the server as part of this operation.
    /// The operation may store some additional state that is required for handling the response.
    fn build(&mut self, description: &StreamDescription) -> Result<Command<Self::Command>>;
    /// Perform custom serialization of the built command.
    /// By default, this will just call through to the `Serialize` implementation of the command.
    fn serialize_command(&mut self, cmd: Command<Self::Command>) -> Result<Vec<u8>> {
        Ok(bson::to_vec(&cmd)?)
    }
    /// Parse the response for the atClusterTime field.
    /// Depending on the operation, this may be found in different locations.
    fn extract_at_cluster_time(&self, _response: &RawDocument) -> Result<Option<Timestamp>> {
        Ok(None)
    }
    /// Interprets the server response to the command.
    fn handle_response(
        &self,
        response: RawCommandResponse,
        description: &StreamDescription,
    ) -> Result<Self::O>;
    /// Interpret an error encountered while sending the built command to the server, potentially
    /// recovering.
    fn handle_error(&self, error: Error) -> Result<Self::O> {
        Err(error)
    }
    /// Criteria to use for selecting the server that this operation will be executed on.
    fn selection_criteria(&self) -> Option<&SelectionCriteria> {
        None
    }
    /// Whether or not this operation will request acknowledgment from the server.
    fn is_acknowledged(&self) -> bool {
        self.write_concern()
            .map(WriteConcern::is_acknowledged)
            .unwrap_or(true)
    }
    /// The write concern to use for this operation, if any.
    fn write_concern(&self) -> Option<&WriteConcern> {
        None
    }
    /// Returns whether or not this command supports the `readConcern` field.
    fn supports_read_concern(&self, _description: &StreamDescription) -> bool {
        false
    }
    /// Whether this operation supports sessions or not.
    fn supports_sessions(&self) -> bool {
        true
    }
    /// The level of retryability the operation supports.
    fn retryability(&self) -> Retryability {
        Retryability::None
    }
    /// Updates this operation as needed for a retry.
    fn update_for_retry(&mut self) {}
    /// The connection to use for this operation, if one has been pinned.
    fn pinned_connection(&self) -> Option<&PinnedConnectionHandle> {
        None
    }
    /// The name of the command this operation sends; defaults to `Self::NAME`.
    fn name(&self) -> &str {
        Self::NAME
    }
}
// Blanket implementation: any `OperationWithDefaults` is an `Operation` by
// delegating every item to the (possibly defaulted) inherent definition.
impl<T: OperationWithDefaults> Operation for T {
    type O = T::O;
    type Command = T::Command;
    const NAME: &'static str = T::NAME;
    fn build(&mut self, description: &StreamDescription) -> Result<Command<Self::Command>> {
        self.build(description)
    }
    fn serialize_command(&mut self, cmd: Command<Self::Command>) -> Result<Vec<u8>> {
        self.serialize_command(cmd)
    }
    fn extract_at_cluster_time(&self, response: &RawDocument) -> Result<Option<Timestamp>> {
        self.extract_at_cluster_time(response)
    }
    fn handle_response(
        &self,
        response: RawCommandResponse,
        description: &StreamDescription,
    ) -> Result<Self::O> {
        self.handle_response(response, description)
    }
    fn handle_error(&self, error: Error) -> Result<Self::O> {
        self.handle_error(error)
    }
    fn selection_criteria(&self) -> Option<&SelectionCriteria> {
        self.selection_criteria()
    }
    fn is_acknowledged(&self) -> bool {
        self.is_acknowledged()
    }
    fn write_concern(&self) -> Option<&WriteConcern> {
        self.write_concern()
    }
    fn supports_read_concern(&self, description: &StreamDescription) -> bool {
        self.supports_read_concern(description)
    }
    fn supports_sessions(&self) -> bool {
        self.supports_sessions()
    }
    fn retryability(&self) -> Retryability {
        self.retryability()
    }
    fn update_for_retry(&mut self) {
        self.update_for_retry()
    }
    fn pinned_connection(&self) -> Option<&PinnedConnectionHandle> {
        self.pinned_connection()
    }
    fn name(&self) -> &str {
        self.name()
    }
}
|
use crate::rtb_type_strict;
// Strictly-typed enum of companion ad rendering mode codes
// (1 = Concurrent, 2 = EndCard) — presumably mirroring the OpenRTB
// CompanionAdRenderingMode list; confirm against the spec.
rtb_type_strict! {
    CompanionAdRenderingMode,
    Concurrent=1;
    EndCard=2
}
|
// VIEWPORT
/// Zoom rate applied per unit of mouse wheel input.
pub const VIEWPORT_ZOOM_WHEEL_RATE: f64 = 1. / 600.;
/// Zoom rate applied per unit of mouse drag input.
pub const VIEWPORT_ZOOM_MOUSE_RATE: f64 = 1. / 400.;
/// Lower clamp for the viewport zoom scale.
pub const VIEWPORT_ZOOM_SCALE_MIN: f64 = 0.000_000_1;
/// Upper clamp for the viewport zoom scale.
pub const VIEWPORT_ZOOM_SCALE_MAX: f64 = 10_000.;
/// Discrete zoom stops, in ascending order, used when stepping zoom in/out.
pub const VIEWPORT_ZOOM_LEVELS: [f64; 74] = [
	0.0001, 0.000125, 0.00016, 0.0002, 0.00025, 0.00032, 0.0004, 0.0005, 0.00064, 0.0008, 0.001, 0.0016, 0.002, 0.0025, 0.0032, 0.004, 0.005, 0.0064, 0.008, 0.01, 0.01125, 0.015, 0.02, 0.025, 0.03,
	0.04, 0.05, 0.06, 0.08, 0.1, 0.125, 0.15, 0.2, 0.25, 0.33333333, 0.4, 0.5, 0.66666666, 0.8, 1., 1.25, 1.6, 2., 2.5, 3.2, 4., 5., 6.4, 8., 10., 12.5, 16., 20., 25., 32., 40., 50., 64., 80., 100.,
	128., 160., 200., 256., 320., 400., 512., 640., 800., 1024., 1280., 1600., 2048., 2560.,
];
/// Scroll rate applied per unit of scroll input.
pub const VIEWPORT_SCROLL_RATE: f64 = 0.6;
/// Interval that viewport rotation snaps to (presumably degrees — confirm).
pub const VIEWPORT_ROTATE_SNAP_INTERVAL: f64 = 15.;
// LINE TOOL
/// Interval that line rotation snaps to (presumably degrees — confirm).
pub const LINE_ROTATE_SNAP_ANGLE: f64 = 15.;
// SELECT TOOL
/// Hit-testing tolerance for the select tool.
pub const SELECTION_TOLERANCE: f64 = 1.0;
// SCROLLBARS
pub const SCROLLBAR_SPACING: f64 = 0.1;
pub const ASYMPTOTIC_EFFECT: f64 = 0.5;
pub const SCALE_EFFECT: f64 = 0.5;
/// Name given to documents that have not been saved yet.
pub const DEFAULT_DOCUMENT_NAME: &str = "Untitled Document";
/// File extension used when saving documents.
pub const FILE_SAVE_SUFFIX: &str = ".graphite";
/// File extension used when exporting documents.
pub const FILE_EXPORT_SUFFIX: &str = ".svg";
|
// Copyright 2019 Contributors to the Parsec project.
// SPDX-License-Identifier: Apache-2.0
#![deny(
nonstandard_style,
const_err,
dead_code,
improper_ctypes,
non_shorthand_field_patterns,
no_mangle_generic_items,
overflowing_literals,
path_statements,
patterns_in_fns_without_body,
private_in_public,
unconditional_recursion,
unused,
unused_allocation,
unused_comparisons,
unused_parens,
while_true,
missing_debug_implementations,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unused_results,
missing_copy_implementations
)]
// This one is hard to avoid.
#![allow(clippy::multiple_crate_versions)]
use cargo_toml::{Manifest, Value};
use serde::Deserialize;
use std::env;
use std::io::{Error, ErrorKind, Result};
use std::path::{Path, PathBuf};
/// Name of the metadata table read from Cargo.toml (`package.metadata.config`).
const CONFIG_TABLE_NAME: &str = "config";
/// Key in that table giving the Mbed Crypto version to fetch and build.
const MBED_CRYPTO_VERSION_KEY: &str = "mbed-crypto-version";
/// Script that clones and builds Mbed Crypto as a static library.
const SETUP_MBED_SCRIPT_PATH: &str = "./setup_mbed_crypto.sh";
/// Build configuration file read by `parse_config_file`.
const BUILD_CONFIG_FILE_PATH: &str = "./build-conf.toml";
// Fallback toolchains when the config file does not specify them.
const DEFAULT_NATIVE_MBED_COMPILER: &str = "clang";
const DEFAULT_NATIVE_MBED_ARCHIVER: &str = "ar";
const DEFAULT_ARM64_MBED_COMPILER: &str = "aarch64-linux-gnu-gcc";
const DEFAULT_ARM64_MBED_ARCHIVER: &str = "aarch64-linux-gnu-ar";
/// Top-level shape of build-conf.toml.
#[derive(Debug, Deserialize)]
struct Configuration {
    // Optional Mbed Crypto build settings.
    mbed_config: Option<MbedConfig>,
}
/// Mbed Crypto build settings from build-conf.toml.
#[derive(Debug, Deserialize)]
struct MbedConfig {
    // Where to clone/build Mbed Crypto; defaults to $OUT_DIR when absent.
    mbed_path: Option<String>,
    // Toolchain for native builds.
    native: Option<Toolchain>,
    // Toolchain for aarch64-unknown-linux-gnu cross builds.
    aarch64_unknown_linux_gnu: Option<Toolchain>,
}
/// A compiler/archiver pair used to build Mbed Crypto.
#[derive(Debug, Deserialize)]
struct Toolchain {
    // C compiler; falls back to a per-target default when absent.
    mbed_compiler: Option<String>,
    // Archiver; falls back to a per-target default when absent.
    mbed_archiver: Option<String>,
}
/// Reads the string stored under `key` in the `parsec_config` TOML table.
///
/// Returns an `InvalidInput` error when the key is missing or when the value
/// exists but is not a TOML string.
fn get_configuration_string(parsec_config: &Value, key: &str) -> Result<String> {
    let config_value = get_value_from_table(parsec_config, key)?;
    match config_value {
        Value::String(string) => Ok(string.clone()),
        // Fix: this arm is reached when the key *exists* but has the wrong
        // type; the previous "Configuration key missing" message was misleading.
        _ => Err(Error::new(
            ErrorKind::InvalidInput,
            "Configuration value is not a string",
        )),
    }
}
/// Looks up `key` in a TOML table `Value`, erroring when the value is not a
/// table or the key is absent.
fn get_value_from_table<'a>(table: &'a Value, key: &str) -> Result<&'a Value> {
    let entries = match table {
        Value::Table(entries) => entries,
        _ => {
            return Err(Error::new(
                ErrorKind::InvalidInput,
                "Value provided is not a TOML table",
            ))
        }
    };
    entries.get(key).ok_or_else(|| {
        println!("Config table does not contain configuration key: {}", key);
        Error::new(ErrorKind::InvalidInput, "Configuration key missing.")
    })
}
// Get the Mbed Crypto version to branch on from Cargo.toml file. Use that and MbedConfig to pass
// parameters to the setup_mbed_crypto.sh script which clones and builds Mbed Crypto and create
// a static library.
//
// Returns an error when the needed toolchain subtable is absent, the script
// cannot be launched, or the script exits with a failure status.
fn setup_mbed_crypto(mbed_config: &MbedConfig, mbed_version: &str) -> Result<()> {
    // Select the (compiler, archiver) pair for the build target, falling back
    // to per-target defaults when the config file does not specify them.
    let (mbed_compiler, mbed_archiver) =
        if std::env::var("TARGET").unwrap() == "aarch64-unknown-linux-gnu" {
            // Fixed clippy `needless_late_init`: bind directly instead of
            // `let toolchain; toolchain = ...`.
            let toolchain = mbed_config
                .aarch64_unknown_linux_gnu
                .as_ref()
                .ok_or_else(|| {
                    Error::new(
                        ErrorKind::InvalidInput,
                        "The aarch64_unknown_linux_gnu subtable of mbed_config should exist",
                    )
                })?;
            (
                toolchain
                    .mbed_compiler
                    .clone()
                    .unwrap_or_else(|| DEFAULT_ARM64_MBED_COMPILER.to_string()),
                toolchain
                    .mbed_archiver
                    .clone()
                    .unwrap_or_else(|| DEFAULT_ARM64_MBED_ARCHIVER.to_string()),
            )
        } else {
            let toolchain = mbed_config.native.as_ref().ok_or_else(|| {
                Error::new(
                    ErrorKind::InvalidInput,
                    "The native subtable of mbed_config should exist",
                )
            })?;
            (
                toolchain
                    .mbed_compiler
                    .clone()
                    .unwrap_or_else(|| DEFAULT_NATIVE_MBED_COMPILER.to_string()),
                toolchain
                    .mbed_archiver
                    .clone()
                    .unwrap_or_else(|| DEFAULT_NATIVE_MBED_ARCHIVER.to_string()),
            )
        };
    // Run the setup script, distinguishing "failed to launch" (map_err below)
    // from "ran but returned a failure status" (checked afterwards).
    let status = ::std::process::Command::new(SETUP_MBED_SCRIPT_PATH)
        .arg(mbed_version)
        .arg(
            mbed_config
                .mbed_path
                .clone()
                .unwrap_or_else(|| env::var("OUT_DIR").unwrap()),
        )
        .arg(format!("CC={}", mbed_compiler))
        .arg(format!("AR={}", mbed_archiver))
        .status()
        .map_err(|_| Error::new(ErrorKind::Other, "setup_mbed_crypto.sh script failed"))?;
    if status.success() {
        Ok(())
    } else {
        Err(Error::new(
            ErrorKind::Other,
            "setup_mbed_crypto.sh returned an error status.",
        ))
    }
}
/// Generates Rust bindings for the Mbed Crypto PSA header and writes them to
/// `$OUT_DIR/psa_crypto_bindings.rs`.
fn generate_mbed_bindings(mbed_config: &MbedConfig, mbed_version: &str) -> Result<()> {
    let mbed_include_dir = mbed_config
        .mbed_path
        .clone()
        .unwrap_or_else(|| env::var("OUT_DIR").unwrap())
        + "/mbed-crypto-"
        + mbed_version
        + "/include";
    let header = mbed_include_dir.clone() + "/psa/crypto.h";
    // Re-run the build script whenever the PSA header changes.
    println!("cargo:rerun-if-changed={}", header);
    let bindings = bindgen::Builder::default()
        .clang_arg(format!("-I{}", mbed_include_dir))
        .rustfmt_bindings(true)
        .header(header)
        .generate_comments(false)
        .generate()
        // `map_err` replaces the previous `or_else(|_| Err(...))` — same
        // behavior, idiomatic error mapping.
        .map_err(|_| {
            Error::new(
                ErrorKind::Other,
                "Unable to generate bindings to mbed crypto",
            )
        })?;
    let out_path = PathBuf::from(env::var("OUT_DIR").unwrap());
    bindings.write_to_file(out_path.join("psa_crypto_bindings.rs"))
}
// Get the compiler, the archiver and the location where to clone the Mbed Crypto repository.
//
// Reads and parses build-conf.toml; I/O errors propagate directly and parse
// errors are logged and mapped to `InvalidInput`.
fn parse_config_file() -> Result<Configuration> {
    let config_str = ::std::fs::read_to_string(Path::new(BUILD_CONFIG_FILE_PATH))?;
    // `map_err` replaces the previous `Ok(... .or_else(|e| Err(...))?)`
    // wrapper — the redundant Ok/? round-trip is gone, behavior is identical.
    toml::from_str(&config_str).map_err(|e| {
        println!("Error parsing build configuration file ({}).", e);
        Error::new(
            ErrorKind::InvalidInput,
            "Could not parse build configuration file.",
        )
    })
}
/// Build-script entry point: parses build-conf.toml and Cargo.toml, then (when
/// the `mbed-crypto-provider` feature is enabled) builds Mbed Crypto,
/// generates its bindings, and emits the `cargo:` link directives.
fn main() -> Result<()> {
    // Parsing build-conf.toml
    let config = parse_config_file()?;
    // Parsing Cargo.toml
    let toml_path = std::path::Path::new("./Cargo.toml");
    if !toml_path.exists() {
        return Err(Error::new(
            ErrorKind::InvalidInput,
            "Could not find Cargo.toml.",
        ));
    }
    // `from_path` takes `AsRef<Path>`; the extra `&toml_path` borrow was
    // needless. `map_err` replaces `or_else(|e| Err(...))`.
    let manifest = Manifest::from_path(toml_path).map_err(|e| {
        println!("Error parsing Cargo.toml ({}).", e);
        Error::new(ErrorKind::InvalidInput, "Could not parse Cargo.toml.")
    })?;
    let package = manifest.package.ok_or_else(|| {
        Error::new(
            ErrorKind::InvalidInput,
            "Cargo.toml does not contain package information.",
        )
    })?;
    let metadata = package.metadata.ok_or_else(|| {
        Error::new(
            ErrorKind::InvalidInput,
            "Cargo.toml does not contain package metadata.",
        )
    })?;
    let parsec_config = get_value_from_table(&metadata, CONFIG_TABLE_NAME)?;
    if cfg!(feature = "mbed-crypto-provider") {
        let mbed_config = config.mbed_config.ok_or_else(|| {
            Error::new(
                ErrorKind::InvalidInput,
                "Could not find mbed_config table in the config file.",
            )
        })?;
        let mbed_version = get_configuration_string(&parsec_config, MBED_CRYPTO_VERSION_KEY)?;
        setup_mbed_crypto(&mbed_config, &mbed_version)?;
        generate_mbed_bindings(&mbed_config, &mbed_version)?;
        // Request rustc to link the Mbed Crypto static library
        println!(
            "cargo:rustc-link-search=native={}/mbed-crypto-{}/library/",
            mbed_config
                .mbed_path
                .unwrap_or_else(|| env::var("OUT_DIR").unwrap()),
            mbed_version,
        );
        println!("cargo:rustc-link-lib=static=mbedcrypto");
    }
    Ok(())
}
|
#[doc = "Reader of register INTR_USBHOST_SET"]
pub type R = crate::R<u32, super::INTR_USBHOST_SET>;
#[doc = "Writer for register INTR_USBHOST_SET"]
pub type W = crate::W<u32, super::INTR_USBHOST_SET>;
#[doc = "Register INTR_USBHOST_SET `reset()`'s with value 0"]
impl crate::ResetValue for super::INTR_USBHOST_SET {
    type Type = u32;
    // The register resets to all-zero.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `SOFIRQS`"]
pub type SOFIRQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SOFIRQS`"]
pub struct SOFIRQS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 0 of the register.
impl<'a> SOFIRQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0 then insert `value`.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `DIRQS`"]
pub type DIRQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DIRQS`"]
pub struct DIRQS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 1 of the register.
impl<'a> DIRQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 1 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `CNNIRQS`"]
pub type CNNIRQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CNNIRQS`"]
pub struct CNNIRQS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 2 of the register.
impl<'a> CNNIRQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 2 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
#[doc = "Reader of field `CMPIRQS`"]
pub type CMPIRQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMPIRQS`"]
pub struct CMPIRQS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 3 of the register.
impl<'a> CMPIRQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 3 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
#[doc = "Reader of field `URIRQS`"]
pub type URIRQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `URIRQS`"]
pub struct URIRQS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 4 of the register.
impl<'a> URIRQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 4 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
#[doc = "Reader of field `RWKIRQS`"]
pub type RWKIRQS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RWKIRQS`"]
pub struct RWKIRQS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 5 of the register.
impl<'a> RWKIRQS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 5 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
#[doc = "Reader of field `RSVD_6`"]
pub type RSVD_6_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RSVD_6`"]
pub struct RSVD_6_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 6 of the register.
impl<'a> RSVD_6_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 6 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
        self.w
    }
}
#[doc = "Reader of field `TCANS`"]
pub type TCANS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `TCANS`"]
pub struct TCANS_W<'a> {
    w: &'a mut W,
}
// Write proxy for bit 7 of the register.
impl<'a> TCANS_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 7 then insert `value`.
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
        self.w
    }
}
// Read accessors: each extracts one bit of the register value as a typed reader.
impl R {
    #[doc = "Bit 0 - This bit sets SOFIRQ bit. If this bit is written to '1', SOFIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn sofirqs(&self) -> SOFIRQS_R {
        SOFIRQS_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - This bit sets DIRQ bit. If this bit is written to '1', DIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn dirqs(&self) -> DIRQS_R {
        DIRQS_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - This bit sets CNNIRQ bit. If this bit is written to '1', CNNIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn cnnirqs(&self) -> CNNIRQS_R {
        CNNIRQS_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - This bit sets CMPIRQ bit. If this bit is written to '1', CMPIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn cmpirqs(&self) -> CMPIRQS_R {
        CMPIRQS_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - This bit sets URIRQ bit. If this bit is written to '1', URIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn urirqs(&self) -> URIRQS_R {
        URIRQS_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - This bit sets RWKIRQ bit. If this bit is written to '1', RWKIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn rwkirqs(&self) -> RWKIRQS_R {
        RWKIRQS_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - BCNFTEST interrupt. This bit is test bit"]
    #[inline(always)]
    pub fn rsvd_6(&self) -> RSVD_6_R {
        RSVD_6_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - This bit sets TCAN bit. If this bit is written to '1', TCAN is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn tcans(&self) -> TCANS_R {
        TCANS_R::new(((self.bits >> 7) & 0x01) != 0)
    }
}
// Write accessors: each returns a write proxy for one bit of the register.
impl W {
    #[doc = "Bit 0 - This bit sets SOFIRQ bit. If this bit is written to '1', SOFIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn sofirqs(&mut self) -> SOFIRQS_W {
        SOFIRQS_W { w: self }
    }
    #[doc = "Bit 1 - This bit sets DIRQ bit. If this bit is written to '1', DIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn dirqs(&mut self) -> DIRQS_W {
        DIRQS_W { w: self }
    }
    #[doc = "Bit 2 - This bit sets CNNIRQ bit. If this bit is written to '1', CNNIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn cnnirqs(&mut self) -> CNNIRQS_W {
        CNNIRQS_W { w: self }
    }
    #[doc = "Bit 3 - This bit sets CMPIRQ bit. If this bit is written to '1', CMPIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn cmpirqs(&mut self) -> CMPIRQS_W {
        CMPIRQS_W { w: self }
    }
    #[doc = "Bit 4 - This bit sets URIRQ bit. If this bit is written to '1', URIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn urirqs(&mut self) -> URIRQS_W {
        URIRQS_W { w: self }
    }
    #[doc = "Bit 5 - This bit sets RWKIRQ bit. If this bit is written to '1', RWKIRQ is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn rwkirqs(&mut self) -> RWKIRQS_W {
        RWKIRQS_W { w: self }
    }
    #[doc = "Bit 6 - BCNFTEST interrupt. This bit is test bit"]
    #[inline(always)]
    pub fn rsvd_6(&mut self) -> RSVD_6_W {
        RSVD_6_W { w: self }
    }
    #[doc = "Bit 7 - This bit sets TCAN bit. If this bit is written to '1', TCAN is set to '1'. However, if this bit is written with '0', its value is ignored."]
    #[inline(always)]
    pub fn tcans(&mut self) -> TCANS_W {
        TCANS_W { w: self }
    }
}
|
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT license.
*/
#![allow(dead_code)] // Todo: Remove this when the disk index query code is complete.
use crate::common::ANNError;
use platform::{FileHandle, IOCompletionPort};
// The IOContext struct for disk I/O. One for each thread.
// The IOContext struct for disk I/O. One for each thread.
pub struct IOContext {
    /// Current state of this context's read (see `Status`).
    pub status: Status,
    /// Handle to the file being read.
    pub file_handle: FileHandle,
    /// Completion port associated with this context's I/O.
    pub io_completion_port: IOCompletionPort,
}
impl Default for IOContext {
    /// A fresh context: status `ReadWait` with default platform handles.
    fn default() -> Self {
        Self {
            status: Status::ReadWait,
            file_handle: FileHandle::default(),
            io_completion_port: IOCompletionPort::default(),
        }
    }
}
impl IOContext {
    /// Creates a new context; equivalent to `IOContext::default()`.
    pub fn new() -> Self {
        Self::default()
    }
}
/// State of a read issued through an `IOContext`.
pub enum Status {
    /// Waiting for the read to complete.
    ReadWait,
    /// The read completed successfully.
    ReadSuccess,
    /// The read failed with the contained error.
    ReadFailed(ANNError),
    /// Processing of the read result has finished.
    ProcessComplete,
}
|
use clap::{App, AppSettings, Arg, SubCommand};
use std::io::Write;
mod commands;
use self::commands::*;
/// On-disk location of the Tantivy Hackernews index.
pub const TANINDEX: &str = "/tmp/tantivy/idxhn";
/// CLI entry point: dispatches to the `domain` or `time` subcommand and exits
/// non-zero on failure.
fn main() {
    let cli_options = App::new("Tantivy Hackernews")
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .version(env!("CARGO_PKG_VERSION"))
        .author("Michael Angerman <stormasm@gmail.com>")
        .about("Tantivy Search for Hackernews.")
        .subcommand(
            SubCommand::with_name("domain").about("Index files").arg(
                Arg::with_name("file")
                    .short("f")
                    .long("file")
                    .value_name("file")
                    .help("File containing the documents to index."),
            ),
        )
        .subcommand(
            SubCommand::with_name("time").about("Index files").arg(
                Arg::with_name("file")
                    .short("f")
                    .long("file")
                    .value_name("file")
                    .help("File containing the documents to index."),
            ),
        )
        .get_matches();
    let (subcommand, some_options) = cli_options.subcommand();
    // `SubcommandRequiredElseHelp` guarantees a subcommand is present here;
    // `expect` documents that invariant instead of a bare `unwrap`.
    let options = some_options.expect("subcommand options should be present");
    let run_cli = match subcommand {
        "domain" => run_domain_cli,
        "time" => run_time_cli,
        _ => panic!("Subcommand {} is unknown", subcommand),
    };
    if let Err(ref e) = run_cli(options) {
        let stderr = &mut std::io::stderr();
        // Fixed typo in the message (was "Error writing ot stderr").
        let errmsg = "Error writing to stderr";
        writeln!(stderr, "{}", e).expect(errmsg);
        std::process::exit(1);
    }
}
|
pub mod attributes;
pub mod events;
pub use attributes::Attributes;
pub use events::Events;
use super::component::Component;
use super::component::Composable;
/// virtual HTML element
pub enum Html<Msg> {
    /// A nested component (anything implementing `Composable`).
    Composable(Box<dyn Composable>),
    /// A plain text node.
    TextNode(String),
    /// An element node with a tag name, children, attributes and event handlers.
    ElementNode {
        tag_name: String,
        children: Vec<Html<Msg>>,
        attributes: Attributes,
        events: Events<Msg>,
    },
}
impl<Msg> Html<Msg> {
    /// Creates Html<Msg> from component, boxing it as a `Composable` node.
    pub fn component<M, S, B>(component: Component<M, S, B>) -> Self
    where
        M: 'static,
        S: 'static,
        B: 'static,
    {
        Html::Composable(Box::new(component))
    }
    /// Creates Html<Msg> from a non-validated text
    pub fn text(text: impl Into<String>) -> Self {
        Html::TextNode(text.into())
    }
/// Creates Html<Msg> from element
pub fn node(
tag_name: impl Into<String>,
attributes: Attributes,
events: Events<Msg>,
children: Vec<Html<Msg>>,
) -> Self {
Html::ElementNode {
tag_name: tag_name.into(),
children,
attributes,
events,
}
}
pub fn a(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("a", attributes, events, children)
}
pub fn abbr(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("abbr", attributes, events, children)
}
pub fn address(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("address", attributes, events, children)
}
pub fn area(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("area", attributes, events, children)
}
pub fn article(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("article", attributes, events, children)
}
pub fn aside(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("aside", attributes, events, children)
}
pub fn audio(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("audio", attributes, events, children)
}
pub fn b(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("b", attributes, events, children)
}
pub fn bdi(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("bdi", attributes, events, children)
}
pub fn bdo(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("bdo", attributes, events, children)
}
pub fn blockquote(
attributes: Attributes,
events: Events<Msg>,
children: Vec<Html<Msg>>,
) -> Self {
Html::node("blockquote", attributes, events, children)
}
pub fn button(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("button", attributes, events, children)
}
pub fn br(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("br", attributes, events, children)
}
pub fn cite(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("cite", attributes, events, children)
}
pub fn caption(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("caption", attributes, events, children)
}
pub fn canvas(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("canvas", attributes, events, children)
}
pub fn code(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("code", attributes, events, children)
}
pub fn col(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("col", attributes, events, children)
}
pub fn colgroup(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("colgroup", attributes, events, children)
}
pub fn datalist(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("datalist", attributes, events, children)
}
pub fn details(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("details", attributes, events, children)
}
pub fn dd(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("dd", attributes, events, children)
}
pub fn dfn(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("dfn", attributes, events, children)
}
pub fn div(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("div", attributes, events, children)
}
pub fn data(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("data", attributes, events, children)
}
pub fn del(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("del", attributes, events, children)
}
pub fn dl(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("dl", attributes, events, children)
}
pub fn dt(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("dt", attributes, events, children)
}
pub fn em(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("em", attributes, events, children)
}
pub fn embed(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("embed", attributes, events, children)
}
pub fn fieldset(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("fieldset", attributes, events, children)
}
pub fn figcaption(
attributes: Attributes,
events: Events<Msg>,
children: Vec<Html<Msg>>,
) -> Self {
Html::node("figcaption", attributes, events, children)
}
pub fn figure(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("figure", attributes, events, children)
}
pub fn footer(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("footer", attributes, events, children)
}
pub fn form(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("form", attributes, events, children)
}
pub fn h1(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("h1", attributes, events, children)
}
pub fn h2(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("h2", attributes, events, children)
}
pub fn h3(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("h3", attributes, events, children)
}
pub fn h4(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("h4", attributes, events, children)
}
pub fn h5(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("h5", attributes, events, children)
}
pub fn h6(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("h6", attributes, events, children)
}
pub fn header(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("header", attributes, events, children)
}
pub fn hr(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("hr", attributes, events, children)
}
pub fn i(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("i", attributes, events, children)
}
pub fn iframe(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("iframe", attributes, events, children)
}
pub fn img(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("img", attributes, events, children)
}
pub fn input(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("input", attributes, events, children)
}
pub fn ins(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("ins", attributes, events, children)
}
pub fn kbd(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("kbd", attributes, events, children)
}
pub fn label(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("label", attributes, events, children)
}
pub fn legend(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("legend", attributes, events, children)
}
pub fn li(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("li", attributes, events, children)
}
pub fn main(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("main", attributes, events, children)
}
pub fn mark(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("mark", attributes, events, children)
}
pub fn map(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("map", attributes, events, children)
}
pub fn menu(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("menu", attributes, events, children)
}
pub fn menuitem(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("menuitem", attributes, events, children)
}
pub fn meter(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("meter", attributes, events, children)
}
pub fn nav(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("nav", attributes, events, children)
}
pub fn object(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("object", attributes, events, children)
}
pub fn ol(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("ol", attributes, events, children)
}
pub fn optgroup(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("optgroup", attributes, events, children)
}
pub fn option(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("option", attributes, events, children)
}
pub fn output(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("output", attributes, events, children)
}
pub fn p(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("p", attributes, events, children)
}
pub fn param(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("param", attributes, events, children)
}
pub fn picture(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("picture", attributes, events, children)
}
pub fn pre(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("pre", attributes, events, children)
}
pub fn progress(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("progress", attributes, events, children)
}
pub fn q(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("q", attributes, events, children)
}
pub fn rb(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("rb", attributes, events, children)
}
pub fn rp(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("rp", attributes, events, children)
}
pub fn rt(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("rt", attributes, events, children)
}
pub fn rtc(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("rtc", attributes, events, children)
}
pub fn rubu(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("ruby", attributes, events, children)
}
pub fn s(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("s", attributes, events, children)
}
pub fn samp(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("samp", attributes, events, children)
}
pub fn section(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("section", attributes, events, children)
}
pub fn select(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("select", attributes, events, children)
}
pub fn small(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("small", attributes, events, children)
}
pub fn source(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("source", attributes, events, children)
}
pub fn span(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("span", attributes, events, children)
}
pub fn strong(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("strong", attributes, events, children)
}
pub fn sub(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("sub", attributes, events, children)
}
pub fn summary(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("summary", attributes, events, children)
}
pub fn sup(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("sup", attributes, events, children)
}
pub fn table(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("table", attributes, events, children)
}
pub fn tbody(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("tbody", attributes, events, children)
}
pub fn td(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("td", attributes, events, children)
}
pub fn textarea(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("textarea", attributes, events, children)
}
pub fn tfoot(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("tfoot", attributes, events, children)
}
pub fn th(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("th", attributes, events, children)
}
pub fn thead(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("thead", attributes, events, children)
}
pub fn time(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("time", attributes, events, children)
}
pub fn tr(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("tr", attributes, events, children)
}
pub fn track(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("track", attributes, events, children)
}
pub fn u(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("u", attributes, events, children)
}
pub fn ul(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("ul", attributes, events, children)
}
pub fn var(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("var", attributes, events, children)
}
pub fn video(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("video", attributes, events, children)
}
pub fn wbr(attributes: Attributes, events: Events<Msg>, children: Vec<Html<Msg>>) -> Self {
Html::node("wbr", attributes, events, children)
}
}
|
use assert_cmd::prelude::CommandCargoExt;
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use kvs::KvsClient;
use rand::distributions::Alphanumeric;
use rand::prelude::*;
use rand::Rng;
use std::{
fmt,
process::Command,
sync::mpsc,
thread,
time::{Duration, Instant},
};
use tempfile::TempDir;
/// Benchmark parameters: which storage engine / thread pool to exercise,
/// plus pre-generated random keys and values.
#[derive(Debug)]
struct Para {
    // Storage engine under test ("kvs" or "sled" at call sites).
    engine: String,
    // Thread-pool implementation under test ("naive", "share" or "rayon").
    pool: String,
    // Random keys written/read during the benchmark.
    key: Vec<String>,
    // Random values paired one-to-one with `key`.
    value: Vec<String>,
}
impl Para {
    /// Builds a parameter set with `len` random keys and `len` random values.
    /// The RNG is seeded with a fixed constant so every run generates the
    /// same data, keeping benchmark results comparable.
    fn new(engine: String, pool: String, len: usize) -> Para {
        let mut seeded_rng: StdRng = rand::SeedableRng::seed_from_u64(1);
        let key = random_string_with_length(&mut seeded_rng, len);
        let value = random_string_with_length(&mut seeded_rng, len);
        Para {
            engine,
            pool,
            key,
            value,
        }
    }
}
impl fmt::Display for Para {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(Engine: {}, ThreadPool: {})", self.engine, self.pool)
}
}
// Length in characters of every generated random string.
const RANDOM_LENGTH: usize = 100;
/// Produces `len` random alphanumeric strings of `RANDOM_LENGTH` characters
/// each, drawn from the supplied (seeded) RNG.
fn random_string_with_length(rng: &mut StdRng, len: usize) -> Vec<String> {
    (0..len)
        .map(|_| rng.sample_iter(&Alphanumeric).take(RANDOM_LENGTH).collect())
        .collect()
}
fn thread_pool_write_bench(c: &mut Criterion) {
let mut group = c.benchmark_group("thread_pool_write");
vec!["kvs", "sled"]
.iter()
.map(|engine| {
vec!["naive", "share", "rayon"]
.iter()
.map(|pool| {
let para = Para::new(engine.to_string(), pool.to_string(), 100);
group.bench_with_input(
BenchmarkId::new(engine.to_string() + "-" + pool, ¶),
¶,
|b, s| {
b.iter_custom(|_iters| {
let (sender, receiver) = mpsc::sync_channel(0);
let temp_dir = TempDir::new().unwrap();
let mut server = Command::cargo_bin("kvs-server").unwrap();
let mut child = server
.args(&[
"--engine",
¶.engine,
"--threadpool",
¶.pool,
"--addr",
"127.0.0.1:5000",
])
.env("RUST_LOG", "warn")
.current_dir(&temp_dir)
.spawn()
.unwrap();
let handle = thread::spawn(move || {
let _ = receiver.recv(); // wait for main thread to finish
child.kill().expect("server exited before killed");
});
let threaded_rt = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
thread::sleep(Duration::from_secs(1));
let start = Instant::now();
for i in 0..s.key.len() {
let mut client =
KvsClient::new("127.0.0.1:5000".parse().unwrap());
threaded_rt
.block_on(
client.set(s.key[i].to_owned(), s.value[i].to_owned()),
)
.expect("Unable set");
}
let ret = start.elapsed();
sender.send(()).unwrap();
handle.join().unwrap();
ret
});
},
);
})
.for_each(|_| {})
})
.for_each(|_| {});
group.finish();
}
fn thread_pool_get_bench(c: &mut Criterion) {
let mut group = c.benchmark_group("thread_pool_get");
vec!["kvs", "sled"]
.iter()
.map(|engine| {
vec!["naive", "share", "rayon"]
.iter()
.map(|pool| {
let para = Para::new(engine.to_string(), pool.to_string(), 100);
group.bench_with_input(
BenchmarkId::new(engine.to_string() + "-" + pool, ¶),
¶,
|b, s| {
b.iter_custom(|_iters| {
let (sender, receiver) = mpsc::sync_channel(0);
let temp_dir = TempDir::new().unwrap();
let mut server = Command::cargo_bin("kvs-server").unwrap();
let mut child = server
.args(&[
"--engine",
¶.engine,
"--threadpool",
¶.pool,
"--addr",
"127.0.0.1:5000",
])
.env("RUST_LOG", "warn")
.current_dir(&temp_dir)
.spawn()
.unwrap();
let handle = thread::spawn(move || {
let _ = receiver.recv(); // wait for main thread to finish
child.kill().expect("server exited before killed");
});
let threaded_rt = tokio::runtime::Builder::new_multi_thread()
.enable_all()
.build()
.unwrap();
thread::sleep(Duration::from_secs(1));
for i in 0..s.key.len() {
let mut client =
KvsClient::new("127.0.0.1:5000".parse().unwrap());
threaded_rt
.block_on(
client.set(s.key[i].to_owned(), s.value[i].to_owned()),
)
.expect("Unable set");
}
let start = Instant::now();
for i in 0..s.key.len() {
let mut client =
KvsClient::new("127.0.0.1:5000".parse().unwrap());
threaded_rt
.block_on(client.get(s.key[i].to_owned()))
.expect("Unable get");
}
let ret = start.elapsed();
sender.send(()).unwrap();
handle.join().unwrap();
ret
});
},
);
})
.for_each(|_| {})
})
.for_each(|_| {});
group.finish();
}
// Register both benchmarks with Criterion and generate the binary's `main`.
criterion_group!(benches, thread_pool_write_bench, thread_pool_get_bench);
criterion_main!(benches);
|
// This file is part of Substrate.
// Copyright (C) 2018-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! The Substrate runtime. This can be compiled with `#[no_std]`, ready for Wasm.
#![cfg_attr(not(feature = "std"), no_std)]
// `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256.
#![recursion_limit = "256"]
use codec::{Decode, Encode, MaxEncodedLen};
use frame_support::{
construct_runtime,
dispatch::DispatchClass,
parameter_types,
traits::{
AsEnsureOriginWithArg, ConstU128, ConstU32, Currency, EitherOfDiverse, EqualPrivilegeOnly,
Everything, Imbalance, InstanceFilter, KeyOwnerProofSystem, LockIdentifier, Nothing,
OnUnbalanced, U128CurrencyToVote, WithdrawReasons,
},
weights::{
constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND},
ConstantMultiplier, IdentityFee, Weight,
},
PalletId, RuntimeDebug,
};
use frame_system::{
limits::{BlockLength, BlockWeights},
EnsureRoot,
};
pub use node_primitives::{AccountId, Signature};
use node_primitives::{AccountIndex, Balance, BlockNumber, Hash, Index, Moment};
use pallet_grandpa::fg_primitives;
use pallet_grandpa::{AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList};
use pallet_im_online::sr25519::AuthorityId as ImOnlineId;
use pallet_session::historical as pallet_session_historical;
pub use pallet_transaction_payment::{CurrencyAdapter, Multiplier, TargetedFeeAdjustment};
use pallet_transaction_payment::{FeeDetails, RuntimeDispatchInfo};
use sp_api::impl_runtime_apis;
use sp_authority_discovery::AuthorityId as AuthorityDiscoveryId;
use sp_core::{crypto::KeyTypeId, OpaqueMetadata, H160, H256, U256};
use sp_inherents::{CheckInherentsResult, InherentData};
use sp_runtime::traits::{
self, BlakeTwo256, Block as BlockT, Bounded, Convert, ConvertInto, DispatchInfoOf,
Dispatchable, NumberFor, OpaqueKeys, PostDispatchInfoOf, SaturatedConversion, StaticLookup,
UniqueSaturatedInto,
};
use sp_runtime::transaction_validity::{
TransactionPriority, TransactionSource, TransactionValidity, TransactionValidityError,
};
use sp_runtime::{
create_runtime_str, generic, impl_opaque_keys, ApplyExtrinsicResult, FixedPointNumber, Perbill,
Percent, Permill, Perquintill,
};
use sp_std::prelude::*;
#[cfg(any(feature = "std", test))]
use sp_version::NativeVersion;
use sp_version::RuntimeVersion;
use static_assertions::const_assert;
pub use pallet_micropayment;
use fp_rpc::{TransactionStatusV2 as TransactionStatus, TxPoolResponse};
use pallet_ethereum::{Call::transact, Transaction as EthereumTransaction};
use pallet_evm::FeeCalculator;
use pallet_evm::{Account as EVMAccount, EVMCurrencyAdapter, PairedAddressMapping, Runner};
mod precompiles;
use precompiles::FrontierPrecompiles;
#[cfg(any(feature = "std", test))]
pub use frame_system::Call as SystemCall;
#[cfg(any(feature = "std", test))]
pub use pallet_balances::Call as BalancesCall;
#[cfg(any(feature = "std", test))]
pub use pallet_staking::StakerStatus;
#[cfg(any(feature = "std", test))]
pub use sp_runtime::BuildStorage;
use sp_std::vec::Vec;
/// Implementations of some helper traits passed into runtime modules as associated types.
pub mod impls;
use impls::Author;
/// Constant values used within the runtime.
pub mod constants;
use constants::{currency::*, time::*};
use sp_runtime::generic::Era;
// Make the WASM binary available.
#[cfg(feature = "std")]
include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs"));
/// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics.
#[cfg(feature = "std")]
pub fn wasm_binary_unwrap() -> &'static [u8] {
    // `WASM_BINARY` comes from the `include!`d OUT_DIR file above and is
    // `None` when the runtime was built with `SKIP_WASM_BUILD`.
    WASM_BINARY.expect(
        "Development wasm binary is not available. This means the client is \
        built with `SKIP_WASM_BUILD` flag and it is only usable for \
        production chains. Please rebuild with the flag disabled.",
    )
}
/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know
/// the specifics of the runtime. They can then be made to be agnostic over specific formats
/// of data like extrinsics, allowing for them to continue syncing the network through upgrades
/// to even the core data structures.
pub mod opaque {
    use super::*;
    pub use sp_runtime::OpaqueExtrinsic as UncheckedExtrinsic;
    /// Opaque block header type.
    pub type Header = generic::Header<BlockNumber, BlakeTwo256>;
    /// Opaque block type.
    pub type Block = generic::Block<Header, UncheckedExtrinsic>;
    /// Opaque block identifier type.
    pub type BlockId = generic::BlockId<Block>;
    impl_opaque_keys! {
        // Session keys carried by validators: BABE (block production) and
        // GRANDPA (finality). `//` comments are stripped before the macro
        // sees its input, so these annotations are safe here.
        pub struct SessionKeys {
            pub babe: Babe,
            pub grandpa: Grandpa,
        }
    }
}
/// Runtime version.
#[sp_version::runtime_version]
pub const VERSION: RuntimeVersion = RuntimeVersion {
    spec_name: create_runtime_str!("deeper-chain"),
    impl_name: create_runtime_str!("deeper-chain"),
    authoring_version: 10,
    // Per convention: if the runtime behavior changes, increment spec_version
    // and set impl_version to 0. If only runtime
    // implementation changes and behavior does not, then leave spec_version as
    // is and increment impl_version.
    spec_version: 43,
    impl_version: 0,
    apis: RUNTIME_API_VERSIONS,
    // Bumped when the extrinsic format (encoding/signed extensions) changes.
    transaction_version: 14,
    state_version: 1,
};
/// The BABE epoch configuration at genesis.
pub const BABE_GENESIS_EPOCH_CONFIG: sp_consensus_babe::BabeEpochConfiguration =
    sp_consensus_babe::BabeEpochConfiguration {
        // Probability constant for primary slot assignment, defined elsewhere
        // in the crate's constants.
        c: PRIMARY_PROBABILITY,
        // Secondary plain (non-VRF) slots are allowed alongside primaries.
        allowed_slots: sp_consensus_babe::AllowedSlots::PrimaryAndSecondaryPlainSlots,
    };
/// Native version.
///
/// Pairs the runtime [`VERSION`] with the default "can author with" set.
#[cfg(any(feature = "std", test))]
pub fn native_version() -> NativeVersion {
    let can_author_with = Default::default();
    NativeVersion {
        runtime_version: VERSION,
        can_author_with,
    }
}
/// Negative imbalance (withdrawn funds) of the Balances pallet.
type NegativeImbalance = <Balances as Currency<AccountId>>::NegativeImbalance;
/// Splits transaction fees and tips between the treasury and the block author.
pub struct DealWithFees;
impl OnUnbalanced<NegativeImbalance> for DealWithFees {
    fn on_unbalanceds<B>(mut fees_then_tips: impl Iterator<Item = NegativeImbalance>) {
        // The iterator yields the fee imbalance first and, optionally, the tip.
        if let Some(fees) = fees_then_tips.next() {
            // for fees, 80% to treasury, 20% to author
            let mut split = fees.ration(80, 20);
            if let Some(tips) = fees_then_tips.next() {
                // for tips, if any, 80% to treasury, 20% to author (though this can be anything)
                tips.ration_merge_into(80, 20, &mut split);
            }
            Treasury::on_unbalanced(split.0);
            Author::on_unbalanced(split.1);
        }
    }
}
/// We assume that ~10% of the block weight is consumed by `on_initalize` handlers.
/// This is used to limit the maximal weight of a single extrinsic.
const AVERAGE_ON_INITIALIZE_RATIO: Perbill = Perbill::from_percent(10);
/// We allow `Normal` extrinsics to fill up the block up to 75%, the rest can be used
/// by Operational extrinsics.
const NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);
/// We allow for 2 seconds of compute with a 5 second average block time.
const MAXIMUM_BLOCK_WEIGHT: Weight = WEIGHT_PER_SECOND
    .saturating_mul(2u64)
    .set_proof_size(u64::MAX);
// NOTE(review): presumably the weight charged per unit of EVM gas; confirm
// this matches the gas-to-weight mapping configured for `pallet_evm`.
const WEIGHT_PER_GAS: u64 = 20_000;
parameter_types! {
    // Number of recent block hashes kept in storage.
    pub const BlockHashCount: BlockNumber = 2400;
    pub const Version: RuntimeVersion = VERSION;
    // Blocks are capped at 5 MiB; `Normal` extrinsics may use 75% of that.
    pub RuntimeBlockLength: BlockLength =
        BlockLength::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO);
    pub RuntimeBlockWeights: BlockWeights = BlockWeights::builder()
        .base_block(BlockExecutionWeight::get())
        .for_class(DispatchClass::all(), |weights| {
            weights.base_extrinsic = ExtrinsicBaseWeight::get();
        })
        .for_class(DispatchClass::Normal, |weights| {
            weights.max_total = Some(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT);
        })
        .for_class(DispatchClass::Operational, |weights| {
            weights.max_total = Some(MAXIMUM_BLOCK_WEIGHT);
            // Operational transactions have some extra reserved space, so that they
            // are included even if block reached `MAXIMUM_BLOCK_WEIGHT`.
            weights.reserved = Some(
                MAXIMUM_BLOCK_WEIGHT - NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT
            );
        })
        .avg_block_initialization(AVERAGE_ON_INITIALIZE_RATIO)
        .build_or_panic();
    // 42 is the generic Substrate SS58 address-format prefix.
    pub const SS58Prefix: u8 = 42;
}
// Sanity check: normal dispatch must leave room for `on_initialize` overhead.
const_assert!(NORMAL_DISPATCH_RATIO.deconstruct() >= AVERAGE_ON_INITIALIZE_RATIO.deconstruct());
// Core chain configuration: block limits, hashing, accounts and lookups.
impl frame_system::Config for Runtime {
    type BaseCallFilter = Everything;
    type BlockWeights = RuntimeBlockWeights;
    type BlockLength = RuntimeBlockLength;
    type DbWeight = RocksDbWeight;
    type RuntimeOrigin = RuntimeOrigin;
    type RuntimeCall = RuntimeCall;
    type Index = Index;
    type BlockNumber = BlockNumber;
    type Hash = Hash;
    type Hashing = BlakeTwo256;
    type AccountId = AccountId;
    // Account lookup is delegated to the Indices pallet (configured below).
    type Lookup = Indices;
    type Header = generic::Header<BlockNumber, BlakeTwo256>;
    type RuntimeEvent = RuntimeEvent;
    type BlockHashCount = BlockHashCount;
    type Version = Version;
    type PalletInfo = PalletInfo;
    type AccountData = pallet_balances::AccountData<Balance>;
    type OnNewAccount = ();
    type OnKilledAccount = ();
    type SystemWeightInfo = frame_system::weights::SubstrateWeight<Runtime>;
    type SS58Prefix = SS58Prefix;
    type OnSetCode = ();
    type MaxConsumers = frame_support::traits::ConstU32<16>;
}
// Randomness via the collective-flip pallet (no extra configuration needed).
impl pallet_randomness_collective_flip::Config for Runtime {}
// Utility pallet: batched and derived-origin dispatch.
impl pallet_utility::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type RuntimeCall = RuntimeCall;
    type PalletsOrigin = OriginCaller;
    type WeightInfo = pallet_utility::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
    // One storage item; key size is 32; value is size 4+4+16+32 bytes = 56 bytes.
    // NOTE(review): the byte count passed below (88) disagrees with the
    // "56 bytes" stated above — confirm which reflects the actual multisig
    // storage entry size.
    pub const DepositBase: Balance = deposit(1, 88);
    // Additional storage item size of 32 bytes.
    pub const DepositFactor: Balance = deposit(0, 32);
    pub const MaxSignatories: u16 = 100;
}
// Multisig pallet: dispatch calls that require multiple approvals.
impl pallet_multisig::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type RuntimeCall = RuntimeCall;
    type Currency = Balances;
    type DepositBase = DepositBase;
    type DepositFactor = DepositFactor;
    type MaxSignatories = MaxSignatories;
    type WeightInfo = pallet_multisig::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
    // One storage item; key size 32, value size 8; .
    pub const ProxyDepositBase: Balance = deposit(1, 8);
    // Additional storage item size of 33 bytes.
    pub const ProxyDepositFactor: Balance = deposit(0, 33);
    pub const MaxProxies: u16 = 32;
    pub const AnnouncementDepositBase: Balance = deposit(1, 8);
    pub const AnnouncementDepositFactor: Balance = deposit(0, 66);
    pub const MaxPending: u16 = 32;
}
/// The type used to represent the kinds of proxying allowed.
#[derive(
    Copy,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Encode,
    Decode,
    RuntimeDebug,
    MaxEncodedLen,
    scale_info::TypeInfo,
)]
pub enum ProxyType {
    // May dispatch any call. (Plain `//` comments are used on variants so the
    // derived SCALE/TypeInfo metadata is not affected.)
    Any,
    // Any call that cannot move funds; see the `InstanceFilter` impl.
    NonTransfer,
    // Governance-related calls only.
    Governance,
    // Staking calls only.
    Staking,
}
impl Default for ProxyType {
    fn default() -> Self {
        // The default proxy type is the fully permissive `Any`.
        Self::Any
    }
}
impl InstanceFilter<RuntimeCall> for ProxyType {
    /// Returns `true` when a proxy of this kind is allowed to dispatch `c`.
    fn filter(&self, c: &RuntimeCall) -> bool {
        match self {
            // `Any` proxies may dispatch every call.
            ProxyType::Any => true,
            // `NonTransfer` excludes the calls that can move funds.
            ProxyType::NonTransfer => !matches!(
                c,
                RuntimeCall::Balances(..)
                    | RuntimeCall::Vesting(pallet_vesting::Call::vested_transfer { .. })
                    | RuntimeCall::Indices(pallet_indices::Call::transfer { .. })
            ),
            // `Governance` covers the democracy/collective/treasury pallets.
            ProxyType::Governance => matches!(
                c,
                RuntimeCall::Democracy(..)
                    | RuntimeCall::Council(..)
                    | RuntimeCall::Society(..)
                    | RuntimeCall::TechnicalCommittee(..)
                    | RuntimeCall::Elections(..)
                    | RuntimeCall::Treasury(..)
            ),
            ProxyType::Staking => matches!(c, RuntimeCall::Staking(..)),
        }
    }
    /// `true` when this proxy kind permits at least everything `o` permits.
    fn is_superset(&self, o: &Self) -> bool {
        // Every kind is a superset of itself.
        if self == o {
            return true;
        }
        match self {
            // `Any` dominates all other kinds.
            ProxyType::Any => true,
            // `NonTransfer` dominates everything except `Any`.
            ProxyType::NonTransfer => !matches!(o, ProxyType::Any),
            // `Governance` and `Staking` dominate nothing but themselves.
            _ => false,
        }
    }
}
// Proxy pallet: accounts may delegate dispatch rights per `ProxyType`.
impl pallet_proxy::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type RuntimeCall = RuntimeCall;
    type Currency = Balances;
    type ProxyType = ProxyType;
    type ProxyDepositBase = ProxyDepositBase;
    type ProxyDepositFactor = ProxyDepositFactor;
    type MaxProxies = MaxProxies;
    type WeightInfo = pallet_proxy::weights::SubstrateWeight<Runtime>;
    type MaxPending = MaxPending;
    type CallHasher = BlakeTwo256;
    type AnnouncementDepositBase = AnnouncementDepositBase;
    type AnnouncementDepositFactor = AnnouncementDepositFactor;
}
parameter_types! {
    // Scheduled calls may consume at most 80% of the maximum block weight.
    pub MaximumSchedulerWeight: Weight = Perbill::from_percent(80) *
        RuntimeBlockWeights::get().max_block;
    pub const MaxScheduledPerBlock: u32 = 50;
    pub const NoPreimagePostponement: Option<u32> = Some(10);
}
// Scheduler: either root or at least half of the council may schedule calls.
impl pallet_scheduler::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type RuntimeOrigin = RuntimeOrigin;
    type PalletsOrigin = OriginCaller;
    type RuntimeCall = RuntimeCall;
    type MaximumWeight = MaximumSchedulerWeight;
    type ScheduleOrigin = EitherOfDiverse<
        EnsureRoot<AccountId>,
        pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 1, 2>,
    >;
    type MaxScheduledPerBlock = MaxScheduledPerBlock;
    type WeightInfo = pallet_scheduler::weights::SubstrateWeight<Runtime>;
    type OriginPrivilegeCmp = EqualPrivilegeOnly;
    type Preimages = Preimage;
}
parameter_types! {
// NOTE(review): `PreimageMaxSize` is not referenced in the visible
// `pallet_preimage::Config` below — confirm whether it is still consumed
// anywhere before removing.
pub const PreimageMaxSize: u32 = 4096 * 1024;
pub const PreimageBaseDeposit: Balance = 1 * DOLLARS;
}
// Preimage pallet: on-chain storage of call preimages referenced by hash
// (used by scheduler and democracy); deposits discourage bloat.
impl pallet_preimage::Config for Runtime {
type WeightInfo = pallet_preimage::weights::SubstrateWeight<Runtime>;
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type ManagerOrigin = EnsureRoot<AccountId>;
type BaseDeposit = PreimageBaseDeposit;
// Per-byte deposit constant is declared in the democracy parameter block.
type ByteDeposit = PreimageByteDeposit;
}
parameter_types! {
pub const EpochDuration: u64 = EPOCH_DURATION_IN_SLOTS;
pub const ExpectedBlockTime: Moment = MILLISECS_PER_BLOCK;
// Equivocation reports remain valid for a full bonding period, measured
// in epochs: bonding eras * sessions per era * epoch length.
pub const ReportLongevity: u64 =
BondingDuration::get() as u64 * SessionsPerEra::get() as u64 * EpochDuration::get();
}
// BABE block production: epoch changes are driven externally (by the
// session pallet) and equivocations are proven via historical session data.
impl pallet_babe::Config for Runtime {
type EpochDuration = EpochDuration;
type ExpectedBlockTime = ExpectedBlockTime;
type EpochChangeTrigger = pallet_babe::ExternalTrigger;
type DisabledValidators = Session;
type KeyOwnerProofSystem = Historical;
type KeyOwnerProof = <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(
KeyTypeId,
pallet_babe::AuthorityId,
)>>::Proof;
type KeyOwnerIdentification = <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(
KeyTypeId,
pallet_babe::AuthorityId,
)>>::IdentificationTuple;
type HandleEquivocation =
pallet_babe::EquivocationHandler<Self::KeyOwnerIdentification, Offences, ReportLongevity>;
type WeightInfo = ();
type MaxAuthorities = MaxAuthorities;
}
parameter_types! {
// Deposit reserved for claiming a short account index.
pub const IndexDeposit: Balance = 1 * DPR;
}
// Indices pallet: short numeric aliases for account addresses.
impl pallet_indices::Config for Runtime {
type AccountIndex = AccountIndex;
type Currency = Balances;
type Deposit = IndexDeposit;
type RuntimeEvent = RuntimeEvent;
type WeightInfo = pallet_indices::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
// Accounts below this balance are reaped (dust-collected).
pub const ExistentialDeposit: Balance = DPR / 5;
// For weight estimation, we assume that the most locks on an individual account will be 50.
// This number may need to be adjusted in the future if this assumption no longer holds true.
pub const MaxLocks: u32 = 50;
// Minimum DPR burn accepted by the operation pallet (see its config below).
pub const MinimumBurnedDPR: Balance = 50 * DPR;
}
// Balances pallet: native DPR token accounting; account data is stored in
// the system pallet (`AccountStore`). No reserve identifiers are used
// (`MaxReserves = ()`).
impl pallet_balances::Config for Runtime {
type MaxLocks = MaxLocks;
type Balance = Balance;
type DustRemoval = ();
type RuntimeEvent = RuntimeEvent;
type ExistentialDeposit = ExistentialDeposit;
type AccountStore = frame_system::Pallet<Runtime>;
type MaxReserves = ();
type ReserveIdentifier = [u8; 8];
type WeightInfo = pallet_balances::weights::SubstrateWeight<Runtime>;
}
// Custom pallet: per-user privilege flags; only Root may force changes.
impl pallet_user_privileges::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type ForceOrigin = EnsureRoot<AccountId>;
type WeightInfo = pallet_user_privileges::weights::SubstrateWeight<Runtime>;
}
// Custom operations pallet; burned funds are routed to the Treasury and the
// member cap reuses the `MaxLocks` constant (50).
impl pallet_operation::Config for Runtime {
type MaxMember = MaxLocks;
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type BurnedTo = Treasury;
type OPWeightInfo = pallet_operation::weights::SubstrateWeight<Runtime>;
type MinimumBurnedDPR = MinimumBurnedDPR;
type CreditInterface = Credit;
type UserPrivilegeInterface = UserPrivileges;
}
parameter_types! {
pub const TransactionByteFee: Balance = 1 * MILLICENTS;
// Fee multiplier targets blocks being 25% full and adjusts around that.
pub const TargetBlockFullness: Perquintill = Perquintill::from_percent(25);
pub AdjustmentVariable: Multiplier = Multiplier::saturating_from_rational(1, 100_000);
pub MinimumMultiplier: Multiplier = Multiplier::saturating_from_rational(1, 1_000_000_000u128);
pub MaximumMultiplier: Multiplier = Bounded::max_value();
// Operational transactions pay up to 5x the normal fee for priority.
pub const OperationalFeeMultiplier: u8 = 5;
}
// Transaction payment: fees are weight-proportional (`IdentityFee`) plus a
// per-byte length fee, with a congestion-targeted multiplier; collected
// fees go to `DealWithFees`.
impl pallet_transaction_payment::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type OnChargeTransaction = CurrencyAdapter<Balances, DealWithFees>;
type WeightToFee = IdentityFee<Balance>;
type FeeMultiplierUpdate = TargetedFeeAdjustment<
Self,
TargetBlockFullness,
AdjustmentVariable,
MinimumMultiplier,
MaximumMultiplier,
>;
type OperationalFeeMultiplier = OperationalFeeMultiplier;
type LengthToFee = ConstantMultiplier<Balance, TransactionByteFee>;
}
parameter_types! {
// Consecutive timestamps must advance by at least half a slot.
pub const MinimumPeriod: Moment = SLOT_DURATION / 2;
}
// Timestamp pallet; BABE is notified on each timestamp set.
impl pallet_timestamp::Config for Runtime {
type Moment = Moment;
type OnTimestampSet = Babe;
type MinimumPeriod = MinimumPeriod;
type WeightInfo = pallet_timestamp::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const UncleGenerations: BlockNumber = 5;
}
// Authorship: the block author is resolved from the BABE authority index via
// the session pallet; Staking and ImOnline are notified of authored blocks.
impl pallet_authorship::Config for Runtime {
type FindAuthor = pallet_session::FindAccountFromAuthorIndex<Self, Babe>;
type UncleGenerations = UncleGenerations;
type FilterUncle = ();
type EventHandler = (Staking, ImOnline);
}
// Bundle of per-validator session keys; field order defines the key layout
// validators must register (GRANDPA, BABE, ImOnline, AuthorityDiscovery).
impl_opaque_keys! {
pub struct SessionKeys {
pub grandpa: Grandpa,
pub babe: Babe,
pub im_online: ImOnline,
pub authority_discovery: AuthorityDiscovery,
}
}
parameter_types! {
// NOTE(review): not referenced in the visible `pallet_session::Config`
// below — possibly left over from an older pallet version; confirm before
// removing.
pub const DisabledValidatorsThreshold: Perbill = Perbill::from_percent(17);
}
// Session pallet: session boundaries follow BABE epochs, the validator set
// comes from Staking (with historical root noting for equivocation proofs).
impl pallet_session::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type ValidatorId = <Self as frame_system::Config>::AccountId;
type ValidatorIdOf = pallet_staking::StashOf<Self>;
type ShouldEndSession = Babe;
type NextSessionRotation = Babe;
type SessionManager = pallet_session::historical::NoteHistoricalRoot<Self, Staking>;
type SessionHandler = <SessionKeys as OpaqueKeys>::KeyTypeIdProviders;
type Keys = SessionKeys;
type WeightInfo = pallet_session::weights::SubstrateWeight<Runtime>;
}
// Historical session data: stores staking exposures per validator so that
// offence proofs can identify past validators.
impl pallet_session::historical::Config for Runtime {
type FullIdentification = pallet_staking::Exposure<AccountId, Balance>;
type FullIdentificationOf = pallet_staking::ExposureOf<Runtime>;
}
// Parameters for the (customized) staking pallet configured below.
parameter_types! {
pub const SessionsPerEra: sp_staking::SessionIndex = 6;
pub const BondingDuration: pallet_staking::EraIndex = 24 * 28;
pub const SlashDeferDuration: pallet_staking::EraIndex = 24 * 7; // 1/4 the bonding duration.
pub const MiningReward: u128 = TOTAL_MINING_REWARD;
pub const AlertReward: u128 = 6_000_000_000_000_000_000_000_000;
pub const MaxDelegates: usize = 1;
pub const StakingPalletId: PalletId = PalletId(*b"stak_ing");
// Rewards are paid for all but the last 80 blocks of each era.
pub const RewardBlocksPerEra: BlockNumber = BLOCKS_PER_ERA - 80;
}
/// Converts a raw `u128` into `Balance`. The value is returned unchanged,
/// i.e. `Balance` is `u128` in this runtime.
pub struct NumberCurrencyConverter;
impl Convert<u128, Balance> for NumberCurrencyConverter {
    fn convert(value: u128) -> Balance {
        value
    }
}
// Customized staking pallet: credit-based (via `Credit`/`DeeperNode`
// interfaces) rather than pure NPoS; slashed funds go to the Treasury.
impl pallet_staking::Config for Runtime {
type PalletId = StakingPalletId;
type BlocksPerEra = RewardBlocksPerEra;
type Currency = Balances;
type CreditInterface = Credit;
type UserPrivilegeInterface = UserPrivileges;
type NodeInterface = DeeperNode;
type MaxDelegates = MaxDelegates;
type UnixTime = Timestamp;
type NumberToCurrency = NumberCurrencyConverter;
type RuntimeEvent = RuntimeEvent;
type Slash = Treasury; // send the slashed funds to the treasury.
type SessionsPerEra = SessionsPerEra;
type BondingDuration = BondingDuration;
type SlashDeferDuration = SlashDeferDuration;
/// A super-majority of the council can cancel the slash.
type SlashCancelOrigin = EitherOfDiverse<
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 3, 4>,
>;
type SessionInterface = Self;
type RuntimeCall = RuntimeCall;
type TotalMiningReward = MiningReward;
type AlertMiningReward = AlertReward;
type WeightInfo = pallet_staking::weights::SubstrateWeight<Runtime>;
type VerifySignatureInterface = CreditAccumulation;
type OperationInterface = Operation;
}
// Parameters for the democracy (referendum) pallet configured below.
parameter_types! {
pub const LaunchPeriod: BlockNumber = 5 * DAYS;
pub const VotingPeriod: BlockNumber = 5 * DAYS;
pub const FastTrackVotingPeriod: BlockNumber = 3 * HOURS;
pub const InstantAllowed: bool = true;
pub const MinimumDeposit: Balance = 1000 * DPR;
pub const EnactmentPeriod: BlockNumber = 2 * DAYS;
pub const CooloffPeriod: BlockNumber = 5 * DAYS;
// One cent: $10 / MB
pub const PreimageByteDeposit: Balance = 1 * MILLICENTS;
pub const MaxVotes: u32 = 100;
pub const MaxProposals: u32 = 100;
}
// Democracy pallet: public referenda; various council/technical-committee
// proportions (or Root) gate each origin, as documented inline per origin.
impl pallet_democracy::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type EnactmentPeriod = EnactmentPeriod;
type LaunchPeriod = LaunchPeriod;
type VotingPeriod = VotingPeriod;
type VoteLockingPeriod = EnactmentPeriod; // Same as EnactmentPeriod
type MinimumDeposit = MinimumDeposit;
/// A straight majority of the council can decide what their next motion is.
type ExternalOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 1, 2>,
frame_system::EnsureRoot<AccountId>,
>;
/// A super-majority can have the next scheduled referendum be a straight majority-carries vote.
type ExternalMajorityOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 3, 5>,
frame_system::EnsureRoot<AccountId>,
>;
/// A unanimous council can have the next scheduled referendum be a straight default-carries
/// (NTB) vote.
type ExternalDefaultOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 1, 1>,
frame_system::EnsureRoot<AccountId>,
>;
/// Two thirds of the technical committee can have an ExternalMajority/ExternalDefault vote
/// be tabled immediately and with a shorter voting/enactment period.
type FastTrackOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, TechnicalCollective, 2, 3>,
frame_system::EnsureRoot<AccountId>,
>;
type InstantOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, TechnicalCollective, 1, 1>,
frame_system::EnsureRoot<AccountId>,
>;
type InstantAllowed = InstantAllowed;
type FastTrackVotingPeriod = FastTrackVotingPeriod;
// To cancel a proposal which has been passed, 2/3 of the council must agree to it.
type CancellationOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 2, 3>,
EnsureRoot<AccountId>,
>;
// To cancel a proposal before it has been passed, the technical committee must be unanimous or
// Root must agree.
type CancelProposalOrigin = EitherOfDiverse<
pallet_collective::EnsureProportionAtLeast<AccountId, TechnicalCollective, 1, 1>,
EnsureRoot<AccountId>,
>;
type BlacklistOrigin = EnsureRoot<AccountId>;
// Any single technical committee member may veto a coming council proposal, however they can
// only do it once and it lasts only for the cooloff period.
type VetoOrigin = pallet_collective::EnsureMember<AccountId, TechnicalCollective>;
type CooloffPeriod = CooloffPeriod;
type Slash = Treasury;
type Scheduler = Scheduler;
type PalletsOrigin = OriginCaller;
type MaxVotes = MaxVotes;
type WeightInfo = pallet_democracy::weights::SubstrateWeight<Runtime>;
type MaxProposals = MaxProposals;
type Preimages = Preimage;
type MaxDeposits = ConstU32<100>;
type MaxBlacklisted = ConstU32<100>;
}
parameter_types! {
pub const CouncilMotionDuration: BlockNumber = 2 * DAYS;
pub const CouncilMaxProposals: u32 = 100;
pub const CouncilMaxMembers: u32 = 13;
}
// Council: collective instance 1; the prime member's vote is the default
// for abstaining members.
type CouncilCollective = pallet_collective::Instance1;
impl pallet_collective::Config<CouncilCollective> for Runtime {
type RuntimeOrigin = RuntimeOrigin;
type Proposal = RuntimeCall;
type RuntimeEvent = RuntimeEvent;
type MotionDuration = CouncilMotionDuration;
type MaxProposals = CouncilMaxProposals;
type MaxMembers = CouncilMaxMembers;
type DefaultVote = pallet_collective::PrimeDefaultVote;
type WeightInfo = pallet_collective::weights::SubstrateWeight<Runtime>;
}
// Parameters for elections-phragmen (council elections) configured below.
parameter_types! {
pub const CandidacyBond: Balance = 10 * DPR;
// 1 storage item created, key size is 32 bytes, value size is 16+16.
pub const VotingBondBase: Balance = deposit(1, 64);
// additional data per vote is 32 bytes (account id).
pub const VotingBondFactor: Balance = deposit(0, 32);
pub const TermDuration: BlockNumber = 7 * DAYS;
pub const DesiredMembers: u32 = 13;
pub const DesiredRunnersUp: u32 = 7;
pub const MaxVoters: u32 = 1000;
pub const MaxCandidates: u32 = 1000;
pub const ElectionsPhragmenPalletId: LockIdentifier = *b"phrelect";
}
// Make sure that there are no more than `MaxMembers` members elected via elections-phragmen.
const_assert!(DesiredMembers::get() <= CouncilMaxMembers::get());
// Phragmen elections: elects the council membership each term.
impl pallet_elections_phragmen::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type PalletId = ElectionsPhragmenPalletId;
type Currency = Balances;
type ChangeMembers = Council;
// NOTE: this implies that council's genesis members cannot be set directly and must come from
// this module.
type InitializeMembers = (); //Council;
type CurrencyToVote = U128CurrencyToVote;
type CandidacyBond = CandidacyBond;
type VotingBondBase = VotingBondBase;
type VotingBondFactor = VotingBondFactor;
// Losing candidates and kicked members are not slashed beyond their bond.
type LoserCandidate = ();
type KickedMember = ();
type DesiredMembers = DesiredMembers;
type DesiredRunnersUp = DesiredRunnersUp;
type TermDuration = TermDuration;
type WeightInfo = pallet_elections_phragmen::weights::SubstrateWeight<Runtime>;
type MaxVoters = MaxVoters;
type MaxCandidates = MaxCandidates;
}
parameter_types! {
pub const TechnicalMotionDuration: BlockNumber = 7 * DAYS;
pub const TechnicalMaxProposals: u32 = 100;
pub const TechnicalMaxMembers: u32 = 100;
}
// Technical committee: collective instance 2, managed via the membership
// pallet configured below.
type TechnicalCollective = pallet_collective::Instance2;
impl pallet_collective::Config<TechnicalCollective> for Runtime {
type RuntimeOrigin = RuntimeOrigin;
type Proposal = RuntimeCall;
type RuntimeEvent = RuntimeEvent;
type MotionDuration = TechnicalMotionDuration;
type MaxProposals = TechnicalMaxProposals;
type MaxMembers = TechnicalMaxMembers;
type DefaultVote = pallet_collective::PrimeDefaultVote;
type WeightInfo = pallet_collective::weights::SubstrateWeight<Runtime>;
}
// Origin check accepted throughout this runtime: either Root or more than
// half of the council.
type EnsureRootOrHalfCouncil = EitherOfDiverse<
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionMoreThan<AccountId, CouncilCollective, 1, 2>,
>;
// Membership pallet (instance 1): maintains the technical committee roster;
// all management origins require Root or half the council.
impl pallet_membership::Config<pallet_membership::Instance1> for Runtime {
type RuntimeEvent = RuntimeEvent;
type AddOrigin = EnsureRootOrHalfCouncil;
type RemoveOrigin = EnsureRootOrHalfCouncil;
type SwapOrigin = EnsureRootOrHalfCouncil;
type ResetOrigin = EnsureRootOrHalfCouncil;
type PrimeOrigin = EnsureRootOrHalfCouncil;
type MembershipInitialized = TechnicalCommittee;
type MembershipChanged = TechnicalCommittee;
type MaxMembers = TechnicalMaxMembers;
type WeightInfo = pallet_membership::weights::SubstrateWeight<Runtime>;
}
// Shared parameters for treasury, tips, bounties and child-bounties.
parameter_types! {
pub const ProposalBond: Permill = Permill::from_percent(5);
pub const ProposalBondMinimum: Balance = 1 * DPR;
pub const SpendPeriod: BlockNumber = 14 * DAYS;
// 1% of the treasury pot is burned every spend period.
pub const Burn: Permill = Permill::from_percent(1);
pub const TipCountdown: BlockNumber = 2 * DAYS;
pub const TipFindersFee: Percent = Percent::from_percent(20);
pub const TipReportDepositBase: Balance = 1 * DPR;
pub const DataDepositPerByte: Balance = 1 * CENTS;
pub const BountyDepositBase: Balance = 1 * DPR;
pub const CuratorDepositMultiplier: Permill = Permill::from_percent(50);
pub const CuratorDepositMin: Balance = 1 * DOLLARS;
pub const CuratorDepositMax: Balance = 100 * DOLLARS;
pub const BountyDepositPayoutDelay: BlockNumber = 1 * DAYS;
pub const TreasuryPalletId: PalletId = PalletId(*b"py/trsry");
pub const BountyUpdatePeriod: BlockNumber = 14 * DAYS;
pub const MaximumReasonLength: u32 = 16384;
pub const BountyCuratorDeposit: Permill = Permill::from_percent(50);
pub const BountyValueMinimum: Balance = 5 * DPR;
pub const MaxApprovals: u32 = 100;
pub const MaxActiveChildBountyCount: u32 = 5;
pub const ChildBountyValueMinimum: Balance = 1 * DOLLARS;
pub const ChildBountyCuratorDepositBase: Permill = Permill::from_percent(10);
}
// Treasury: spend proposals need Root or 3/5 of the council to approve;
// direct `spend` is disabled (`NeverEnsureOrigin`); leftover funds feed
// the bounties pallet.
impl pallet_treasury::Config for Runtime {
type PalletId = TreasuryPalletId;
type Currency = Balances;
type ApproveOrigin = EitherOfDiverse<
EnsureRoot<AccountId>,
pallet_collective::EnsureProportionAtLeast<AccountId, CouncilCollective, 3, 5>,
>;
type RejectOrigin = EnsureRootOrHalfCouncil;
type RuntimeEvent = RuntimeEvent;
type OnSlash = ();
type ProposalBond = ProposalBond;
type ProposalBondMinimum = ProposalBondMinimum;
type ProposalBondMaximum = ();
type SpendPeriod = SpendPeriod;
type Burn = Burn;
type BurnDestination = ();
type SpendFunds = Bounties;
type WeightInfo = pallet_treasury::weights::SubstrateWeight<Runtime>;
type MaxApprovals = MaxApprovals;
type SpendOrigin = frame_support::traits::NeverEnsureOrigin<Balance>;
}
// Bounties: treasury-funded work items with curators; child bounties are
// managed by the child-bounties pallet below.
impl pallet_bounties::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type BountyDepositBase = BountyDepositBase;
type BountyDepositPayoutDelay = BountyDepositPayoutDelay;
type BountyUpdatePeriod = BountyUpdatePeriod;
type BountyValueMinimum = BountyValueMinimum;
type CuratorDepositMultiplier = CuratorDepositMultiplier;
type CuratorDepositMin = CuratorDepositMin;
type CuratorDepositMax = CuratorDepositMax;
type DataDepositPerByte = DataDepositPerByte;
type MaximumReasonLength = MaximumReasonLength;
type WeightInfo = pallet_bounties::weights::SubstrateWeight<Runtime>;
type ChildBountyManager = ChildBounties;
}
// Child bounties: sub-bounties carved out of a parent bounty.
impl pallet_child_bounties::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type MaxActiveChildBountyCount = MaxActiveChildBountyCount;
type ChildBountyValueMinimum = ChildBountyValueMinimum;
type WeightInfo = pallet_child_bounties::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const MaximumCreditReward: u64 = 15;
}
// Tips (customized): elected council members act as tippers; integrates
// with the credit pallet via `CreditInterface`/`MaximumCreditReward`.
impl pallet_tips::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type DataDepositPerByte = DataDepositPerByte;
type MaximumReasonLength = MaximumReasonLength;
type MaximumCreditReward = MaximumCreditReward;
type Tippers = Elections;
type TipCountdown = TipCountdown;
type TipFindersFee = TipFindersFee;
type TipReportDepositBase = TipReportDepositBase;
type CreditInterface = Credit;
type WeightInfo = pallet_tips::weights::SubstrateWeight<Runtime>;
}
// Parameters for the Wasm contracts pallet configured below.
parameter_types! {
pub const DepositPerItem: Balance = deposit(1, 0);
pub const DepositPerByte: Balance = deposit(0, 1);
// NOTE(review): `RentFraction` is not referenced in the visible
// `pallet_contracts::Config` below; confirm it is still needed.
pub RentFraction: Perbill = Perbill::from_rational(1u32, 30 * DAYS);
// The lazy deletion runs inside on_initialize.
pub DeletionWeightLimit: Weight = AVERAGE_ON_INITIALIZE_RATIO *
RuntimeBlockWeights::get().max_block;
// The weight needed for decoding the queue should be less or equal than a fifth
// of the overall weight dedicated to the lazy deletion.
pub const DeletionQueueDepth: u32 = 128;
pub Schedule: pallet_contracts::Schedule<Runtime> = Default::default();
}
// Wasm smart contracts: contracts may not dispatch runtime calls
// (`CallFilter = Nothing`); storage deposits are charged per item/byte.
impl pallet_contracts::Config for Runtime {
type Time = Timestamp;
type Randomness = RandomnessCollectiveFlip;
type Currency = Balances;
type RuntimeEvent = RuntimeEvent;
type WeightPrice = pallet_transaction_payment::Pallet<Self>;
type WeightInfo = pallet_contracts::weights::SubstrateWeight<Self>;
type ChainExtension = ();
type DeletionQueueDepth = DeletionQueueDepth;
type DeletionWeightLimit = DeletionWeightLimit;
type Schedule = Schedule;
type RuntimeCall = RuntimeCall;
type CallFilter = Nothing;
// Maximum nesting depth of cross-contract calls.
type CallStack = [pallet_contracts::Frame<Self>; 31];
type DepositPerItem = DepositPerItem;
type DepositPerByte = DepositPerByte;
type AddressGenerator = pallet_contracts::DefaultAddressGenerator;
type MaxCodeLen = ConstU32<{ 128 * 1024 }>;
type MaxStorageKeyLen = ConstU32<128>;
}
// Sudo: a single privileged key may dispatch any call as Root.
impl pallet_sudo::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type RuntimeCall = RuntimeCall;
}
// Parameters shared by im-online and the authority-related pallets below.
parameter_types! {
pub const SessionDuration: BlockNumber = EPOCH_DURATION_IN_SLOTS as _;
// Heartbeats are unsigned and get maximum transaction priority.
pub const ImOnlineUnsignedPriority: TransactionPriority = TransactionPriority::max_value();
pub const MaxAuthorities: u32 = 100;
pub const MaxKeys: u32 = 10_000;
pub const MaxPeerInHeartbeats: u32 = 10_000;
pub const MaxPeerDataEncodingSize: u32 = 1_000;
}
// Offchain-worker support: builds and signs an extrinsic for a local call.
// Produces the full `SignedExtra` tuple used by this runtime, with a mortal
// era spanning the longest period `BlockHashCount` allows.
impl<LocalCall> frame_system::offchain::CreateSignedTransaction<LocalCall> for Runtime
where
RuntimeCall: From<LocalCall>,
{
/// Construct the signature payload for `call`, sign it with key `public`
/// belonging to `account`, and return `(call, signature_payload)`;
/// `None` if payload construction or signing fails.
fn create_transaction<C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>>(
call: RuntimeCall,
public: <Signature as traits::Verify>::Signer,
account: AccountId,
nonce: Index,
) -> Option<(
RuntimeCall,
<UncheckedExtrinsic as traits::Extrinsic>::SignaturePayload,
)> {
// No tip is attached to offchain-submitted transactions.
let tip = 0;
// take the biggest period possible.
let period = BlockHashCount::get()
.checked_next_power_of_two()
.map(|c| c / 2)
.unwrap_or(2) as u64;
let current_block = System::block_number()
.saturated_into::<u64>()
// The `System::block_number` is initialized with `n+1`,
// so the actual block number is `n`.
.saturating_sub(1);
let era = Era::mortal(period, current_block);
// Order must match the runtime's `SignedExtra` tuple exactly.
let extra = (
frame_system::CheckNonZeroSender::<Runtime>::new(),
frame_system::CheckSpecVersion::<Runtime>::new(),
frame_system::CheckTxVersion::<Runtime>::new(),
frame_system::CheckGenesis::<Runtime>::new(),
frame_system::CheckEra::<Runtime>::from(era),
frame_system::CheckNonce::<Runtime>::from(nonce),
frame_system::CheckWeight::<Runtime>::new(),
pallet_transaction_payment::ChargeTransactionPayment::<Runtime>::from(tip),
);
let raw_payload = SignedPayload::new(call, extra)
.map_err(|e| {
log::warn!("Unable to create signed payload: {:?}", e);
})
.ok()?;
let signature = raw_payload.using_encoded(|payload| C::sign(payload, public))?;
// Sender address goes through the indices lookup (MultiAddress).
let address = Indices::unlookup(account);
let (call, extra, _) = raw_payload.deconstruct();
Some((call, (address, signature.into(), extra)))
}
}
// Key/signature types used by the offchain signing machinery.
impl frame_system::offchain::SigningTypes for Runtime {
type Public = <Signature as traits::Verify>::Signer;
type Signature = Signature;
}
// Allows any pallet's call to be submitted from offchain workers as an
// `UncheckedExtrinsic`.
impl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime
where
RuntimeCall: From<C>,
{
type Extrinsic = UncheckedExtrinsic;
type OverarchingCall = RuntimeCall;
}
// ImOnline: validators send heartbeats each session; unresponsive ones are
// reported to the offences pallet.
impl pallet_im_online::Config for Runtime {
type AuthorityId = ImOnlineId;
type RuntimeEvent = RuntimeEvent;
type NextSessionRotation = Babe;
type ValidatorSet = Historical;
type ReportUnresponsiveness = Offences;
type UnsignedPriority = ImOnlineUnsignedPriority;
type WeightInfo = pallet_im_online::weights::SubstrateWeight<Runtime>;
type MaxKeys = MaxKeys;
type MaxPeerInHeartbeats = MaxPeerInHeartbeats;
type MaxPeerDataEncodingSize = MaxPeerDataEncodingSize;
}
parameter_types! {
// NOTE(review): not referenced in the visible `pallet_offences::Config`
// below — possibly left over from an older pallet version; confirm before
// removing.
pub OffencesWeightSoftLimit: Weight = Perbill::from_percent(60) *
RuntimeBlockWeights::get().max_block;
}
// Offences: reported misbehaviour is handed to Staking for slashing.
impl pallet_offences::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type IdentificationTuple = pallet_session::historical::IdentificationTuple<Self>;
type OnOffenceHandler = Staking;
}
// Authority discovery: lets nodes find current/next authorities on the network.
impl pallet_authority_discovery::Config for Runtime {
type MaxAuthorities = MaxAuthorities;
}
// GRANDPA finality: equivocations are proven via historical session data
// and reported to the offences pallet, valid for `ReportLongevity`.
impl pallet_grandpa::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type KeyOwnerProofSystem = Historical;
type KeyOwnerProof =
<Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(KeyTypeId, GrandpaId)>>::Proof;
type KeyOwnerIdentification = <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(
KeyTypeId,
GrandpaId,
)>>::IdentificationTuple;
type HandleEquivocation = pallet_grandpa::EquivocationHandler<
Self::KeyOwnerIdentification,
Offences,
ReportLongevity,
>;
type WeightInfo = ();
type MaxAuthorities = MaxAuthorities;
}
parameter_types! {
pub const BasicDeposit: Balance = 10 * DPR; // 258 bytes on-chain
pub const FieldDeposit: Balance = 250 * CENTS; // 66 bytes on-chain
pub const SubAccountDeposit: Balance = 2 * DPR; // 53 bytes on-chain
pub const MaxSubAccounts: u32 = 100;
pub const MaxAdditionalFields: u32 = 100;
pub const MaxRegistrars: u32 = 20;
}
// Identity: on-chain identity registration with registrar judgements;
// slashed identity deposits go to the Treasury.
impl pallet_identity::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type BasicDeposit = BasicDeposit;
type FieldDeposit = FieldDeposit;
type SubAccountDeposit = SubAccountDeposit;
type MaxSubAccounts = MaxSubAccounts;
type MaxAdditionalFields = MaxAdditionalFields;
type MaxRegistrars = MaxRegistrars;
type Slashed = Treasury;
type ForceOrigin = EnsureRootOrHalfCouncil;
type RegistrarOrigin = EnsureRootOrHalfCouncil;
type WeightInfo = pallet_identity::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const ConfigDepositBase: Balance = 5 * DPR;
pub const FriendDepositFactor: Balance = 50 * CENTS;
pub const MaxFriends: u16 = 9;
pub const RecoveryDeposit: Balance = 5 * DPR;
}
// Recovery: social account recovery via a set of trusted friends.
impl pallet_recovery::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type WeightInfo = pallet_recovery::weights::SubstrateWeight<Runtime>;
type RuntimeCall = RuntimeCall;
type Currency = Balances;
type ConfigDepositBase = ConfigDepositBase;
type FriendDepositFactor = FriendDepositFactor;
type MaxFriends = MaxFriends;
type RecoveryDeposit = RecoveryDeposit;
}
parameter_types! {
pub const CandidateDeposit: Balance = 10 * DPR;
pub const WrongSideDeduction: Balance = 2 * DPR;
pub const MaxStrikes: u32 = 10;
pub const RotationPeriod: BlockNumber = 80 * HOURS;
pub const PeriodSpend: Balance = 500 * DPR;
pub const MaxLockDuration: BlockNumber = 36 * 30 * DAYS;
pub const ChallengePeriod: BlockNumber = 7 * DAYS;
pub const MaxCandidateIntake: u32 = 10;
pub const SocietyPalletId: PalletId = PalletId(*b"py/socie");
}
// Society: membership club with candidate bidding, strikes and challenges;
// the founder set requires more than half the council.
impl pallet_society::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type PalletId = SocietyPalletId;
type Currency = Balances;
type Randomness = RandomnessCollectiveFlip;
type CandidateDeposit = CandidateDeposit;
type WrongSideDeduction = WrongSideDeduction;
type MaxStrikes = MaxStrikes;
type PeriodSpend = PeriodSpend;
type MembershipChanged = ();
type RotationPeriod = RotationPeriod;
type MaxLockDuration = MaxLockDuration;
type FounderSetOrigin =
pallet_collective::EnsureProportionMoreThan<AccountId, CouncilCollective, 1, 2>;
type SuspensionJudgementOrigin = pallet_society::EnsureFounder<Runtime>;
type MaxCandidateIntake = MaxCandidateIntake;
type ChallengePeriod = ChallengePeriod;
}
parameter_types! {
pub const MinVestedTransfer: Balance = 100 * DPR;
// Unvested funds may still be used for everything except transfers and
// reserving.
pub UnvestedFundsAllowedWithdrawReasons: WithdrawReasons =
WithdrawReasons::except(WithdrawReasons::TRANSFER | WithdrawReasons::RESERVE);
}
// Vesting: linear token unlock schedules, up to 28 per account.
impl pallet_vesting::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type BlockNumberToBalance = ConvertInto;
type MinVestedTransfer = MinVestedTransfer;
type WeightInfo = pallet_vesting::weights::SubstrateWeight<Runtime>;
type UnvestedFundsAllowedWithdrawReasons = UnvestedFundsAllowedWithdrawReasons;
const MAX_VESTING_SCHEDULES: u32 = 28;
}
// Merkle Mountain Range: accumulates (parent block number, hash) leaves
// under the "mmr" storage prefix.
impl pallet_mmr::Config for Runtime {
const INDEXING_PREFIX: &'static [u8] = b"mmr";
type Hashing = <Runtime as frame_system::Config>::Hashing;
type Hash = <Runtime as frame_system::Config>::Hash;
type LeafData = pallet_mmr::ParentNumberAndHash<Self>;
type OnNewRoot = ();
type WeightInfo = ();
}
parameter_types! {
pub const LotteryPalletId: PalletId = PalletId(*b"py/lotto");
pub const MaxCalls: u32 = 10;
pub const MaxGenerateRandom: u32 = 10;
}
// Lottery: Root-managed on-chain lottery; the pallet itself validates
// which calls count as entries (`ValidateCall = Lottery`).
impl pallet_lottery::Config for Runtime {
type PalletId = LotteryPalletId;
type RuntimeCall = RuntimeCall;
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type Randomness = RandomnessCollectiveFlip;
type ManagerOrigin = EnsureRoot<AccountId>;
type MaxCalls = MaxCalls;
type ValidateCall = Lottery;
type MaxGenerateRandom = MaxGenerateRandom;
type WeightInfo = pallet_lottery::weights::SubstrateWeight<Runtime>;
}
parameter_types! {
pub const AssetDeposit: Balance = 100 * DPR;
pub const ApprovalDeposit: Balance = 1 * DPR;
pub const StringLimit: u32 = 1000;
// Metadata deposits are effectively zero (`Balance::min_value()`).
pub const MetadataDepositBase: Balance = Balance::min_value();
pub const MetadataDepositPerByte: Balance = Balance::min_value();
}
// Fungible assets: user-creatable tokens with `u32` ids and `u64` balances.
impl pallet_assets::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Balance = u64;
type AssetId = u32;
type Currency = Balances;
type ForceOrigin = EnsureRoot<AccountId>;
type AssetDeposit = AssetDeposit;
type AssetAccountDeposit = ConstU128<DOLLARS>;
type StringLimit = StringLimit;
type MetadataDepositBase = MetadataDepositBase;
type MetadataDepositPerByte = MetadataDepositPerByte;
type ApprovalDeposit = ApprovalDeposit;
type Freezer = ();
type Extra = ();
type WeightInfo = pallet_assets::weights::SubstrateWeight<Runtime>;
}
// Parameters shared by the micropayment and credit pallets below.
parameter_types! {
pub const SecsPerBlock: u32 = MILLISECS_PER_BLOCK as u32 / 1000;
// 1 TiB of traffic per DPR.
pub const DataPerDPR: u64 = 1024 * 1024 * 1024 * 1024;
pub const MicropaymentBurn: Percent = Percent::from_percent(10);
}
#[cfg(feature = "runtime-benchmarks")]
mod bench_mark_account {
    //! Benchmark-only helper that derives deterministic sr25519 accounts
    //! from dev-style seed strings.
    use crate::{AccountId, Signature};
    use node_primitives::AccountCreator;
    use sp_io::crypto::sr25519_generate;
    use sp_runtime::{
        traits::{IdentifyAccount, Verify},
        MultiSigner,
    };
    use sp_std::borrow::ToOwned;
    type AccountPublic = <Signature as Verify>::Signer;
    pub struct DefaultAccountCreator;
    impl AccountCreator<AccountId> for DefaultAccountCreator {
        /// Derive an `AccountId` from the dev seed `"//<s>"` by generating an
        /// sr25519 key in the (test) keystore and converting its public key.
        fn create_account(s: &'static str) -> AccountId {
            // `s` is already `&str`; the previous `+ &s` added a needless
            // extra borrow (`&&str`).
            let seed = "//".to_owned() + s;
            let signer: MultiSigner =
                sr25519_generate(0.into(), Some(seed.as_bytes().to_vec())).into();
            // Return the converted account directly (was a redundant
            // `let account_id = ...; account_id`).
            AccountPublic::from(signer).into_account()
        }
    }
}
// Micropayment: off-chain payment channels for bandwidth; 10% of each
// payment is burned (`MicropaymentBurn`) and slashes go to the Treasury.
impl pallet_micropayment::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type CreditInterface = Credit;
type SecsPerBlock = SecsPerBlock;
type DataPerDPR = DataPerDPR;
type WeightInfo = pallet_micropayment::weights::SubstrateWeight<Runtime>;
type NodeInterface = DeeperNode;
type MicropaymentBurn = MicropaymentBurn;
type Slash = Treasury;
// Benchmarks need a way to mint deterministic test accounts.
#[cfg(feature = "runtime-benchmarks")]
type AccountCreator = bench_mark_account::DefaultAccountCreator;
}
parameter_types! {
pub const MinLockAmt: u32 = 100000;
pub const MaxDurationEras: u8 = 7;
pub const MaxIpLength: usize = 256;
}
// DeeperNode: device/node registry (IP registration, lock-ups per era).
impl pallet_deeper_node::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type MinLockAmt = MinLockAmt;
type MaxDurationEras = MaxDurationEras;
type BlocksPerEra = BlocksPerEra;
type MaxIpLength = MaxIpLength;
type WeightInfo = pallet_deeper_node::weights::SubstrateWeight<Runtime>;
type VerifySignatureInterface = CreditAccumulation;
}
// Parameters for the credit pallet configured further below.
parameter_types! {
pub const CreditAttenuationStep: u64 = 1;
pub const MinCreditToDelegate: u64 = 100;
pub const MicropaymentToCreditFactor: u128 = MICROPAYMENT_TO_CREDIT_FACTOR;
pub const BlocksPerEra: BlockNumber = BLOCKS_PER_ERA;
pub const DPRPerCreditBurned: Balance = 50 * DPR;
}
parameter_types! {
// Collection/item deposits are effectively zero (`Balance::min_value()`).
pub const CollectionDeposit: Balance = Balance::min_value();
pub const ItemDeposit: Balance = Balance::min_value();
pub const KeyLimit: u32 = 32;
pub const ValueLimit: u32 = 256;
}
// Uniques (NFTs): any signed account may create a collection; metadata
// deposit constants are shared with the assets pallet above.
impl pallet_uniques::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type CollectionId = u32;
type ItemId = u32;
type Currency = Balances;
type ForceOrigin = frame_system::EnsureRoot<AccountId>;
type CollectionDeposit = CollectionDeposit;
type ItemDeposit = ItemDeposit;
type MetadataDepositBase = MetadataDepositBase;
type AttributeDepositBase = MetadataDepositBase;
type DepositPerByte = MetadataDepositPerByte;
type StringLimit = StringLimit;
type KeyLimit = KeyLimit;
type ValueLimit = ValueLimit;
type WeightInfo = pallet_uniques::weights::SubstrateWeight<Runtime>;
type CreateOrigin = AsEnsureOriginWithArg<frame_system::EnsureSigned<AccountId>>;
type Locker = ();
}
parameter_types! {
pub const MaxBurnCreditPerAddress: u32 = 50;
}
// Credit: tracks node credit scores earned via micropayments/traffic;
// DPR burned for credit is routed to the Treasury.
impl pallet_credit::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type BlocksPerEra = BlocksPerEra;
type Currency = Balances;
type CreditAttenuationStep = CreditAttenuationStep;
type MinCreditToDelegate = MinCreditToDelegate;
type MicropaymentToCreditFactor = MicropaymentToCreditFactor;
type NodeInterface = DeeperNode;
type WeightInfo = pallet_credit::weights::SubstrateWeight<Runtime>;
type SecsPerBlock = SecsPerBlock;
type UnixTime = Timestamp;
type DPRPerCreditBurned = DPRPerCreditBurned;
type BurnedTo = Treasury;
type UserPrivilegeInterface = UserPrivileges;
type MaxBurnCreditPerAddress = MaxBurnCreditPerAddress;
}
// Credit accumulation: signature-verified credit accrual on top of the
// credit pallet.
impl pallet_credit_accumulation::Config for Runtime {
type RuntimeEvent = RuntimeEvent;
type Currency = Balances;
type CreditInterface = Credit;
type WeightInfo = pallet_credit_accumulation::weights::SubstrateWeight<Runtime>;
// Benchmarks need a way to mint deterministic test accounts.
#[cfg(feature = "runtime-benchmarks")]
type AccountCreator = bench_mark_account::DefaultAccountCreator;
}
// Handler for EVM transaction fees: everything goes to the Treasury.
pub struct EvmDealWithFees;
impl OnUnbalanced<NegativeImbalance> for EvmDealWithFees {
fn on_unbalanced(fees: NegativeImbalance) {
// 100% to treasury
Treasury::on_unbalanced(fees);
}
}
// EVM chain id: 518518 for testnet builds, 518 for production.
#[cfg(feature = "testnet")]
parameter_types! {
pub const ChainId: u64 = 518518;
}
#[cfg(not(feature = "testnet"))]
parameter_types! {
pub const ChainId: u64 = 518;
}
parameter_types! {
pub BlockGasLimit: U256 = U256::from(NORMAL_DISPATCH_RATIO * MAXIMUM_BLOCK_WEIGHT.ref_time() / WEIGHT_PER_GAS);
pub PrecompilesValue: FrontierPrecompiles<Runtime> = FrontierPrecompiles::<_>::new();
pub WeightPerGas: Weight = Weight::from_ref_time(WEIGHT_PER_GAS);
}
// Frontier EVM pallet configuration.
impl pallet_evm::Config for Runtime {
    // Gas price is supplied by the BaseFee pallet.
    type FeeCalculator = BaseFee;
    type GasWeightMapping = pallet_evm::FixedGasWeightMapping<Self>;
    type WeightPerGas = WeightPerGas;
    type BlockHashMapping = pallet_ethereum::EthereumBlockHashMapping<Self>;
    // Maps H160 EVM addresses to Substrate AccountIds.
    type AddressMapping = PairedAddressMapping<Runtime>;
    type Currency = Balances;
    type RuntimeEvent = RuntimeEvent;
    type Runner = pallet_evm::runner::stack::Runner<Self>;
    type PrecompilesType = FrontierPrecompiles<Self>;
    type PrecompilesValue = PrecompilesValue;
    type ChainId = ChainId;
    type BlockGasLimit = BlockGasLimit;
    // Fees are withdrawn via Balances and handed to EvmDealWithFees
    // (which sends everything to the Treasury).
    type OnChargeTransaction = EVMCurrencyAdapter<Balances, EvmDealWithFees>;
    // Resolve the block author through the session authority index (BABE).
    type FindAuthor = pallet_session::FindAccountFromAuthorIndex<Self, Babe>;
}
// Ethereum-compatibility pallet configuration.
impl pallet_ethereum::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type StateRoot = pallet_ethereum::IntermediateStateRoot<Self>;
}
frame_support::parameter_types! {
    // Bound divisor for per-block minimum gas price adjustment.
    pub BoundDivision: U256 = U256::from(1024);
}
impl pallet_dynamic_fee::Config for Runtime {
    type MinGasPriceBoundDivisor = BoundDivision;
}
frame_support::parameter_types! {
    pub IsActive: bool = true;
    // 1_000_000_000 wei = 1 gwei default base fee.
    pub DefaultBaseFeePerGas: U256 = U256::from(1_000_000_000);
    // 125_000 parts-per-million = 12.5% elasticity.
    pub DefaultElasticity: Permill = Permill::from_parts(125_000);
}
/// Block-fullness thresholds used by the base-fee pallet: the full
/// 0%..100% range with the ideal fullness at 50%.
pub struct BaseFeeThreshold;
impl pallet_base_fee::BaseFeeThreshold for BaseFeeThreshold {
    /// Lower bound of block fullness: 0%.
    fn lower() -> Permill {
        Permill::from_percent(0)
    }
    /// Target block fullness: 50% (500_000 parts per million).
    fn ideal() -> Permill {
        Permill::from_percent(50)
    }
    /// Upper bound of block fullness: 100%.
    fn upper() -> Permill {
        Permill::from_percent(100)
    }
}
// Base-fee pallet configuration; thresholds come from BaseFeeThreshold.
impl pallet_base_fee::Config for Runtime {
    type RuntimeEvent = RuntimeEvent;
    type Threshold = BaseFeeThreshold;
    type DefaultBaseFeePerGas = DefaultBaseFeePerGas;
    type DefaultElasticity = DefaultElasticity;
}
construct_runtime!(
    pub enum Runtime where
        Block = Block,
        NodeBlock = opaque::Block,
        UncheckedExtrinsic = UncheckedExtrinsic
    {
        // NOTE: the explicit `= N` indices are part of the on-chain
        // encoding and must never be reused or renumbered. Declaration
        // order (not index order) determines pallet hook execution order.
        System: frame_system::{Pallet, Call, Config, Storage, Event<T>} = 0,
        Scheduler: pallet_scheduler::{Pallet, Call, Storage, Event<T>} = 1,
        Preimage: pallet_preimage::{Pallet, Call, Storage, Event<T>} = 10,
        Babe: pallet_babe::{Pallet, Call, Storage, Config, ValidateUnsigned} = 2,
        Timestamp: pallet_timestamp::{Pallet, Call, Storage, Inherent} = 3,
        Indices: pallet_indices::{Pallet, Call, Storage, Config<T>, Event<T>} = 4,
        Balances: pallet_balances::{Pallet, Call, Storage, Config<T>, Event<T>} = 5,
        TransactionPayment: pallet_transaction_payment::{Pallet, Storage, Event<T>} = 32,
        Authorship: pallet_authorship::{Pallet, Call, Storage, Inherent} = 6,
        Staking: pallet_staking::{Pallet, Call, Config<T>, Storage, Event<T>} = 7,
        Offences: pallet_offences::{Pallet, Storage, Event} = 8,
        Historical: pallet_session_historical::{Pallet} = 33,
        Session: pallet_session::{Pallet, Call, Storage, Event, Config<T>} = 9,
        Grandpa: pallet_grandpa::{Pallet, Call, Storage, Config, Event, ValidateUnsigned} = 11,
        ImOnline: pallet_im_online::{Pallet, Call, Storage, Event<T>, ValidateUnsigned, Config<T>} = 12,
        AuthorityDiscovery: pallet_authority_discovery::{Pallet, Config} = 13,
        // Governance pallets.
        Democracy: pallet_democracy::{Pallet, Call, Storage, Config<T>, Event<T>} = 14,
        Council: pallet_collective::<Instance1>::{Pallet, Call, Storage, Origin<T>, Event<T>, Config<T>} = 15,
        TechnicalCommittee: pallet_collective::<Instance2>::{Pallet, Call, Storage, Origin<T>, Event<T>, Config<T>} = 16,
        Elections: pallet_elections_phragmen::{Pallet, Call, Storage, Event<T>, Config<T>} = 17,
        TechnicalMembership: pallet_membership::<Instance1>::{Pallet, Call, Storage, Event<T>, Config<T>} = 18,
        Treasury: pallet_treasury::{Pallet, Call, Storage, Config, Event<T>} = 19,
        Credit: pallet_credit::{Pallet, Call, Storage, Event<T>, Config<T>} = 20,
        Vesting: pallet_vesting::{Pallet, Call, Storage, Event<T>, Config<T>} = 25,
        Utility: pallet_utility::{Pallet, Call, Event} = 26,
        Identity: pallet_identity::{Pallet, Call, Storage, Event<T>} = 28,
        Proxy: pallet_proxy::{Pallet, Call, Storage, Event<T>} = 29,
        Multisig: pallet_multisig::{Pallet, Call, Storage, Event<T>} = 30,
        Bounties: pallet_bounties::{Pallet, Call, Storage, Event<T>} = 34,
        Tips: pallet_tips::{Pallet, Call, Storage, Event<T>} = 35,
        Contracts: pallet_contracts::{Pallet, Call, Storage, Event<T>} = 40,
        Sudo: pallet_sudo::{Pallet, Call, Config<T>, Storage, Event<T>} = 41,
        RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Pallet, Storage} = 42,
        Society: pallet_society::{Pallet, Call, Storage, Event<T>, Config<T>} = 43,
        Recovery: pallet_recovery::{Pallet, Call, Storage, Event<T>} = 44,
        Assets: pallet_assets::{Pallet, Call, Storage, Event<T>} = 45,
        Mmr: pallet_mmr::{Pallet, Storage} = 46,
        Lottery: pallet_lottery::{Pallet, Call, Storage, Event<T>} = 47,
        ChildBounties: pallet_child_bounties::{Pallet, Call, Storage, Event<T>} = 48,
        Uniques: pallet_uniques::{Pallet, Call, Storage, Event<T>} = 49,
        // Deeper-specific pallets.
        Micropayment: pallet_micropayment::{Pallet, Call, Storage, Event<T>} = 60,
        DeeperNode: pallet_deeper_node::{Pallet, Call, Storage, Event<T>, Config<T> } = 61,
        CreditAccumulation: pallet_credit_accumulation::{Pallet, Call, Storage, Event<T>} = 62,
        // Frontier / EVM pallets.
        Ethereum: pallet_ethereum::{Pallet, Call, Storage, Event, Config, Origin} = 80,
        EVM: pallet_evm::{Pallet, Config<T>, Call, Storage, Event<T>} = 81,
        BaseFee: pallet_base_fee::{Pallet, Call, Storage, Config<T>, Event} = 82,
        DynamicFee: pallet_dynamic_fee::{Pallet, Call, Storage, Config, Inherent} = 83,
        Operation: pallet_operation::{Pallet, Call, Storage, Event<T>} = 90,
        UserPrivileges: pallet_user_privileges::{Pallet, Call, Storage, Event<T>} = 91,
    }
);
// Converts raw Ethereum transactions into runtime extrinsics for the node
// RPC layer. Ethereum transactions carry their own signature, so they are
// wrapped as unsigned extrinsics (see SelfContainedCall below).
pub struct TransactionConverter;
impl fp_rpc::ConvertTransaction<UncheckedExtrinsic> for TransactionConverter {
    fn convert_transaction(&self, transaction: pallet_ethereum::Transaction) -> UncheckedExtrinsic {
        UncheckedExtrinsic::new_unsigned(
            pallet_ethereum::Call::<Runtime>::transact { transaction }.into(),
        )
    }
}
impl fp_rpc::ConvertTransaction<opaque::UncheckedExtrinsic> for TransactionConverter {
    fn convert_transaction(
        &self,
        transaction: pallet_ethereum::Transaction,
    ) -> opaque::UncheckedExtrinsic {
        let extrinsic = UncheckedExtrinsic::new_unsigned(
            pallet_ethereum::Call::<Runtime>::transact { transaction }.into(),
        );
        // Round-trip through SCALE to obtain the opaque representation.
        let encoded = extrinsic.encode();
        opaque::UncheckedExtrinsic::decode(&mut &encoded[..])
            .expect("Encoded extrinsic is always valid")
    }
}
/// The address format for describing accounts.
pub type Address = sp_runtime::MultiAddress<AccountId, AccountIndex>;
/// Block header type as expected by this runtime.
pub type Header = generic::Header<BlockNumber, BlakeTwo256>;
/// Block type as expected by this runtime.
pub type Block = generic::Block<Header, UncheckedExtrinsic>;
/// A Block signed with a Justification
pub type SignedBlock = generic::SignedBlock<Block>;
/// BlockId type as expected by this runtime.
pub type BlockId = generic::BlockId<Block>;
/// The SignedExtension to the basic transaction logic.
///
/// When you change this, you **MUST** modify [`sign`] in `bin/node/testing/src/keyring.rs`!
///
/// [`sign`]: <../../testing/src/keyring.rs.html>
pub type SignedExtra = (
    frame_system::CheckNonZeroSender<Runtime>,
    frame_system::CheckSpecVersion<Runtime>,
    frame_system::CheckTxVersion<Runtime>,
    frame_system::CheckGenesis<Runtime>,
    frame_system::CheckEra<Runtime>,
    frame_system::CheckNonce<Runtime>,
    frame_system::CheckWeight<Runtime>,
    pallet_transaction_payment::ChargeTransactionPayment<Runtime>,
);
/// The payload being signed in transactions.
pub type SignedPayload = generic::SignedPayload<RuntimeCall, SignedExtra>;
/// Unchecked extrinsic type as expected by this runtime.
/// Uses the `fp_self_contained` wrapper so Ethereum transactions can carry
/// their own signatures.
pub type UncheckedExtrinsic =
    fp_self_contained::UncheckedExtrinsic<Address, RuntimeCall, Signature, SignedExtra>;
/// Extrinsic type that has already been checked.
pub type CheckedExtrinsic =
    fp_self_contained::CheckedExtrinsic<AccountId, RuntimeCall, SignedExtra, H160>;
/// Added for "testing/src" and "cargo test"
pub type CheckedSignature = fp_self_contained::CheckedSignature<AccountId, SignedExtra, H160>;
/// Plain `sp_runtime::generic` counterpart of [`UncheckedExtrinsic`]
/// (without the self-contained Ethereum wrapper).
pub type GenericUncheckedExtrinsic =
    generic::UncheckedExtrinsic<Address, RuntimeCall, Signature, SignedExtra>;
/// Executive: handles dispatch to the various modules.
pub type Executive = frame_executive::Executive<
    Runtime,
    Block,
    frame_system::ChainContext<Runtime>,
    Runtime,
    AllPalletsWithSystem,
    (),
>;
// Lets Ethereum calls act as "self-contained": the signer (an H160) is
// recovered from the embedded EVM signature instead of a Substrate
// signature. All non-Ethereum calls fall through to normal handling.
impl fp_self_contained::SelfContainedCall for RuntimeCall {
    type SignedInfo = H160;
    fn is_self_contained(&self) -> bool {
        match self {
            RuntimeCall::Ethereum(call) => call.is_self_contained(),
            _ => false,
        }
    }
    fn check_self_contained(&self) -> Option<Result<Self::SignedInfo, TransactionValidityError>> {
        match self {
            RuntimeCall::Ethereum(call) => call.check_self_contained(),
            _ => None,
        }
    }
    fn validate_self_contained(
        &self,
        info: &Self::SignedInfo,
        dispatch_info: &DispatchInfoOf<RuntimeCall>,
        len: usize,
    ) -> Option<TransactionValidity> {
        match self {
            RuntimeCall::Ethereum(call) => call.validate_self_contained(info, dispatch_info, len),
            _ => None,
        }
    }
    fn pre_dispatch_self_contained(
        &self,
        info: &Self::SignedInfo,
        dispatch_info: &DispatchInfoOf<RuntimeCall>,
        len: usize,
    ) -> Option<Result<(), TransactionValidityError>> {
        match self {
            RuntimeCall::Ethereum(call) => {
                call.pre_dispatch_self_contained(info, dispatch_info, len)
            }
            _ => None,
        }
    }
    fn apply_self_contained(
        self,
        info: Self::SignedInfo,
    ) -> Option<sp_runtime::DispatchResultWithInfo<PostDispatchInfoOf<Self>>> {
        match self {
            // Dispatch with the Ethereum origin carrying the recovered
            // sender address.
            call @ RuntimeCall::Ethereum(pallet_ethereum::Call::transact { .. }) => {
                Some(call.dispatch(RuntimeOrigin::from(
                    pallet_ethereum::RawOrigin::EthereumTransaction(info),
                )))
            }
            _ => None,
        }
    }
}
/// MMR helper types.
mod mmr {
    use super::Runtime;
    pub use pallet_mmr::primitives::*;
    // Shorthand aliases resolved from the runtime's pallet_mmr config.
    pub type Leaf = <<Runtime as pallet_mmr::Config>::LeafData as LeafDataProvider>::LeafData;
    pub type Hash = <Runtime as pallet_mmr::Config>::Hash;
    pub type Hashing = <Runtime as pallet_mmr::Config>::Hashing;
}
impl_runtime_apis! {
    // Core runtime API: version reporting and block execution entry points.
    impl sp_api::Core<Block> for Runtime {
        fn version() -> RuntimeVersion {
            VERSION
        }
        fn execute_block(block: Block) {
            Executive::execute_block(block)
        }
        fn initialize_block(header: &<Block as BlockT>::Header) {
            Executive::initialize_block(header)
        }
    }
    // Exposes the SCALE-encoded runtime metadata to clients.
    impl sp_api::Metadata<Block> for Runtime {
        fn metadata() -> OpaqueMetadata {
            OpaqueMetadata::new(Runtime::metadata().into())
        }
    }
    // Block authoring: extrinsic application, finalization and inherents.
    impl sp_block_builder::BlockBuilder<Block> for Runtime {
        fn apply_extrinsic(extrinsic: <Block as BlockT>::Extrinsic) -> ApplyExtrinsicResult {
            Executive::apply_extrinsic(extrinsic)
        }
        fn finalize_block() -> <Block as BlockT>::Header {
            Executive::finalize_block()
        }
        fn inherent_extrinsics(data: InherentData) -> Vec<<Block as BlockT>::Extrinsic> {
            data.create_extrinsics()
        }
        fn check_inherents(block: Block, data: InherentData) -> CheckInherentsResult {
            data.check_extrinsics(&block)
        }
    }
    // Transaction-pool validation hook; delegates to the Executive.
    impl sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block> for Runtime {
        fn validate_transaction(
            source: TransactionSource,
            tx: <Block as BlockT>::Extrinsic,
            block_hash: <Block as BlockT>::Hash,
        ) -> TransactionValidity {
            Executive::validate_transaction(source, tx, block_hash)
        }
    }
    // Offchain worker entry point.
    impl sp_offchain::OffchainWorkerApi<Block> for Runtime {
        fn offchain_worker(header: &<Block as BlockT>::Header) {
            Executive::offchain_worker(header)
        }
    }
    // GRANDPA finality API: authority set queries plus equivocation
    // reporting (proofs are built from the Historical session pallet).
    impl fg_primitives::GrandpaApi<Block> for Runtime {
        fn grandpa_authorities() -> GrandpaAuthorityList {
            Grandpa::grandpa_authorities()
        }
        fn current_set_id() -> fg_primitives::SetId {
            Grandpa::current_set_id()
        }
        fn submit_report_equivocation_unsigned_extrinsic(
            equivocation_proof: fg_primitives::EquivocationProof<
                <Block as BlockT>::Hash,
                NumberFor<Block>,
            >,
            key_owner_proof: fg_primitives::OpaqueKeyOwnershipProof,
        ) -> Option<()> {
            // Undecodable proofs are rejected by returning None.
            let key_owner_proof = key_owner_proof.decode()?;
            Grandpa::submit_unsigned_equivocation_report(
                equivocation_proof,
                key_owner_proof,
            )
        }
        fn generate_key_ownership_proof(
            _set_id: fg_primitives::SetId,
            authority_id: GrandpaId,
        ) -> Option<fg_primitives::OpaqueKeyOwnershipProof> {
            use codec::Encode;
            Historical::prove((fg_primitives::KEY_TYPE, authority_id))
                .map(|p| p.encode())
                .map(fg_primitives::OpaqueKeyOwnershipProof::new)
        }
    }
    // BABE block-production API: epoch configuration and equivocation
    // reporting, mirroring the GRANDPA API above.
    impl sp_consensus_babe::BabeApi<Block> for Runtime {
        fn configuration() -> sp_consensus_babe::BabeConfiguration {
            // Fall back to the genesis epoch config when none is stored.
            let epoch_config = Babe::epoch_config().unwrap_or(BABE_GENESIS_EPOCH_CONFIG);
            sp_consensus_babe::BabeConfiguration {
                slot_duration: Babe::slot_duration(),
                epoch_length: EpochDuration::get(),
                c: epoch_config.c,
                authorities: Babe::authorities().to_vec(),
                randomness: Babe::randomness(),
                allowed_slots: epoch_config.allowed_slots,
            }
        }
        fn current_epoch_start() -> sp_consensus_babe::Slot {
            Babe::current_epoch_start()
        }
        fn current_epoch() -> sp_consensus_babe::Epoch {
            Babe::current_epoch()
        }
        fn next_epoch() -> sp_consensus_babe::Epoch {
            Babe::next_epoch()
        }
        fn generate_key_ownership_proof(
            _slot: sp_consensus_babe::Slot,
            authority_id: sp_consensus_babe::AuthorityId,
        ) -> Option<sp_consensus_babe::OpaqueKeyOwnershipProof> {
            use codec::Encode;
            Historical::prove((sp_consensus_babe::KEY_TYPE, authority_id))
                .map(|p| p.encode())
                .map(sp_consensus_babe::OpaqueKeyOwnershipProof::new)
        }
        fn submit_report_equivocation_unsigned_extrinsic(
            equivocation_proof: sp_consensus_babe::EquivocationProof<<Block as BlockT>::Header>,
            key_owner_proof: sp_consensus_babe::OpaqueKeyOwnershipProof,
        ) -> Option<()> {
            let key_owner_proof = key_owner_proof.decode()?;
            Babe::submit_unsigned_equivocation_report(
                equivocation_proof,
                key_owner_proof,
            )
        }
    }
    // Authority discovery: current authority identities.
    impl sp_authority_discovery::AuthorityDiscoveryApi<Block> for Runtime {
        fn authorities() -> Vec<AuthorityDiscoveryId> {
            AuthorityDiscovery::authorities()
        }
    }
    // Account nonce lookup used by the RPC for transaction construction.
    impl frame_system_rpc_runtime_api::AccountNonceApi<Block, AccountId, Index> for Runtime {
        fn account_nonce(account: AccountId) -> Index {
            System::account_nonce(account)
        }
    }
    // Wasm contracts RPC API: dry-run call/instantiate/upload and storage
    // reads without submitting extrinsics.
    impl pallet_contracts::ContractsApi<Block, AccountId, Balance, BlockNumber, Hash> for Runtime
    {
        fn call(
            origin: AccountId,
            dest: AccountId,
            value: Balance,
            gas_limit: Option<Weight>,
            storage_deposit_limit: Option<Balance>,
            input_data: Vec<u8>,
        ) -> pallet_contracts_primitives::ContractExecResult<Balance> {
            // Default to the full block weight when no limit is given.
            let gas_limit = gas_limit.unwrap_or(RuntimeBlockWeights::get().max_block);
            // Final `true` enables debug output for the dry run.
            Contracts::bare_call(origin, dest, value, gas_limit, storage_deposit_limit, input_data, true)
        }
        fn instantiate(
            origin: AccountId,
            value: Balance,
            gas_limit: Option<Weight>,
            storage_deposit_limit: Option<Balance>,
            code: pallet_contracts_primitives::Code<Hash>,
            data: Vec<u8>,
            salt: Vec<u8>,
        ) -> pallet_contracts_primitives::ContractInstantiateResult<AccountId, Balance>
        {
            let gas_limit = gas_limit.unwrap_or(RuntimeBlockWeights::get().max_block);
            Contracts::bare_instantiate(origin, value, gas_limit, storage_deposit_limit, code, data, salt, true)
        }
        fn upload_code(
            origin: AccountId,
            code: Vec<u8>,
            storage_deposit_limit: Option<Balance>,
        ) -> pallet_contracts_primitives::CodeUploadResult<Hash, Balance>
        {
            Contracts::bare_upload_code(origin, code, storage_deposit_limit)
        }
        fn get_storage(
            address: AccountId,
            key: Vec<u8>,
        ) -> pallet_contracts_primitives::GetStorageResult {
            Contracts::get_storage(address, key)
        }
    }
    // Ethereum tx-pool RPC: extract the Ethereum transactions from the
    // ready/future extrinsic queues.
    impl fp_rpc::TxPoolRuntimeRPCApi<Block> for Runtime {
        fn extrinsic_filter(
            xts_ready: Vec<<Block as BlockT>::Extrinsic>,
            xts_future: Vec<<Block as BlockT>::Extrinsic>,
        ) -> TxPoolResponse {
            TxPoolResponse {
                ready: xts_ready
                    .into_iter()
                    .filter_map(|xt| match xt.0.function {
                        RuntimeCall::Ethereum(transact { transaction }) => Some(transaction),
                        _ => None,
                    })
                    .collect(),
                future: xts_future
                    .into_iter()
                    .filter_map(|xt| match xt.0.function {
                        RuntimeCall::Ethereum(transact { transaction }) => Some(transaction),
                        _ => None,
                    })
                    .collect(),
            }
        }
    }
    // Ethereum-compatibility RPC API (eth_* endpoints): account/state
    // queries plus dry-run `call`/`create` through the EVM runner.
    impl fp_rpc::EthereumRuntimeRPCApi<Block> for Runtime {
        fn chain_id() -> u64 {
            <Runtime as pallet_evm::Config>::ChainId::get()
        }
        fn account_basic(address: H160) -> EVMAccount {
            // Second tuple element (weight) is irrelevant for RPC.
            let (account, _) = EVM::account_basic(&address);
            account
        }
        fn gas_price() -> U256 {
            let (gas_price, _) = <Runtime as pallet_evm::Config>::FeeCalculator::min_gas_price();
            gas_price
        }
        fn account_code_at(address: H160) -> Vec<u8> {
            EVM::account_codes(address)
        }
        fn author() -> H160 {
            <pallet_evm::Pallet<Runtime>>::find_author()
        }
        fn storage_at(address: H160, index: U256) -> H256 {
            // Storage keys are the big-endian byte representation of the
            // slot index.
            let mut tmp = [0u8; 32];
            index.to_big_endian(&mut tmp);
            EVM::account_storages(address, H256::from_slice(&tmp[..]))
        }
        fn call(
            from: H160,
            to: H160,
            data: Vec<u8>,
            value: U256,
            gas_limit: U256,
            max_fee_per_gas: Option<U256>,
            max_priority_fee_per_gas: Option<U256>,
            nonce: Option<U256>,
            estimate: bool,
            access_list: Option<Vec<(H160, Vec<H256>)>>,
        ) -> Result<pallet_evm::CallInfo, sp_runtime::DispatchError> {
            // For gas estimation, run with an estimate-mode EVM config.
            let config = if estimate {
                let mut config = <Runtime as pallet_evm::Config>::config().clone();
                config.estimate = true;
                Some(config)
            } else {
                None
            };
            // Dry run: not a real transaction, but with validation on.
            let is_transactional = false;
            let validate = true;
            let evm_config = config.as_ref().unwrap_or(<Runtime as pallet_evm::Config>::config());
            <Runtime as pallet_evm::Config>::Runner::call(
                from,
                to,
                data,
                value,
                gas_limit.unique_saturated_into(),
                max_fee_per_gas,
                max_priority_fee_per_gas,
                nonce,
                access_list.unwrap_or_default(),
                is_transactional,
                validate,
                evm_config,
            ).map_err(|err| err.error.into())
        }
        fn create(
            from: H160,
            data: Vec<u8>,
            value: U256,
            gas_limit: U256,
            max_fee_per_gas: Option<U256>,
            max_priority_fee_per_gas: Option<U256>,
            nonce: Option<U256>,
            estimate: bool,
            access_list: Option<Vec<(H160, Vec<H256>)>>,
        ) -> Result<pallet_evm::CreateInfo, sp_runtime::DispatchError> {
            let config = if estimate {
                let mut config = <Runtime as pallet_evm::Config>::config().clone();
                config.estimate = true;
                Some(config)
            } else {
                None
            };
            let is_transactional = false;
            let validate = true;
            let evm_config = config.as_ref().unwrap_or(<Runtime as pallet_evm::Config>::config());
            <Runtime as pallet_evm::Config>::Runner::create(
                from,
                data,
                value,
                gas_limit.unique_saturated_into(),
                max_fee_per_gas,
                max_priority_fee_per_gas,
                nonce,
                access_list.unwrap_or_default(),
                is_transactional,
                validate,
                evm_config,
            ).map_err(|err| err.error.into())
        }
        fn current_transaction_statuses() -> Option<Vec<TransactionStatus>> {
            Ethereum::current_transaction_statuses()
        }
        fn current_block() -> Option<pallet_ethereum::Block> {
            Ethereum::current_block()
        }
        fn current_receipts() -> Option<Vec<pallet_ethereum::Receipt>> {
            Ethereum::current_receipts()
        }
        fn current_all() -> (
            Option<pallet_ethereum::Block>,
            Option<Vec<pallet_ethereum::Receipt>>,
            Option<Vec<TransactionStatus>>
        ) {
            (
                Ethereum::current_block(),
                Ethereum::current_receipts(),
                Ethereum::current_transaction_statuses()
            )
        }
        // Extract only the Ethereum transactions from a list of extrinsics.
        fn extrinsic_filter(
            xts: Vec<<Block as BlockT>::Extrinsic>,
        ) -> Vec<EthereumTransaction> {
            xts.into_iter().filter_map(|xt| match xt.0.function {
                RuntimeCall::Ethereum(transact { transaction }) => Some(transaction),
                _ => None
            }).collect::<Vec<EthereumTransaction>>()
        }
        fn elasticity() -> Option<Permill> {
            Some(BaseFee::elasticity())
        }
        fn gas_limit_multiplier_support() {}
    }
    // Wraps a raw Ethereum transaction into an unsigned runtime extrinsic.
    impl fp_rpc::ConvertTransactionRuntimeApi<Block> for Runtime {
        fn convert_transaction(transaction: EthereumTransaction) -> <Block as BlockT>::Extrinsic {
            UncheckedExtrinsic::new_unsigned(
                pallet_ethereum::Call::<Runtime>::transact { transaction }.into(),
            )
        }
    }
    // Fee/weight estimation for whole extrinsics (RPC `payment_*`).
    impl pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi<
        Block,
        Balance,
    > for Runtime {
        fn query_info(uxt: <Block as BlockT>::Extrinsic, len: u32) -> RuntimeDispatchInfo<Balance> {
            TransactionPayment::query_info(uxt, len)
        }
        fn query_fee_details(uxt: <Block as BlockT>::Extrinsic, len: u32) -> FeeDetails<Balance> {
            TransactionPayment::query_fee_details(uxt, len)
        }
    }
    // Fee/weight estimation for bare calls (no signature data).
    impl pallet_transaction_payment_rpc_runtime_api::TransactionPaymentCallApi<Block, Balance, RuntimeCall>
        for Runtime
    {
        fn query_call_info(call: RuntimeCall, len: u32) -> RuntimeDispatchInfo<Balance> {
            TransactionPayment::query_call_info(call, len)
        }
        fn query_call_fee_details(call: RuntimeCall, len: u32) -> FeeDetails<Balance> {
            TransactionPayment::query_call_fee_details(call, len)
        }
    }
    // Merkle Mountain Range API: proof generation/verification, both
    // single-leaf and batch variants, stateful and stateless.
    impl pallet_mmr::primitives::MmrApi<
        Block,
        mmr::Hash,
        BlockNumber,
    > for Runtime {
        fn generate_proof(block_number: BlockNumber)
            -> Result<(mmr::EncodableOpaqueLeaf, mmr::Proof<mmr::Hash>), mmr::Error>
        {
            // Single-leaf proof is the one-element special case of the
            // batch proof.
            Mmr::generate_batch_proof(vec![block_number]).and_then(|(leaves, proof)|
                Ok((
                    mmr::EncodableOpaqueLeaf::from_leaf(&leaves[0]),
                    mmr::BatchProof::into_single_leaf_proof(proof)?
                ))
            )
        }
        fn verify_proof(leaf: mmr::EncodableOpaqueLeaf, proof: mmr::Proof<mmr::Hash>)
            -> Result<(), mmr::Error>
        {
            let leaf: mmr::Leaf = leaf
                .into_opaque_leaf()
                .try_decode()
                .ok_or(mmr::Error::Verify)?;
            Mmr::verify_leaves(vec![leaf], mmr::Proof::into_batch_proof(proof))
        }
        // Stateless variant: verifies against a caller-supplied root
        // instead of on-chain state.
        fn verify_proof_stateless(
            root: mmr::Hash,
            leaf: mmr::EncodableOpaqueLeaf,
            proof: mmr::Proof<mmr::Hash>
        ) -> Result<(), mmr::Error> {
            let node = mmr::DataOrHash::Data(leaf.into_opaque_leaf());
            pallet_mmr::verify_leaves_proof::<mmr::Hashing, _>(root, vec![node], mmr::Proof::into_batch_proof(proof))
        }
        fn mmr_root() -> Result<mmr::Hash, mmr::Error> {
            Ok(Mmr::mmr_root())
        }
        fn generate_batch_proof(
            block_numbers: Vec<BlockNumber>,
        ) -> Result<(Vec<mmr::EncodableOpaqueLeaf>, mmr::BatchProof<mmr::Hash>), mmr::Error> {
            Mmr::generate_batch_proof(block_numbers).map(|(leaves, proof)| {
                (
                    leaves
                        .into_iter()
                        .map(|leaf| mmr::EncodableOpaqueLeaf::from_leaf(&leaf))
                        .collect(),
                    proof,
                )
            })
        }
        fn generate_historical_batch_proof(
            block_numbers: Vec<BlockNumber>,
            best_known_block_number: BlockNumber,
        ) -> Result<(Vec<mmr::EncodableOpaqueLeaf>, mmr::BatchProof<mmr::Hash>), mmr::Error> {
            Mmr::generate_historical_batch_proof(block_numbers, best_known_block_number).map(
                |(leaves, proof)| {
                    (
                        leaves
                            .into_iter()
                            .map(|leaf| mmr::EncodableOpaqueLeaf::from_leaf(&leaf))
                            .collect(),
                        proof,
                    )
                },
            )
        }
        fn verify_batch_proof(leaves: Vec<mmr::EncodableOpaqueLeaf>, proof: mmr::BatchProof<mmr::Hash>)
            -> Result<(), mmr::Error>
        {
            // Decode every opaque leaf up-front; any failure rejects the
            // whole batch.
            let leaves = leaves.into_iter().map(|leaf|
                leaf.into_opaque_leaf()
                    .try_decode()
                    .ok_or(mmr::Error::Verify)).collect::<Result<Vec<mmr::Leaf>, mmr::Error>>()?;
            Mmr::verify_leaves(leaves, proof)
        }
        fn verify_batch_proof_stateless(
            root: mmr::Hash,
            leaves: Vec<mmr::EncodableOpaqueLeaf>,
            proof: mmr::BatchProof<mmr::Hash>
        ) -> Result<(), mmr::Error> {
            let nodes = leaves.into_iter().map(|leaf|mmr::DataOrHash::Data(leaf.into_opaque_leaf())).collect();
            pallet_mmr::verify_leaves_proof::<mmr::Hashing, _>(root, nodes, proof)
        }
    }
    // Session key generation/decoding used by `author_rotateKeys` RPC.
    impl sp_session::SessionKeys<Block> for Runtime {
        fn generate_session_keys(seed: Option<Vec<u8>>) -> Vec<u8> {
            SessionKeys::generate(seed)
        }
        fn decode_session_keys(
            encoded: Vec<u8>,
        ) -> Option<Vec<(Vec<u8>, KeyTypeId)>> {
            SessionKeys::decode_into_raw_public_keys(&encoded)
        }
    }
#[cfg(feature = "runtime-benchmarks")]
impl frame_benchmarking::Benchmark<Block> for Runtime {
fn benchmark_metadata(extra: bool) -> (
Vec<frame_benchmarking::BenchmarkList>,
Vec<frame_support::traits::StorageInfo>,
) {
use frame_benchmarking::{list_benchmark, Benchmarking, BenchmarkList};
use frame_support::traits::StorageInfoTrait;
use frame_system_benchmarking::Pallet as SystemBench;
use pallet_evm::Pallet as PalletEvmBench;
let mut list = Vec::<BenchmarkList>::new();
list_benchmark!(list, extra, pallet_assets, Assets);
list_benchmark!(list, extra, pallet_babe, Babe);
list_benchmark!(list, extra, pallet_balances, Balances);
list_benchmark!(list, extra, pallet_bounties, Bounties);
list_benchmark!(list, extra, pallet_collective, Council);
list_benchmark!(list, extra, pallet_contracts, Contracts);
list_benchmark!(list, extra, pallet_democracy, Democracy);
list_benchmark!(list, extra, pallet_elections_phragmen, Elections);
list_benchmark!(list, extra, pallet_grandpa, Grandpa);
list_benchmark!(list, extra, pallet_identity, Identity);
list_benchmark!(list, extra, pallet_im_online, ImOnline);
list_benchmark!(list, extra, pallet_indices, Indices);
list_benchmark!(list, extra, pallet_lottery, Lottery);
list_benchmark!(list, extra, pallet_mmr, Mmr);
list_benchmark!(list, extra, pallet_multisig, Multisig);
list_benchmark!(list, extra, pallet_proxy, Proxy);
list_benchmark!(list, extra, pallet_scheduler, Scheduler);
list_benchmark!(list, extra, pallet_staking, Staking);
list_benchmark!(list, extra, frame_system, SystemBench::<Runtime>);
list_benchmark!(list, extra, pallet_timestamp, Timestamp);
list_benchmark!(list, extra, pallet_tips, Tips);
list_benchmark!(list, extra, pallet_treasury, Treasury);
list_benchmark!(list, extra, pallet_utility, Utility);
list_benchmark!(list, extra, pallet_vesting, Vesting);
list_benchmark!(list, extra, pallet_credit, Credit);
list_benchmark!(list, extra, pallet_deeper_node, DeeperNode);
list_benchmark!(list, extra, pallet_micropayment, Micropayment);
list_benchmark!(list, extra, pallet_credit_accumulation, CreditAccumulation);
list_benchmark!(list, extra, pallet_evm, PalletEvmBench::<Runtime>);
list_benchmark!(list, extra, pallet_preimage, Preimage);
list_benchmark!(list, extra, pallet_scheduler, Scheduler);
list_benchmark!(list, extra, pallet_operation, Operation);
list_benchmark!(list, extra, pallet_user_privileges, UserPrivileges);
let storage_info = AllPalletsWithSystem::storage_info();
return (list, storage_info)
}
fn dispatch_benchmark(
config: frame_benchmarking::BenchmarkConfig
) -> Result<Vec<frame_benchmarking::BenchmarkBatch>, sp_runtime::RuntimeString> {
use frame_benchmarking::{Benchmarking, BenchmarkBatch, add_benchmark, TrackedStorageKey};
// Trying to add benchmarks directly to the Session Pallet caused cyclic dependency issues.
// To get around that, we separated the Session benchmarks into its own crate, which is why
// we need these two lines below.
use frame_system_benchmarking::Pallet as SystemBench;
use pallet_evm::Pallet as PalletEvmBench;
impl frame_system_benchmarking::Config for Runtime {}
let whitelist: Vec<TrackedStorageKey> = vec![
// Block Number
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef702a5c1b19ab7a04f536c519aca4983ac").to_vec().into(),
// Total Issuance
hex_literal::hex!("c2261276cc9d1f8598ea4b6a74b15c2f57c875e4cff74148e4628f264b974c80").to_vec().into(),
// Execution Phase
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef7ff553b5a9862a516939d82b3d3d8661a").to_vec().into(),
// Event Count
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef70a98fdbe9ce6c55837576c60c7af3850").to_vec().into(),
// System Events
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef780d41e5e16056765bc8461851072c9d7").to_vec().into(),
// Treasury Account
hex_literal::hex!("26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da95ecffd7b6c0f78751baa9d281e0bfa3a6d6f646c70792f74727372790000000000000000000000000000000000000000").to_vec().into(),
];
let mut batches = Vec::<BenchmarkBatch>::new();
let params = (&config, &whitelist);
add_benchmark!(params, batches, pallet_assets, Assets);
add_benchmark!(params, batches, pallet_babe, Babe);
add_benchmark!(params, batches, pallet_balances, Balances);
add_benchmark!(params, batches, pallet_bounties, Bounties);
add_benchmark!(params, batches, pallet_collective, Council);
add_benchmark!(params, batches, pallet_contracts, Contracts);
add_benchmark!(params, batches, pallet_democracy, Democracy);
add_benchmark!(params, batches, pallet_elections_phragmen, Elections);
add_benchmark!(params, batches, pallet_grandpa, Grandpa);
add_benchmark!(params, batches, pallet_identity, Identity);
add_benchmark!(params, batches, pallet_im_online, ImOnline);
add_benchmark!(params, batches, pallet_indices, Indices);
add_benchmark!(params, batches, pallet_lottery, Lottery);
add_benchmark!(params, batches, pallet_mmr, Mmr);
add_benchmark!(params, batches, pallet_multisig, Multisig);
add_benchmark!(params, batches, pallet_proxy, Proxy);
add_benchmark!(params, batches, pallet_scheduler, Scheduler);
add_benchmark!(params, batches, pallet_staking, Staking);
add_benchmark!(params, batches, frame_system, SystemBench::<Runtime>);
add_benchmark!(params, batches, pallet_timestamp, Timestamp);
add_benchmark!(params, batches, pallet_tips, Tips);
add_benchmark!(params, batches, pallet_treasury, Treasury);
add_benchmark!(params, batches, pallet_utility, Utility);
add_benchmark!(params, batches, pallet_vesting, Vesting);
add_benchmark!(params, batches, pallet_credit, Credit);
add_benchmark!(params, batches, pallet_deeper_node, DeeperNode);
add_benchmark!(params, batches, pallet_micropayment, Micropayment);
add_benchmark!(params, batches, pallet_credit_accumulation, CreditAccumulation);
add_benchmark!(params, batches, pallet_evm, PalletEvmBench::<Runtime>);
add_benchmark!(params, batches, pallet_preimage, Preimage);
add_benchmark!(params, batches, pallet_scheduler, Scheduler);
add_benchmark!(params, batches, pallet_operation, Operation);
add_benchmark!(params, batches, pallet_user_privileges, UserPrivileges);
if batches.is_empty() { return Err("Benchmark not found for this pallet.".into()) }
Ok(batches)
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use frame_system::offchain::CreateSignedTransaction;
    // Compile-time check: Runtime must satisfy the bound required by
    // offchain transaction submitters. The test body does no runtime work.
    #[test]
    fn validate_transaction_submitter_bounds() {
        fn is_submit_signed_transaction<T>()
        where
            T: CreateSignedTransaction<RuntimeCall>,
        {
        }
        is_submit_signed_transaction::<Runtime>();
    }
}
|
use super::super::{components, resources};
use specs::{Builder, World, WorldExt};
/// Spawns the player entity at `pos` and records it in the world as the
/// `resources::Player` singleton. Returns the new entity handle.
pub fn create_in(world: &mut World, pos: components::Position) -> specs::Entity {
    // Pink '@' glyph on a black background, drawn on the entity layer.
    let sprite = components::Sprite {
        fg_r: 255,
        fg_g: 64,
        fg_b: 128,
        bg_r: 0,
        bg_g: 0,
        bg_b: 0,
        layer: components::DL_ENTITY,
        rune: '@',
    };
    let entity = world
        .create_entity()
        .with(pos)
        .with(sprite)
        .with(components::Player)
        .with(components::material::flesh())
        .with(components::Shape::Medium)
        .build();
    // Make the player entity discoverable as a shared world resource.
    world.insert(resources::Player { ent: entity });
    entity
}
|
fn main() {
    // A house listing used to demonstrate struct-update syntax.
    struct Home {
        name: String,
        rooms: i32,
        sold: bool,
    }
    let mut home1 = Home {
        name: String::from("myhome"),
        rooms: 13,
        sold: false,
    };
    home1.sold = true;
    // Struct-update syntax: `..home1` fills the remaining fields from
    // `home1` (moving `name`, copying `rooms`) while `sold` is overridden.
    // `home2` is never mutated afterwards, so it must not be `mut`
    // (the original `let mut` triggered an unused_mut warning).
    let home2 = Home {
        sold: false,
        ..home1
    };
    println!("sold = {}", home2.sold);
    println!("rooms = {}", home2.rooms);
}
#![allow(dead_code)]
use async_trait::async_trait;
/// Entry point of the coverage sample; only prints a marker line.
fn main() {
    println!("codecovsample::main");
}
// Enum whose handling function is exercised for both variants in tests.
enum Covered {
    Variant1,
    Variant2,
}
// Enum whose handling function is deliberately never called.
enum Uncovered {
    Variant1,
    Variant2,
}
// Enum whose handling function is called for only one variant.
enum PartiallyCovered {
    Variant1,
    Variant2,
}
// Both arms hit by `cover_enum` test.
fn fn_covered_enum(input: Covered) {
    match input {
        Covered::Variant1 => { println!("Variant1"); }
        Covered::Variant2 => { println!("Variant2"); }
    }
}
// Never called; both arms stay uncovered.
fn fn_uncovered_enum(input: Uncovered) {
    match input {
        Uncovered::Variant1 => { println!("Variant1"); }
        Uncovered::Variant2 => { println!("Variant2"); }
    }
}
// Only Variant1 is hit by `partially_cover_enum` test.
fn fn_partially_covered_enum(input: PartiallyCovered) {
    match input {
        PartiallyCovered::Variant1 => { println!("Variant1"); }
        PartiallyCovered::Variant2 => { println!("Variant2"); }
    }
}
// Trait mixing required methods, associated (no-self) functions, and
// default methods — half of each pair is exercised by tests.
trait ATrait {
    fn covered(&self);
    fn uncovered(&self);
    fn func_covered();
    fn func_uncovered();
    fn default_covered(&self) {
        println!("default_covered");
    }
    fn default_uncovered(&self) {
        println!("default_uncovered");
    }
}
// Like ATrait but without associated functions, so it can be used as a
// trait object (`Box<dyn BTrait>` in the tests).
trait BTrait {
    fn covered(&self);
    fn uncovered(&self);
    fn default_covered(&self) {
        println!("default_covered");
    }
    fn default_uncovered(&self) {
        println!("default_uncovered");
    }
}
// ATrait implementor called directly (not via generics or boxing).
struct ATraitImplDirect;
impl ATrait for ATraitImplDirect {
    fn covered(&self) {
        println!("covered")
    }
    fn uncovered(&self) {
        println!("uncovered");
    }
    fn func_covered() {
        println!("func_covered");
    }
    fn func_uncovered() {
        // Fixed copy-paste error: this previously printed "func_covered".
        println!("func_uncovered");
    }
}
// ATrait implementor reached through a generic function in the tests.
struct ATraitImplGeneric;
impl ATrait for ATraitImplGeneric {
    fn covered(&self) {
        println!("covered")
    }
    fn uncovered(&self) {
        println!("uncovered");
    }
    fn func_covered() {
        println!("func_covered");
    }
    fn func_uncovered() {
        // Fixed copy-paste error: this previously printed "func_covered".
        println!("func_uncovered");
    }
}
// BTrait implementor used via `Box<dyn BTrait>` in the tests.
struct BTraitImplBoxed;
impl BTrait for BTraitImplBoxed {
    fn covered(&self) {
        println!("covered")
    }
    fn uncovered(&self) {
        println!("uncovered");
    }
}
// Minimal declarative macro, used to check coverage of macro-expanded code.
macro_rules! simple_rule {
    () => {
        println!("simple rule");
    };
}
fn call_simple_rule() {
    simple_rule!();
}
// Exercises trait methods, defaults, and associated functions through a
// generic (monomorphised) call path.
fn call_generic_atrait<T: ATrait>(input: T) {
    input.covered();
    input.default_covered();
    T::func_covered();
}
async fn async_func() {
println!("async_func");
}
async fn async_func_anon() {
let x = async {
println!("async_func");
};
x.await;
}
#[async_trait]
trait AsyncTrait {
async fn covered(&self);
async fn uncovered(&self);
}
/// Implementor exercised via `Box<dyn AsyncTrait>` in `cover_async_trait`.
struct AsyncTraitImpl;
#[async_trait]
impl AsyncTrait for AsyncTraitImpl {
    async fn covered(&self) {
        println!("covered");
        // Also pulls a free async function into the covered set.
        async_func_from_trait_covered().await;
    }
    // Never awaited by any test.
    async fn uncovered(&self) {
        println!("uncovered");
    }
}
/// Free async function reached only via `AsyncTraitImpl::covered`.
async fn async_func_from_trait_covered() {
    println!("covered async func from trait");
}
#[cfg(test)]
mod tests {
    use futures::executor::block_on;
    use super::*;
    // Drives `main` once so everything it reaches counts as covered.
    #[test]
    fn test_main() {
        main();
    }
    // Hits both arms of the covered enum's match.
    #[test]
    fn cover_enum() {
        fn_covered_enum(Covered::Variant1);
        fn_covered_enum(Covered::Variant2);
    }
    // Deliberately hits only the Variant1 arm.
    #[test]
    fn partially_cover_enum() {
        fn_partially_covered_enum(PartiallyCovered::Variant1);
    }
    // Static dispatch through a concrete implementor.
    #[test]
    fn cover_atrait_direct() {
        let x = ATraitImplDirect;
        x.covered();
        x.default_covered();
        <ATraitImplDirect as ATrait>::func_covered();
    }
    // Dynamic dispatch through a boxed trait object.
    #[test]
    fn cover_atrait_boxed() {
        let x: Box<dyn BTrait> = Box::new(BTraitImplBoxed);
        x.covered();
        x.default_covered();
    }
    // Covers the macro expansion site.
    #[test]
    fn cover_simple_rule() {
        call_simple_rule();
    }
    // Monomorphized dispatch through a generic function.
    #[test]
    fn cover_generic_atrait() {
        let x = ATraitImplGeneric;
        call_generic_atrait(x);
    }
    // Async bodies only execute when polled, hence the block_on.
    #[test]
    fn cover_async_funcs() {
        block_on(async {
            async_func().await;
            async_func_anon().await;
        });
    }
    #[test]
    fn cover_async_trait() {
        block_on(async {
            let x: Box<dyn AsyncTrait> = Box::new(AsyncTraitImpl);
            x.covered().await;
        });
    }
}
|
use std::collections::hash_map::{Entry, HashMap};
use pdb::FallibleIterator;
/// Opens either the PDB named by the `PDB_FILE` env var or the checked-in
/// fixture, loads its global symbol table, and hands it to `func` together
/// with a flag saying whether the fixture (with known contents) is in use.
fn setup<F>(func: F)
where
    F: FnOnce(&pdb::SymbolTable<'_>, bool),
{
    let (file, is_fixture) = if let Ok(filename) = std::env::var("PDB_FILE") {
        (std::fs::File::open(filename).expect("opening file"), false)
    } else {
        (
            std::fs::File::open("fixtures/self/foo.pdb").expect("opening file"),
            true,
        )
    };
    let mut pdb = pdb::PDB::open(file).expect("opening pdb");
    let symbol_table = pdb.global_symbols().expect("global symbols");
    func(&symbol_table, is_fixture);
}
// Counts symbols per raw kind and asserts lower bounds on the fixture's
// per-kind counts; against the fixture it also prints a generated unit test
// for the first symbol of each kind it encounters.
#[test]
fn count_symbols() {
    setup(|global_symbols, is_fixture| {
        // Tally of symbols keyed by raw kind code.
        let mut map: HashMap<u16, usize> = HashMap::new();
        // walk the symbol table
        let mut iter = global_symbols.iter();
        while let Some(sym) = iter.next().expect("next symbol") {
            let kind = sym.raw_kind();
            let entry = map.entry(kind).or_insert(0);
            if *entry == 0 && is_fixture {
                // first symbol of this kind seen
                // emit a unit test
                println!("#[test]");
                println!("fn kind_{:04x}() {{", sym.raw_kind());
                println!(" let buf = &{:?};", sym.raw_bytes());
                println!(" let (symbol, data, name) = parse(buf).expect(\"parse\");");
                println!(
                    " assert_eq!(symbol.raw_kind(), 0x{:04x});",
                    sym.raw_kind()
                );
                println!(
                    " assert_eq!(data, SymbolData::{:?});",
                    sym.parse().expect("parse")
                );
                println!("}}");
                println!();
            }
            *entry += 1;
        }
        println!("symbol counts by kind:");
        for (kind, count) in &map {
            println!(" - kind: 0x{:04x}, count: {}", kind, count);
        }
        // Lower bounds on the per-kind counts. (A literal duplicate of the
        // 0x110e assertion was removed — it asserted the same thing twice.)
        assert!(*map.get(&0x1107).expect("0x1107") >= 500);
        assert!(*map.get(&0x1108).expect("0x1108") >= 400);
        assert!(*map.get(&0x110c).expect("0x110c") >= 90);
        assert!(*map.get(&0x110d).expect("0x110d") >= 120);
        assert!(*map.get(&0x110e).expect("0x110e") >= 3000);
        assert!(*map.get(&0x1125).expect("0x1125") >= 2000);
        assert!(*map.get(&0x1127).expect("0x1127") >= 500);
    })
}
// Verifies that a handful of well-known symbols can be found and parsed in
// the fixture PDB.
#[test]
fn find_symbols() {
    setup(|global_symbols, is_fixture| {
        // can't do much if we don't know which PDB we're using
        if !is_fixture {
            return;
        }
        // Raw symbol name -> parsed data, filled in once found.
        let mut map: HashMap<&[u8], Option<pdb::SymbolData<'_>>> = HashMap::new();
        // look for:
        // main(), defined in the program
        map.insert(b"main", None);
        // memcpy(), defined in libc
        map.insert(b"memcpy", None);
        // HeapAlloc(), defined... somewhere
        map.insert(b"HeapAlloc", None);
        // Baz::static_f_public(), except MSVC-mangled
        map.insert(b"?static_f_public@Baz@@SAXXZ", None);
        // walk the symbol table
        let mut iter = global_symbols.iter();
        while let Some(sym) = iter.next().expect("next symbol") {
            // ensure we can parse all the symbols, even though we only want a few
            let data = sym.parse().expect("symbol parsing");
            // get symbol name
            let name = data.name().unwrap_or_default();
            if let Entry::Occupied(mut e) = map.entry(name.as_bytes()) {
                // this is a symbol we wanted to find
                // store our data
                e.insert(Some(data));
            }
        }
        // Every wanted symbol must have been seen, or the test fails.
        for (key, value) in map {
            match value {
                Some(data) => {
                    println!("found {} => {:?}", String::from_utf8_lossy(key), data);
                }
                None => {
                    panic!("couldn't find {}", String::from_utf8_lossy(key));
                }
            }
        }
    })
}
|
#![no_std]
#![no_main]
extern crate kernel;
use kernel::{Signal};
use kernel::graphics::*;
use core::*;
// use kernel::graphics;
// use kernel::graphics::{Framebuffer};
// use core::{slice};
use core::sync::atomic::{AtomicBool,Ordering};
static SHOULD_QUIT: AtomicBool = AtomicBool::new(false);
#[no_mangle]
pub extern "C" fn _start() -> ! {
    // Freestanding (no_std/no_main) entry point: the kernel jumps here.
    // println!("Hello, compositor!");
    let app_name = "/apps/wallpaper";
    unsafe {
        // Ask the kernel to launch the wallpaper app (raw pointer + length).
        kernel::spawn(app_name.as_ptr(), app_name.len());
    }
    // receive IPC
    // NOTE(review): 31337 looks like a fixed IPC channel id shared with the
    // peer — confirm both endpoints agree on it.
    let pixels = unsafe { kernel::accept(31337) as *mut Pixel };
    // NOTE(review): assumes the shared buffer holds exactly 800*600 pixels —
    // confirm against the actual framebuffer dimensions.
    let screen = unsafe { slice::from_raw_parts_mut(pixels, 800*600) };
    // Paint the whole buffer an opaque dark gray.
    for pix in screen.iter_mut() {
        pix.r = 0x08;
        pix.g = 0x08;
        pix.b = 0x08;
        pix.a = 0xff;
    }
    // let Framebuffer {pixels, width, height} = unsafe { graphics::get_framebuffer() };
    // println!("Width = {width}, height = {height}", width=width, height=height);
    // let screen = unsafe { slice::from_raw_parts_mut(pixels, (width*height) as usize) };
    // let mut lum = 0xff_u8;
    // println!("Screen created!");
    // Cooperative loop: yield to the kernel until the signal handler flips
    // the quit flag.
    while !SHOULD_QUIT.load(Ordering::Relaxed) {
        // Some random animation
        // for pix in screen.iter_mut() {
        // pix.r = lum;
        // pix.g = lum;
        // pix.b = lum;
        // }
        // lum = lum.checked_sub(1).unwrap_or(0xff);
        // std::thread::yield_now();
        unsafe { kernel::r#yield(); }
    }
    unsafe { kernel::exit(0); }
}
// TODO: need to register signal handlers instead of looking for magic names
/// Signal entry point: only flags the main loop to exit. Relaxed ordering is
/// used; presumably sufficient since the flag is the only shared state —
/// confirm if more state is ever shared between handler and loop.
#[no_mangle]
pub extern "C" fn handle_signal(_s: Signal) -> () {
    // println!("Got signal: {:?}, quitting!", s);
    SHOULD_QUIT.store(true, Ordering::Relaxed);
}
|
// Auto-generated register API (svd2rust style) for the SECCFGR2 register;
// each SECnn field is a single bit. Prefer regenerating from the SVD over
// hand-editing so this file stays in sync with the device description.
#[doc = "Register `SECCFGR2` reader"]
pub type R = crate::R<SECCFGR2_SPEC>;
#[doc = "Register `SECCFGR2` writer"]
pub type W = crate::W<SECCFGR2_SPEC>;
#[doc = "Field `SEC32` reader - SEC32"]
pub type SEC32_R = crate::BitReader;
#[doc = "Field `SEC32` writer - SEC32"]
pub type SEC32_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC33` reader - SEC33"]
pub type SEC33_R = crate::BitReader;
#[doc = "Field `SEC33` writer - SEC33"]
pub type SEC33_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC34` reader - SEC34"]
pub type SEC34_R = crate::BitReader;
#[doc = "Field `SEC34` writer - SEC34"]
pub type SEC34_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC35` reader - SEC35"]
pub type SEC35_R = crate::BitReader;
#[doc = "Field `SEC35` writer - SEC35"]
pub type SEC35_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC36` reader - SEC36"]
pub type SEC36_R = crate::BitReader;
#[doc = "Field `SEC36` writer - SEC36"]
pub type SEC36_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC37` reader - SEC37"]
pub type SEC37_R = crate::BitReader;
#[doc = "Field `SEC37` writer - SEC37"]
pub type SEC37_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC38` reader - SEC38"]
pub type SEC38_R = crate::BitReader;
#[doc = "Field `SEC38` writer - SEC38"]
pub type SEC38_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC39` reader - SEC39"]
pub type SEC39_R = crate::BitReader;
#[doc = "Field `SEC39` writer - SEC39"]
pub type SEC39_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC40` reader - SEC40"]
pub type SEC40_R = crate::BitReader;
#[doc = "Field `SEC40` writer - SEC40"]
pub type SEC40_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC41` reader - SEC41"]
pub type SEC41_R = crate::BitReader;
#[doc = "Field `SEC41` writer - SEC41"]
pub type SEC41_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `SEC42` reader - SEC42"]
pub type SEC42_R = crate::BitReader;
#[doc = "Field `SEC42` writer - SEC42"]
pub type SEC42_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts one SECnn bit from the cached register value.
impl R {
    #[doc = "Bit 0 - SEC32"]
    #[inline(always)]
    pub fn sec32(&self) -> SEC32_R {
        SEC32_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - SEC33"]
    #[inline(always)]
    pub fn sec33(&self) -> SEC33_R {
        SEC33_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - SEC34"]
    #[inline(always)]
    pub fn sec34(&self) -> SEC34_R {
        SEC34_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - SEC35"]
    #[inline(always)]
    pub fn sec35(&self) -> SEC35_R {
        SEC35_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - SEC36"]
    #[inline(always)]
    pub fn sec36(&self) -> SEC36_R {
        SEC36_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - SEC37"]
    #[inline(always)]
    pub fn sec37(&self) -> SEC37_R {
        SEC37_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - SEC38"]
    #[inline(always)]
    pub fn sec38(&self) -> SEC38_R {
        SEC38_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - SEC39"]
    #[inline(always)]
    pub fn sec39(&self) -> SEC39_R {
        SEC39_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - SEC40"]
    #[inline(always)]
    pub fn sec40(&self) -> SEC40_R {
        SEC40_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - SEC41"]
    #[inline(always)]
    pub fn sec41(&self) -> SEC41_R {
        SEC41_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - SEC42"]
    #[inline(always)]
    pub fn sec42(&self) -> SEC42_R {
        SEC42_R::new(((self.bits >> 10) & 1) != 0)
    }
}
// Write accessors: each returns a proxy positioned at one SECnn bit offset.
impl W {
    #[doc = "Bit 0 - SEC32"]
    #[inline(always)]
    #[must_use]
    pub fn sec32(&mut self) -> SEC32_W<SECCFGR2_SPEC, 0> {
        SEC32_W::new(self)
    }
    #[doc = "Bit 1 - SEC33"]
    #[inline(always)]
    #[must_use]
    pub fn sec33(&mut self) -> SEC33_W<SECCFGR2_SPEC, 1> {
        SEC33_W::new(self)
    }
    #[doc = "Bit 2 - SEC34"]
    #[inline(always)]
    #[must_use]
    pub fn sec34(&mut self) -> SEC34_W<SECCFGR2_SPEC, 2> {
        SEC34_W::new(self)
    }
    #[doc = "Bit 3 - SEC35"]
    #[inline(always)]
    #[must_use]
    pub fn sec35(&mut self) -> SEC35_W<SECCFGR2_SPEC, 3> {
        SEC35_W::new(self)
    }
    #[doc = "Bit 4 - SEC36"]
    #[inline(always)]
    #[must_use]
    pub fn sec36(&mut self) -> SEC36_W<SECCFGR2_SPEC, 4> {
        SEC36_W::new(self)
    }
    #[doc = "Bit 5 - SEC37"]
    #[inline(always)]
    #[must_use]
    pub fn sec37(&mut self) -> SEC37_W<SECCFGR2_SPEC, 5> {
        SEC37_W::new(self)
    }
    #[doc = "Bit 6 - SEC38"]
    #[inline(always)]
    #[must_use]
    pub fn sec38(&mut self) -> SEC38_W<SECCFGR2_SPEC, 6> {
        SEC38_W::new(self)
    }
    #[doc = "Bit 7 - SEC39"]
    #[inline(always)]
    #[must_use]
    pub fn sec39(&mut self) -> SEC39_W<SECCFGR2_SPEC, 7> {
        SEC39_W::new(self)
    }
    #[doc = "Bit 8 - SEC40"]
    #[inline(always)]
    #[must_use]
    pub fn sec40(&mut self) -> SEC40_W<SECCFGR2_SPEC, 8> {
        SEC40_W::new(self)
    }
    #[doc = "Bit 9 - SEC41"]
    #[inline(always)]
    #[must_use]
    pub fn sec41(&mut self) -> SEC41_W<SECCFGR2_SPEC, 9> {
        SEC41_W::new(self)
    }
    #[doc = "Bit 10 - SEC42"]
    #[inline(always)]
    #[must_use]
    pub fn sec42(&mut self) -> SEC42_W<SECCFGR2_SPEC, 10> {
        SEC42_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "EXTI security enable register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`seccfgr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`seccfgr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SECCFGR2_SPEC;
impl crate::RegisterSpec for SECCFGR2_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`seccfgr2::R`](R) reader structure"]
impl crate::Readable for SECCFGR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`seccfgr2::W`](W) writer structure"]
impl crate::Writable for SECCFGR2_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SECCFGR2 to value 0"]
impl crate::Resettable for SECCFGR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::fmt;
use std::fmt::{Display, Formatter};
use std::ops::SubAssign;
use crate::utils::plural;
/// The raw resource kinds a card cost can be paid with.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum Resource {
    Wood,
    Stone,
    Ore,
    Clay,
    Glass,
    Loom,
    Papyrus,
}
impl Display for Resource {
    /// Writes the lowercase resource name.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        let name = match self {
            Resource::Wood => "wood",
            Resource::Stone => "stone",
            Resource::Ore => "ore",
            Resource::Clay => "clay",
            Resource::Glass => "glass",
            Resource::Loom => "loom",
            Resource::Papyrus => "papyrus",
        };
        f.write_str(name)
    }
}
/// The cost of a card.
#[derive(Default, Debug, Clone)]
pub struct Cost {
    pub coins: i32,
    pub wood: i32,
    pub stone: i32,
    pub ore: i32,
    pub clay: i32,
    pub glass: i32,
    pub loom: i32,
    pub papyrus: i32,
}
impl Cost {
    /// A cost of nothing at all.
    pub fn free() -> Cost {
        Cost::default()
    }
    /// A cost of `num` coins and nothing else.
    pub fn coins(num: i32) -> Cost {
        Cost { coins: num, ..Cost::free() }
    }
    /// A cost of `num` wood and nothing else.
    pub fn wood(num: i32) -> Cost {
        Cost { wood: num, ..Cost::free() }
    }
    /// A cost of `num` stone and nothing else.
    pub fn stone(num: i32) -> Cost {
        Cost { stone: num, ..Cost::free() }
    }
    /// A cost of `num` ore and nothing else.
    pub fn ore(num: i32) -> Cost {
        Cost { ore: num, ..Cost::free() }
    }
    /// A cost of `num` clay and nothing else.
    pub fn clay(num: i32) -> Cost {
        Cost { clay: num, ..Cost::free() }
    }
    /// A cost of `num` glass and nothing else.
    pub fn glass(num: i32) -> Cost {
        Cost { glass: num, ..Cost::free() }
    }
    /// A cost of `num` loom and nothing else.
    pub fn loom(num: i32) -> Cost {
        Cost { loom: num, ..Cost::free() }
    }
    /// A cost of `num` papyrus and nothing else.
    pub fn papyrus(num: i32) -> Cost {
        Cost { papyrus: num, ..Cost::free() }
    }
    /// Returns true if and only if all individual resource counts are at zero or below. If a cost is initialised as a
    /// Cost object and then available resources are subtracted from it, then this returns true if there were enough
    /// resources to afford the cost.
    pub fn satisfied(&self) -> bool {
        [
            self.coins,
            self.wood,
            self.stone,
            self.ore,
            self.clay,
            self.glass,
            self.loom,
            self.papyrus,
        ]
        .iter()
        .all(|&amount| amount <= 0)
    }
    /// Returns true if and only if this cost includes at least one of the given resource.
    pub fn has(&self, resource: &Resource) -> bool {
        let amount = match resource {
            Resource::Wood => self.wood,
            Resource::Stone => self.stone,
            Resource::Ore => self.ore,
            Resource::Clay => self.clay,
            Resource::Glass => self.glass,
            Resource::Loom => self.loom,
            Resource::Papyrus => self.papyrus,
        };
        amount > 0
    }
}
/// Removes one unit of the given resource from the cost. Counts may go
/// negative; `satisfied` treats anything at or below zero as paid for.
impl SubAssign<&Resource> for Cost {
    fn sub_assign(&mut self, resource: &Resource) {
        let slot = match resource {
            Resource::Wood => &mut self.wood,
            Resource::Stone => &mut self.stone,
            Resource::Ore => &mut self.ore,
            Resource::Clay => &mut self.clay,
            Resource::Glass => &mut self.glass,
            Resource::Loom => &mut self.loom,
            Resource::Papyrus => &mut self.papyrus,
        };
        *slot -= 1;
    }
}
/// Example formatting: `2 wood, 1 glass, 1 papyrus`
impl Display for Cost {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // Coins come first (pluralized); resources follow in declaration
        // order, each only when its count is positive.
        let mut parts: Vec<String> = Vec::new();
        if self.coins > 0 {
            parts.push(plural(self.coins, "coin"));
        }
        let counted = [
            (self.wood, "wood"),
            (self.stone, "stone"),
            (self.ore, "ore"),
            (self.clay, "clay"),
            (self.glass, "glass"),
            (self.loom, "loom"),
            (self.papyrus, "papyrus"),
        ];
        parts.extend(
            counted
                .iter()
                .filter(|(count, _)| *count > 0)
                .map(|(count, name)| format!("{} {}", count, name)),
        );
        if parts.is_empty() {
            write!(f, "free")
        } else {
            write!(f, "{}", parts.join(", "))
        }
    }
}
|
mod canvas;
mod conditional;
mod editor;
mod modal;
mod named;
mod painter;
mod radio;
pub mod notif_bar;
pub use conditional::Conditional;
pub use editor::{Editor, Tool, ToolCtx, ToolKind};
pub use modal::{Modal, ModalContainer};
pub use named::Named;
pub use painter::Painter;
pub use radio::RadioGroup;
|
/// A singly linked stack: `push`/`pop` operate on the head in O(1).
pub struct SimpleLinkedList<T> {
    head: Link<T>,
}
/// A link is either empty or an owning pointer to the next node.
type Link<T> = Option<Box<Node<T>>>;
pub struct Node<T> {
    data: T,
    next: Link<T>,
}
impl<T> SimpleLinkedList<T> {
    /// Creates an empty list.
    pub fn new() -> Self {
        SimpleLinkedList { head: None }
    }
    /// Number of elements; O(n) since no count is cached.
    pub fn len(&self) -> usize {
        let mut length = 0;
        let mut current = &self.head;
        // Idiomatic `while let` walk instead of `is_some()` + `unwrap()`.
        while let Some(node) = current {
            length += 1;
            current = &node.next;
        }
        length
    }
    /// Pushes an element onto the head of the list.
    pub fn push(&mut self, _element: T) {
        let new_node = Box::new(Node {
            data: _element,
            next: self.head.take(),
        });
        self.head = Some(new_node);
    }
    /// Pops the most recently pushed element, if any.
    pub fn pop(&mut self) -> Option<T> {
        self.head.take().map(|node| {
            self.head = node.next;
            node.data
        })
    }
    /// Borrows the element at the head, if any.
    pub fn peek(&self) -> Option<&T> {
        self.head.as_ref().map(|node| &node.data)
    }
}
impl<T: Clone> SimpleLinkedList<T> {
    /// Returns a new list holding clones of the elements in reverse order.
    pub fn rev(&self) -> SimpleLinkedList<T> {
        let mut reversed = SimpleLinkedList::new();
        let mut current = &self.head;
        while let Some(ref node) = *current {
            reversed.push(node.data.clone());
            current = &node.next;
        }
        reversed
    }
}
impl<'a, T: Clone> From<&'a [T]> for SimpleLinkedList<T> {
    /// Builds a list from a slice; the slice's last element ends up at the head.
    fn from(_item: &[T]) -> Self {
        _item
            .iter()
            .fold(SimpleLinkedList::new(), |mut list, data| {
                list.push(data.clone());
                list
            })
    }
}
impl<T> Into<Vec<T>> for SimpleLinkedList<T> {
    /// Converts into a `Vec` in original insertion order (oldest first).
    fn into(mut self) -> Vec<T> {
        let mut vec = vec![];
        // Pop yields newest-first; pushing and reversing once is O(n),
        // whereas the previous `insert(0, _)` per element was O(n^2).
        while let Some(data) = self.pop() {
            vec.push(data);
        }
        vec.reverse();
        vec
    }
}
|
use anyhow::Result;
use std::fmt;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::prelude::*;
use std::mem;
use std::os::unix::io::AsRawFd;
use std::slice;
/// A userspace view of a u-dma-buf kernel DMA buffer: attributes read from
/// sysfs plus the buffer itself mmap'd from `/dev/<name>`.
pub struct DmaBuffer {
    // Device name under /dev and /sys/class/u-dma-buf.
    name: String,
    // Buffer size in bytes (sysfs `size`).
    size: usize,
    // Physical address of the buffer (sysfs `phys_addr`, hex).
    phys_addr: usize,
    // Virtual address of the shared mapping; unmapped on drop.
    buffer: *mut libc::c_void,
    // Raw sysfs `sync_mode` flag (true when the attribute is not "0").
    sync_mode: bool,
    // Raw sysfs `debug_vma` flag (true when the attribute is not "0").
    debug_vma: bool,
}
impl fmt::Debug for DmaBuffer {
    // Manual Debug so the numeric fields print in hex and the output forms a
    // stable multi-line report (last line intentionally has no newline).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "DmaBuffer ({})", &self.name)?;
        writeln!(f, " size: {:#x?}", &self.size)?;
        writeln!(f, " phys_addr: {:#x?}", &self.phys_addr)?;
        writeln!(f, " buffer: {:?}", &self.buffer)?;
        writeln!(f, " sync_mode: {:?}", &self.sync_mode)?;
        write!(f, " debug_vma: {:?}", &self.debug_vma)
    }
}
impl DmaBuffer {
    /// Reads one u-dma-buf sysfs attribute and returns its trimmed contents.
    /// (Factored out of `new`, which previously repeated this four times.)
    fn read_sysfs_attr(name: &str, attr: &str) -> Result<String> {
        let path = format!("/sys/class/u-dma-buf/{}/{}", name, attr);
        let mut file = File::open(path)?;
        let mut buff = String::new();
        file.read_to_string(&mut buff)?;
        Ok(buff.trim().to_string())
    }
    /// Opens the u-dma-buf device `name`: reads its attributes from sysfs and
    /// maps the buffer read/write into this process' address space.
    ///
    /// # Errors
    /// Fails if a sysfs attribute is missing or unparseable, the device node
    /// cannot be opened, or the mmap fails.
    pub fn new(name: &str) -> Result<DmaBuffer> {
        // phys_addr is exposed as a hex string like "0x1f000000".
        let phys = Self::read_sysfs_attr(name, "phys_addr")?;
        let phys_addr = usize::from_str_radix(phys.trim_start_matches("0x"), 16)?;
        let size = Self::read_sysfs_attr(name, "size")?.parse::<usize>()?;
        // Both flags are reported as "0"/non-"0".
        let debug_vma = Self::read_sysfs_attr(name, "debug_vma")? != "0";
        let sync_mode = Self::read_sysfs_attr(name, "sync_mode")? != "0";
        let dev = format!("/dev/{}", name);
        let dev = OpenOptions::new().read(true).write(true).open(dev)?;
        let buffer;
        unsafe {
            // SAFETY: null hint, length taken from the driver's own sysfs
            // `size`, and a valid open fd; the result is checked against
            // MAP_FAILED before use. The mapping outlives `dev` being closed.
            buffer = libc::mmap(
                std::ptr::null_mut::<libc::c_void>(),
                size,
                libc::PROT_READ | libc::PROT_WRITE,
                libc::MAP_SHARED,
                dev.as_raw_fd(),
                0,
            );
            if buffer == libc::MAP_FAILED {
                anyhow::bail!("mapping dma buffer into virtual memory failed");
            }
        }
        Ok(DmaBuffer {
            name: name.to_string(),
            size,
            phys_addr,
            buffer,
            sync_mode,
            debug_vma,
        })
    }
    /// Reinterprets the mapped buffer as a mutable slice of `T`, truncated to
    /// whole elements.
    ///
    /// NOTE(review): handing out `&mut` from `&self` lets callers create
    /// aliasing mutable slices — confirm call sites serialize access.
    #[allow(clippy::mut_from_ref)]
    pub fn slice<T>(&self) -> &mut [T] {
        // SAFETY: `buffer` is a live mapping of `size` bytes for as long as
        // `self` exists; the element count is derived from that size.
        unsafe { slice::from_raw_parts_mut(self.buffer as *mut T, self.size / mem::size_of::<T>()) }
    }
    /// Device name this buffer was opened from.
    pub fn name(&self) -> &str {
        &self.name
    }
    /// Buffer size in bytes.
    pub fn size(&self) -> usize {
        self.size
    }
    /// Physical address reported by the driver.
    pub fn phys_addr(&self) -> usize {
        self.phys_addr
    }
    /// Raw virtual address of the mapping.
    pub fn buffer(&self) -> *mut libc::c_void {
        self.buffer
    }
    /// Raw sysfs `sync_mode` flag.
    pub fn sync_mode(&self) -> bool {
        self.sync_mode
    }
    /// Raw sysfs `debug_vma` flag.
    pub fn debug_vma(&self) -> bool {
        self.debug_vma
    }
}
impl Drop for DmaBuffer {
    fn drop(&mut self) {
        // Unmap the buffer when the handle goes away. munmap's return value
        // is ignored: there is no reasonable recovery inside drop.
        unsafe {
            libc::munmap(self.buffer, self.size);
        }
    }
}
// SAFETY(review): these assert that the raw `buffer` pointer may be used
// across threads. That seems plausible for a shared device mapping, but
// `slice()` can hand out overlapping `&mut` views — confirm that callers
// synchronize access before relying on Sync.
unsafe impl Send for DmaBuffer {}
unsafe impl Sync for DmaBuffer {}
|
use std::collections::HashMap;
/// Entry point for this chapter's demos: Vec examples, then String examples.
pub fn test() {
    demo_vec();
    demo_string();
}
/// Runs the three Vec demos: access, iteration, and enum-typed elements.
fn demo_vec() {
    demo_vec_access();
    demo_vec_iteration();
    demo_vec_enum();
}
/// Demonstrates element access on a Vec: checked `get`/`first`, panicking
/// indexing, and branching on the returned Option.
fn demo_vec_access() {
    let v = vec![1, 2, 3, 4, 5];
    let _first = v.first();
    let _third: &i32 = &v[2];
    let v_index = 2;
    if v.get(v_index).is_some() {
        println!("Reachable element at index: {}", v_index);
    } else {
        println!("Unreachable element at index: {}", v_index);
    }
    // `&v[100]` would panic, while `v.get(100)` just returns None.
    // Pushing to `v` here would not compile while the borrows above are live.
}
/// Demonstrates shared, mutable, and repeated iteration over a Vec.
fn demo_vec_iteration() {
    let mut v = vec![100, 32, 57];
    // Print the original values.
    v.iter().for_each(|value| println!("{}", value));
    // Bump every element in place through mutable references.
    v.iter_mut().for_each(|value| *value += 50);
    // Print the updated values.
    v.iter().for_each(|value| println!("{}", value));
}
/// A Vec holds one element type, but an enum lets that type carry mixed
/// payloads (int, float, text) in the same row.
fn demo_vec_enum() {
    enum SpreadsheetCell {
        Int(i32),
        Float(f64),
        Text(String),
    }
    let _row = vec![
        SpreadsheetCell::Int(3),
        SpreadsheetCell::Text("blue".to_string()),
        SpreadsheetCell::Float(10.12),
    ];
}
/// Runs the four String demos: create, mutate, concatenate, index.
fn demo_string() {
    demo_string_create();
    demo_string_mutate();
    demo_string_concatenate();
    demo_string_index();
}
/// The equivalent ways to create an owned String from literals.
fn demo_string_create() {
    let _s = String::new();
    let data = "initial contents";
    let _s2 = data.to_string();
    let _s3 = String::from("initial contents");
    let _s4 = "initial contents".to_owned();
}
/// Growing a String in place: appending &str slices and single chars.
fn demo_string_mutate() {
    let mut sfoo = String::from("foo");
    sfoo += "bar"; // `+=` appends a &str, equivalent to push_str
    println!("sfoo is {}", sfoo);
    let mut sfoo2 = String::from("foo");
    let sbar = "bar";
    sfoo2 += sbar;
    println!("sfoo2 is {}", sfoo2);
    let mut slol = String::from("lo");
    slol.push('l'); // push appends exactly one char
    println!("slol is {}", slol);
}
/// String concatenation: `+` (which moves its left operand) versus `format!`
/// (which only borrows its arguments).
fn demo_string_concatenate() {
    let s1 = String::from("Hello, ");
    let s2 = String::from("world!");
    let s3 = s1 + &s2; // s1 is moved into the sum; s2 is only borrowed
    println!("s2 is {} and s3 is {}", s2, s3);
    let s4 = format!("{}{}", s3, " Is anyone there?"); // format! leaves s3 usable
    println!("s4 is {}", s4);
    let s5 = s3.clone() + " Is anyone there?"; // cloning keeps the original alive
    println!("s5 is {}", s5);
}
/// Why a String cannot be indexed by integer, plus its byte and char views.
fn demo_string_index() {
    let _s1 = String::from("hello");
    // Direct indexing (`_s1[0]`) is rejected: a String wraps Vec<u8>, and a
    // byte index could land inside a multi-byte UTF-8 code point.
    let _s2 = String::from("नमस्ते");
    // The same text has different lengths seen as bytes, chars, or graphemes.
    let s3 = String::from("Здравствуйте");
    // Byte-range slicing is allowed; 0..4 covers the first two 2-byte chars.
    let s4 = &s3[0..4];
    println!("s4 is {}", s4);
    // A range that splits a code point (e.g. 0..1 here) panics at runtime.
    let mut chars = String::from("[");
    for c in s3.chars() {
        chars.push_str(&format!("{}, ", c));
    }
    chars.push(']');
    println!("chars is {}", chars);
    let mut bytes = String::from("[");
    for b in s3.bytes() {
        bytes.push_str(&format!("{}, ", b));
    }
    bytes.push(']');
    println!("bytes is {}", bytes);
}
#[cfg(test)]
mod test; |
/*
* Copyright Stalwart Labs Ltd. See the COPYING
* file at the top-level directory of this distribution.
*
* Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
* https://www.apache.org/licenses/LICENSE-2.0> or the MIT license
* <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your
* option. This file may not be copied, modified, or distributed
* except according to those terms.
*/
pub mod get;
pub mod helpers;
pub mod query;
pub mod set;
use ahash::AHashMap;
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use crate::{
core::{changes::ChangesObject, Object},
email::Email,
Get, Set,
};
/// Extra set-request arguments: follow-up email updates/destroys to apply
/// when a submission succeeds.
#[derive(Debug, Clone, Serialize, Default)]
pub struct SetArguments {
    #[serde(rename = "onSuccessUpdateEmail")]
    #[serde(skip_serializing_if = "Option::is_none")]
    on_success_update_email: Option<AHashMap<String, Email<Set>>>,
    #[serde(rename = "onSuccessDestroyEmail")]
    #[serde(skip_serializing_if = "Option::is_none")]
    on_success_destroy_email: Option<Vec<String>>,
}
/// An email submission object. `State` is a compile-time Get/Set marker; all
/// fields are optional so partial server responses deserialize cleanly. The
/// serde renames appear to follow the JMAP EmailSubmission wire format.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EmailSubmission<State = Get> {
    // Local bookkeeping only; never serialized.
    #[serde(skip)]
    _create_id: Option<usize>,
    #[serde(skip)]
    _state: std::marker::PhantomData<State>,
    #[serde(rename = "id")]
    #[serde(skip_serializing_if = "Option::is_none")]
    id: Option<String>,
    #[serde(rename = "identityId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    identity_id: Option<String>,
    #[serde(rename = "emailId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    email_id: Option<String>,
    #[serde(rename = "threadId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    thread_id: Option<String>,
    #[serde(rename = "envelope")]
    #[serde(skip_serializing_if = "Option::is_none")]
    envelope: Option<Envelope>,
    #[serde(rename = "sendAt")]
    #[serde(skip_serializing_if = "Option::is_none")]
    send_at: Option<DateTime<Utc>>,
    #[serde(rename = "undoStatus")]
    #[serde(skip_serializing_if = "Option::is_none")]
    undo_status: Option<UndoStatus>,
    #[serde(rename = "deliveryStatus")]
    #[serde(skip_serializing_if = "Option::is_none")]
    delivery_status: Option<AHashMap<String, DeliveryStatus>>,
    #[serde(rename = "dsnBlobIds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    dsn_blob_ids: Option<Vec<String>>,
    #[serde(rename = "mdnBlobIds")]
    #[serde(skip_serializing_if = "Option::is_none")]
    mdn_blob_ids: Option<Vec<String>>,
}
/// SMTP-style envelope: one sender and any number of recipients.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Envelope {
    #[serde(rename = "mailFrom")]
    mail_from: Address,
    #[serde(rename = "rcptTo")]
    rcpt_to: Vec<Address>,
}
/// An envelope address with optional per-address parameters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Address<State = Get> {
    #[serde(skip)]
    _state: std::marker::PhantomData<State>,
    email: String,
    parameters: Option<AHashMap<String, Option<String>>>,
}
/// Whether a submission is still pending, finished, or was canceled.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum UndoStatus {
    #[serde(rename = "pending")]
    Pending,
    #[serde(rename = "final")]
    Final,
    #[serde(rename = "canceled")]
    Canceled,
}
/// Per-recipient delivery information.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct DeliveryStatus {
    #[serde(rename = "smtpReply")]
    smtp_reply: String,
    #[serde(rename = "delivered")]
    delivered: Delivered,
    #[serde(rename = "displayed")]
    displayed: Displayed,
}
/// Delivery state reported for a recipient.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum Delivered {
    #[serde(rename = "queued")]
    Queued,
    #[serde(rename = "yes")]
    Yes,
    #[serde(rename = "no")]
    No,
    #[serde(rename = "unknown")]
    Unknown,
}
/// Whether a display (read) notification was reported for a recipient.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum Displayed {
    #[serde(rename = "unknown")]
    Unknown,
    #[serde(rename = "yes")]
    Yes,
}
/// The queryable/fetchable properties of an EmailSubmission; serde renames
/// give the wire names used by `Display` below.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, Copy)]
pub enum Property {
    #[serde(rename = "id")]
    Id,
    #[serde(rename = "identityId")]
    IdentityId,
    #[serde(rename = "emailId")]
    EmailId,
    #[serde(rename = "threadId")]
    ThreadId,
    #[serde(rename = "envelope")]
    Envelope,
    #[serde(rename = "sendAt")]
    SendAt,
    #[serde(rename = "undoStatus")]
    UndoStatus,
    #[serde(rename = "deliveryStatus")]
    DeliveryStatus,
    #[serde(rename = "dsnBlobIds")]
    DsnBlobIds,
    #[serde(rename = "mdnBlobIds")]
    MdnBlobIds,
}
impl Display for Property {
    // Writes the property's wire name (identical to its serde rename).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(match self {
            Property::Id => "id",
            Property::IdentityId => "identityId",
            Property::EmailId => "emailId",
            Property::ThreadId => "threadId",
            Property::Envelope => "envelope",
            Property::SendAt => "sendAt",
            Property::UndoStatus => "undoStatus",
            Property::DeliveryStatus => "deliveryStatus",
            Property::DsnBlobIds => "dsnBlobIds",
            Property::MdnBlobIds => "mdnBlobIds",
        })
    }
}
// Both type states participate in the core object machinery; submissions are
// always account-scoped.
impl Object for EmailSubmission<Set> {
    type Property = Property;
    fn requires_account_id() -> bool {
        true
    }
}
impl Object for EmailSubmission<Get> {
    type Property = Property;
    fn requires_account_id() -> bool {
        true
    }
}
// Change tracking carries no extra response payload for this object type.
impl ChangesObject for EmailSubmission<Set> {
    type ChangesResponse = ();
}
impl ChangesObject for EmailSubmission<Get> {
    type ChangesResponse = ();
}
|
/// Runtime bot configuration pulled from environment variables at startup.
pub struct Parameters {
    // BOT_NAME
    pub bot_name: String,
    // PRO_CHAT_ID (numeric Telegram chat id)
    pub pro_chat_id: teloxide::types::ChatId,
    // PRO_CHAT_USERNAME
    pub pro_chat_username: String,
    // SUPAPRO_CHAT_ID (numeric Telegram chat id)
    pub supapro_chat_id: teloxide::types::ChatId,
    // SUPAPRO_CHAT_USERNAME
    pub supapro_chat_username: String,
    // WEBHOOK_MODE, defaults to false when unset
    pub is_webhook_mode_enabled: bool,
}
impl Parameters {
    /// Reads all bot configuration from environment variables, panicking with
    /// a descriptive message when a required variable is missing or invalid.
    pub fn new() -> Self {
        let bot_name = std::env::var("BOT_NAME").expect("BOT_NAME env var is not specified");
        let pro_chat_id = teloxide::types::ChatId(
            std::env::var("PRO_CHAT_ID")
                // Fixed copy-paste bug: this message previously blamed
                // SUPAPRO_CHAT_ID, pointing users at the wrong variable.
                .expect("PRO_CHAT_ID env var is not specified")
                .parse()
                .expect("Cannot parse as i64"),
        );
        let pro_chat_username: String =
            std::env::var("PRO_CHAT_USERNAME").expect("PRO_CHAT_USERNAME env var is not specified");
        let supapro_chat_id = teloxide::types::ChatId(
            std::env::var("SUPAPRO_CHAT_ID")
                .expect("SUPAPRO_CHAT_ID env var is not specified")
                .parse()
                .expect("Cannot parse as i64"),
        );
        let supapro_chat_username: String = std::env::var("SUPAPRO_CHAT_USERNAME")
            .expect("SUPAPRO_CHAT_USERNAME env var is not specified");
        // unwrap_or_else avoids allocating the default String eagerly.
        let is_webhook_mode_enabled: bool = std::env::var("WEBHOOK_MODE")
            .unwrap_or_else(|_| "false".to_string())
            .parse()
            .expect(
                "Cannot convert WEBHOOK_MODE to bool. Applicable values are only \"true\" or \"false\"",
            );
        Self {
            bot_name,
            pro_chat_id,
            pro_chat_username,
            supapro_chat_id,
            supapro_chat_username,
            is_webhook_mode_enabled,
        }
    }
}
|
use crate::*;
use axum::http::Uri;
use miette::Result;
use uuid::Uuid;
/// Credentials and identifiers for the GitHub App integration.
#[derive(Debug, Clone)]
pub(crate) struct GithubConfig {
    // GITHUB_APP_ID
    pub(crate) app_id: u64,
    // GITHUB_APP_CLIENT_ID
    pub(crate) client_id: String,
    // GITHUB_APP_CLIENT_SECRET — treat as sensitive.
    pub(crate) client_secret: String,
}
impl GithubConfig {
    /// Builds the config from `GITHUB_APP_ID`, `GITHUB_APP_CLIENT_ID`, and
    /// `GITHUB_APP_CLIENT_SECRET`, converting missing-variable and parse
    /// errors into diagnostics.
    #[instrument]
    pub(crate) fn from_env() -> Result<Self> {
        Ok(Self {
            app_id: std::env::var("GITHUB_APP_ID")
                .into_diagnostic()?
                .parse()
                .into_diagnostic()?,
            client_id: std::env::var("GITHUB_APP_CLIENT_ID").into_diagnostic()?,
            client_secret: std::env::var("GITHUB_APP_CLIENT_SECRET").into_diagnostic()?,
        })
    }
}
/// Starts the GitHub OAuth flow for `user_id`: persists a fresh random
/// `state` row (for the callback to validate) and returns the authorize URL.
pub(crate) async fn generate_user_github_link(config: &AppState, user_id: i64) -> Result<Uri> {
    let client_id = &config.github.client_id;
    let redirect_uri = github_redirect_uri(config);
    // Random per-request token tying the eventual callback to this user.
    let state = Uuid::new_v4().to_string();
    sqlx::query!(
        "INSERT INTO UserGithubLinkStates (user_id, state) VALUES (?, ?)",
        user_id,
        state,
    )
    .execute(&config.db_pool)
    .await
    .into_diagnostic()?;
    // NOTE(review): `redirect_uri` is interpolated without percent-encoding;
    // confirm the configured base URL never contains query-reserved chars.
    Uri::builder()
        .scheme("https")
        .authority("github.com")
        .path_and_query(format!("/login/oauth/authorize?client_id={client_id}&redirect_uri={redirect_uri}&state={state}"))
        .build().into_diagnostic()
}
/// The OAuth callback URL advertised to GitHub, derived from the app's
/// configured base URL.
pub(crate) fn github_redirect_uri(config: &AppState) -> String {
    format!("{}/github_oauth", config.app.base_url)
}
|
// ClientConfig
#![forbid(unsafe_code)]
#![deny(missing_docs)]
use super::{
ClientMode,
Region,
};
#[cfg(feature = "s3")]
use super::ObjectVersions;
/// Client configuration.
///
/// Which fields exist depends on compile-time features; see each field.
#[derive(Debug)]
pub struct ClientConfig {
    /// The bucket name that the client should report the size of.
    ///
    /// If this isn't given, all discovered S3 buckets will have their sizes
    /// reported.
    pub bucket_name: Option<String>,
    /// The mode that `s3du` will run in.
    ///
    /// This selects which AWS client will be used.
    pub mode: ClientMode,
    /// The region that our AWS client should be created in.
    ///
    /// This will affect bucket discovery.
    pub region: Region,
    /// The S3 object versions that should be used when calculating the bucket
    /// size.
    ///
    /// This only has an effect when running in S3 mode and the field will only
    /// be present when compiled with the `s3` feature.
    #[cfg(feature = "s3")]
    pub object_versions: ObjectVersions,
    /// The S3 Endpoint that we're going to connect to for bucket operations.
    ///
    /// This only has an effect when running in S3 mode and the field will only
    /// be present when compiled with the `s3` feature.
    #[cfg(feature = "s3")]
    pub endpoint: Option<String>,
}
impl Default for ClientConfig {
    /// Returns a default `ClientConfig`.
    ///
    /// If compiled with the `cloudwatch` feature, `CloudWatch` will be the
    /// default `ClientMode` (even when `s3` is also enabled); otherwise `S3`
    /// will be the default.
    ///
    /// If compiled without the `s3` feature, the `endpoint` and
    /// `object_versions` fields will be absent.
    ///
    /// ```ignore
    /// ClientConfig {
    ///     bucket_name: None,
    ///     endpoint: None,
    ///     mode: ClientMode::CloudWatch,
    ///     object_versions: ObjectVersions::Current,
    ///     region: Region::new(),
    /// }
    /// ```
    fn default() -> Self {
        // `cloudwatch` wins when both features are enabled; with neither
        // feature, `mode` is undefined and compilation fails by design.
        #[cfg(feature = "cloudwatch")]
        let mode = ClientMode::CloudWatch;
        #[cfg(all(feature = "s3", not(feature = "cloudwatch")))]
        let mode = ClientMode::S3;
        let region = Region::new();
        Self {
            bucket_name: None,
            // Field-init shorthand replaces the redundant `mode: mode` /
            // `region: region`.
            mode,
            region,
            #[cfg(feature = "s3")]
            endpoint: None,
            #[cfg(feature = "s3")]
            object_versions: ObjectVersions::Current,
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.