text stringlengths 8 4.13M |
|---|
struct Solution();
impl Solution {
    /// Returns the sum of three numbers in `nums` that is closest to `target`.
    ///
    /// Sorts the input, then for each pivot `i` runs a two-pointer sweep over
    /// the remaining suffix, tracking the sum with the smallest absolute
    /// distance to `target`. Returns immediately on an exact match.
    ///
    /// Returns 0 when `nums` has fewer than three elements (no triple
    /// exists), matching the original behavior.
    pub fn three_sum_closest(nums: Vec<i32>, target: i32) -> i32 {
        let mut nums = nums;
        nums.sort_unstable(); // sorting makes the two-pointer sweep valid
        let n = nums.len();
        let mut best_diff = i32::MAX;
        let mut best_sum = 0;
        for i in 0..n {
            let mut lo = i + 1;
            let mut hi = n - 1; // safe: this body only runs when n >= 1
            while lo < hi {
                let sum = nums[i] + nums[lo] + nums[hi];
                // An exact match cannot be beaten; return it right away.
                if sum == target {
                    return sum;
                }
                let diff = (sum - target).abs();
                if diff < best_diff {
                    best_diff = diff;
                    best_sum = sum;
                }
                if sum > target {
                    hi -= 1; // sum too large: shrink from the right
                } else {
                    lo += 1; // sum too small: grow from the left
                }
            }
        }
        best_sum
    }
}
fn main(){
println!("{}",Solution::three_sum_closest(vec![-1,2,1,-4],1));
} |
extern crate mo_gc;
use mo_gc::{Gc, GcRoot, GcThread, StatsLogger, Trace, TraceOps, TraceStack};
/// One link of a singly linked chain of GC-managed nodes.
struct Segment {
    // `Gc::null()` marks the end of the chain.
    next: Gc<Segment>,
}
impl Segment {
    // Creates a detached segment with no successor.
    fn new() -> Segment {
        Segment {
            next: Gc::null()
        }
    }
    // Points this segment's `next` link at `to`.
    fn join_to(&mut self, to: Gc<Segment>) {
        self.next = to;
    }
}
unsafe impl Trace for Segment {
    // Segments hold GC pointers, so the collector must traverse them.
    fn traversible(&self) -> bool {
        true
    }
    // Pushes the successor (if any) onto the collector's trace stack.
    // NOTE(review): assumes `as_raw` yields `None` for a null `Gc` — confirm
    // against the mo_gc API.
    unsafe fn trace(&self, heap: &mut TraceStack) {
        if let Some(ptr) = self.next.as_raw() {
            heap.push_to_trace(&*ptr);
        }
    }
}
/// A chain of `Segment`s tracked by its first and last links.
struct Balloon {
    head: Gc<Segment>,
    tail: Gc<Segment>,
}
impl Balloon {
    // Creates a one-segment balloon; head and tail are the same node.
    fn inflate() -> Balloon {
        let body = Gc::new(Segment::new());
        Balloon {
            head: body,
            tail: body,
        }
    }
    // Prepends a freshly allocated segment, making it the new head.
    fn twist(&mut self) {
        let mut new_seg = Gc::new(Segment::new());
        new_seg.join_to(self.head);
        self.head = new_seg;
    }
    // Closes the chain into a cycle by linking the tail back to the head.
    fn complete(&mut self) {
        self.tail.next = self.head;
    }
    // Walks from head until hitting tail and reports if any segments were
    // lost (e.g. collected prematurely); expects the 1000 `twist`ed links.
    // NOTE(review): only safe after `complete()` — on an open chain the walk
    // would run off the null `next` of the tail.
    fn count(&mut self) {
        let mut count = 0;
        let mut current = self.head;
        loop {
            current = current.next;
            count += 1;
            if current.is(self.tail) {
                break;
            }
        }
        if count != 1000 {
            println!("snake is short - only {} segments", count);
        }
    }
}
unsafe impl Trace for Balloon {
    fn traversible(&self) -> bool {
        true
    }
    // Only the head is pushed; segments reach their successors via their own
    // `trace` impl.
    unsafe fn trace(&self, heap: &mut TraceStack) {
        heap.push_to_trace(&*self.head as &Trace);
    }
}
// Stress workload: repeatedly builds large cyclic structures and drops the
// roots, giving the collector plenty of garbage to reclaim.
fn snake() {
    // this many snake balloons
    for _snake in 0..5000 {
        let mut balloon = GcRoot::new(Balloon::inflate());
        // with this many segments each
        for _segment in 0..1000 {
            balloon.twist();
        }
        balloon.complete();
        balloon.count();
    }
}
fn main() {
    // Start the collector thread and run the mutator workload on another.
    let gc = GcThread::spawn_gc();
    let snake_handle = gc.spawn(|| snake());
    // NOTE(review): the GC is joined before the mutator handle — presumably
    // `gc.join()` itself waits for spawned mutators to finish; confirm
    // against the mo_gc API.
    let logger = gc.join().expect("gc failed");
    logger.dump_to_stdout();
    snake_handle.join().expect("snake failed");
}
|
use super::{AssetId, *};
use codec::{Decode, Encode};
use cumulus_primitives_core::ParaId;
use frame_support::traits::{Everything, Nothing};
pub use orml_xcm_support::{IsNativeConcrete, MultiCurrencyAdapter, MultiNativeAsset};
use pallet_xcm::XcmPassthrough;
use polkadot_parachain::primitives::Sibling;
use polkadot_xcm::latest::prelude::*;
use polkadot_xcm::latest::Error;
use sp_runtime::traits::Convert;
use xcm_builder::{
AccountId32Aliases, AllowTopLevelPaidExecutionFrom, EnsureXcmOrigin, FixedWeightBounds, LocationInverter,
ParentIsDefault, RelayChainAsNative, SiblingParachainAsNative, SiblingParachainConvertsVia,
SignedAccountId32AsNative, SignedToAccountId32, SovereignSignedViaLocation, TakeWeightCredit,
};
use xcm_executor::traits::WeightTrader;
use xcm_executor::{Assets, Config, XcmExecutor};
/// Converts a local `Origin::Signed` into an `AccountId32` XCM junction.
pub type LocalOriginToLocation = SignedToAccountId32<Origin, AccountId, RelayNetwork>;
/// Execution barrier: honor weight credit, and allow paid execution from any origin.
pub type Barrier = (TakeWeightCredit, AllowTopLevelPaidExecutionFrom<Everything>);
parameter_types! {
    // This chain's own location as seen from the relay (parent, own para id).
    pub SelfLocation: MultiLocation = MultiLocation::new(1, X1(Parachain(ParachainInfo::get().into())));
}
parameter_types! {
    pub const RelayNetwork: NetworkId = NetworkId::Kusama;
    pub RelayChainOrigin: Origin = cumulus_pallet_xcm::Origin::Relay.into();
    // Our location in the relay's frame of reference, used by `LocationInverter`.
    pub Ancestry: MultiLocation = Parachain(ParachainInfo::parachain_id().into()).into();
}
/// This is the type we use to convert an (incoming) XCM origin into a local `Origin` instance,
/// ready for dispatching a transaction with Xcm's `Transact`. There is an `OriginKind` which can
/// bias the kind of local `Origin` it will become.
pub type XcmOriginToCallOrigin = (
    // Sovereign account converter; this attempts to derive an `AccountId` from the origin location
    // using `LocationToAccountId` and then turn that into the usual `Signed` origin. Useful for
    // foreign chains who want to have a local sovereign account on this chain which they control.
    SovereignSignedViaLocation<LocationToAccountId, Origin>,
    // Native converter for Relay-chain (Parent) location; will convert to a `Relay` origin when
    // recognized.
    RelayChainAsNative<RelayChainOrigin, Origin>,
    // Native converter for sibling Parachains; will convert to a `SiblingPara` origin when
    // recognized.
    SiblingParachainAsNative<cumulus_pallet_xcm::Origin, Origin>,
    // Native signed account converter; this just converts an `AccountId32` origin into a normal
    // `Origin::Signed` origin of the same 32-byte value.
    SignedAccountId32AsNative<RelayNetwork, Origin>,
    // Xcm origins can be represented natively under the Xcm pallet's Xcm origin.
    XcmPassthrough<Origin>,
);
parameter_types! {
    /// The amount of weight an XCM operation takes. This is a safe overestimate.
    pub const BaseXcmWeight: Weight = 100_000_000;
}
/// Weight trader that charges nothing: any payment buys any weight.
/// NOTE(review): this makes XCM execution effectively free — acceptable for
/// a testnet, but it should be replaced with a real trader before fees matter.
pub struct TradePassthrough();
impl WeightTrader for TradePassthrough {
    fn new() -> Self {
        Self()
    }
    fn buy_weight(&mut self, _weight: Weight, payment: Assets) -> Result<Assets, Error> {
        // Just let it through for now
        Ok(payment)
    }
}
/// XCM executor configuration for this runtime.
pub struct XcmConfig;
impl Config for XcmConfig {
    type Call = Call;
    type XcmSender = XcmRouter;
    type AssetTransactor = LocalAssetTransactor;
    type OriginConverter = XcmOriginToCallOrigin;
    // Trust reserve-based transfers of any chain's native asset.
    type IsReserve = MultiNativeAsset;
    type IsTeleporter = (); // disabled
    type LocationInverter = LocationInverter<Ancestry>;
    type Barrier = Barrier;
    type Weigher = FixedWeightBounds<BaseXcmWeight, Call>;
    // See `TradePassthrough`: execution is currently not charged for.
    type Trader = TradePassthrough;
    type ResponseHandler = (); // Don't handle responses for now.
    type SubscriptionService = PolkadotXcm;
}
// Pallet wiring below: each cumulus/orml pallet is pointed at the same
// `XcmExecutor<XcmConfig>` instance.
impl cumulus_pallet_xcm::Config for Runtime {
    type Event = Event;
    type XcmExecutor = XcmExecutor<XcmConfig>;
}
impl cumulus_pallet_xcmp_queue::Config for Runtime {
    type Event = Event;
    type XcmExecutor = XcmExecutor<XcmConfig>;
    type ChannelInfo = ParachainSystem;
    type VersionWrapper = ();
}
impl cumulus_pallet_dmp_queue::Config for Runtime {
    type Event = Event;
    type XcmExecutor = XcmExecutor<XcmConfig>;
    type ExecuteOverweightOrigin = EnsureRoot<AccountId>;
}
impl orml_xtokens::Config for Runtime {
    type Event = Event;
    type Balance = Balance;
    type CurrencyId = AssetId;
    type CurrencyIdConvert = CurrencyIdConvert;
    type AccountIdToMultiLocation = AccountIdToMultiLocation;
    type SelfLocation = SelfLocation;
    type XcmExecutor = XcmExecutor<XcmConfig>;
    type Weigher = FixedWeightBounds<BaseXcmWeight, Call>;
    type BaseXcmWeight = BaseXcmWeight;
    type LocationInverter = LocationInverter<Ancestry>;
}
impl orml_unknown_tokens::Config for Runtime {
    type Event = Event;
}
impl pallet_xcm::Config for Runtime {
    type Event = Event;
    type SendXcmOrigin = EnsureXcmOrigin<Origin, LocalOriginToLocation>;
    type XcmRouter = XcmRouter;
    type ExecuteXcmOrigin = EnsureXcmOrigin<Origin, LocalOriginToLocation>;
    type XcmExecuteFilter = Everything;
    type XcmExecutor = XcmExecutor<XcmConfig>;
    // NOTE(review): `()` here vs. `Nothing` below — presumably both deny
    // everything; confirm the intent and unify the style.
    type XcmTeleportFilter = ();
    type XcmReserveTransferFilter = Nothing;
    type Weigher = FixedWeightBounds<BaseXcmWeight, Call>;
    type LocationInverter = LocationInverter<Ancestry>;
}
/// Converts between this runtime's `AssetId`s and XCM `MultiLocation`s.
// Note: stub implementation
pub struct CurrencyIdConvert;
impl Convert<AssetId, Option<MultiLocation>> for CurrencyIdConvert {
    fn convert(id: AssetId) -> Option<MultiLocation> {
        if id == 0 {
            // The native asset lives under this parachain's own location,
            // keyed by its SCALE-encoded id.
            Some(MultiLocation::new(
                1,
                X2(Parachain(ParachainInfo::get().into()), GeneralKey(id.encode())),
            ))
        } else {
            // Every other asset is looked up in the asset registry.
            AssetRegistry::asset_to_location(id).map(|loc| loc.0)
        }
    }
}
impl Convert<MultiLocation, Option<AssetId>> for CurrencyIdConvert {
    fn convert(location: MultiLocation) -> Option<AssetId> {
        match location {
            // `../Parachain(us)/GeneralKey(id)` refers to one of our own
            // assets; decode the id back out of the general key.
            MultiLocation {
                parents,
                interior: X2(Parachain(id), GeneralKey(key)),
            } if parents == 1 && ParaId::from(id) == ParachainInfo::get() => {
                // Handling native asset for this parachain
                if let Ok(currency_id) = AssetId::decode(&mut &key[..]) {
                    // we currently have only one native asset
                    match currency_id {
                        0 => Some(currency_id),
                        _ => None,
                    }
                } else {
                    None
                }
            }
            // delegate to asset-registry
            _ => AssetRegistry::location_to_asset(AssetLocation(location)),
        }
    }
}
impl Convert<MultiAsset, Option<AssetId>> for CurrencyIdConvert {
    /// Maps a concrete `MultiAsset` to an `AssetId` via its location;
    /// abstract assets yield `None`.
    fn convert(asset: MultiAsset) -> Option<AssetId> {
        match asset {
            MultiAsset {
                id: Concrete(location),
                ..
            } => Self::convert(location),
            _ => None,
        }
    }
}
/// Wraps a local `AccountId` as an `AccountId32` junction under the `Any` network.
pub struct AccountIdToMultiLocation;
impl Convert<AccountId, MultiLocation> for AccountIdToMultiLocation {
    fn convert(account: AccountId) -> MultiLocation {
        X1(AccountId32 {
            network: NetworkId::Any,
            id: account.into(),
        })
        .into()
    }
}
/// The means for routing XCM messages which are not for local execution into the right message
/// queues.
pub type XcmRouter = (
    // Two routers - use UMP to communicate with the relay chain:
    cumulus_primitives_utility::ParentAsUmp<ParachainSystem, ()>,
    // ..and XCMP to communicate with the sibling chains.
    XcmpQueue,
);
/// Type for specifying how a `MultiLocation` can be converted into an `AccountId`. This is used
/// when determining ownership of accounts for asset transacting and when attempting to use XCM
/// `Transact` in order to determine the dispatch Origin.
pub type LocationToAccountId = (
    // The parent (Relay-chain) origin converts to the default `AccountId`.
    ParentIsDefault<AccountId>,
    // Sibling parachain origins convert to AccountId via the `ParaId::into`.
    SiblingParachainConvertsVia<Sibling, AccountId>,
    // Straight up local `AccountId32` origins just alias directly to `AccountId`.
    AccountId32Aliases<RelayNetwork, AccountId>,
);
/// Moves assets between accounts for XCM via the runtime's multi-currency
/// pallet; unrecognized incoming assets are parked in `UnknownTokens`.
pub type LocalAssetTransactor = MultiCurrencyAdapter<
    Currencies,
    UnknownTokens,
    IsNativeConcrete<AssetId, CurrencyIdConvert>,
    AccountId,
    LocationToAccountId,
    AssetId,
    CurrencyIdConvert,
>;
|
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use std::ops::{Deref, DerefMut};
use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer};
pub const NAME: &str = "$__v8_magic_bytestring";
pub const FIELD_PTR: &str = "$__v8_magic_bytestring_ptr";
pub const FIELD_LEN: &str = "$__v8_magic_bytestring_len";
/// A growable byte buffer serialized through serde_v8's "magic bytestring"
/// struct protocol. A thin wrapper over `Vec<u8>` exposing a subset of its API.
#[derive(PartialEq, Eq, Clone, Debug)]
pub struct ByteString(pub Vec<u8>);
impl ByteString {
    /// Creates an empty byte string.
    pub fn new() -> ByteString {
        Self(Vec::new())
    }
    /// Creates an empty byte string with room for `capacity` bytes.
    pub fn with_capacity(capacity: usize) -> ByteString {
        Self(Vec::with_capacity(capacity))
    }
    /// Number of bytes the buffer can hold without reallocating.
    pub fn capacity(&self) -> usize {
        self.0.capacity()
    }
    /// Reserves room for at least `additional` more bytes.
    pub fn reserve(&mut self, additional: usize) {
        self.0.reserve(additional)
    }
    /// Reserves room for exactly `additional` more bytes.
    pub fn reserve_exact(&mut self, additional: usize) {
        self.0.reserve_exact(additional)
    }
    /// Drops excess capacity.
    pub fn shrink_to_fit(&mut self) {
        self.0.shrink_to_fit()
    }
    /// Shortens the buffer to `len` bytes; a no-op when already shorter.
    pub fn truncate(&mut self, len: usize) {
        self.0.truncate(len)
    }
    /// Appends a single byte.
    pub fn push(&mut self, value: u8) {
        self.0.push(value)
    }
    /// Removes and returns the last byte, if any.
    pub fn pop(&mut self) -> Option<u8> {
        self.0.pop()
    }
}
impl Default for ByteString {
fn default() -> Self {
ByteString::new()
}
}
impl Deref for ByteString {
type Target = [u8];
fn deref(&self) -> &[u8] {
self.0.deref()
}
}
impl DerefMut for ByteString {
fn deref_mut(&mut self) -> &mut [u8] {
self.0.deref_mut()
}
}
impl AsRef<[u8]> for ByteString {
fn as_ref(&self) -> &[u8] {
self.0.as_ref()
}
}
impl AsMut<[u8]> for ByteString {
fn as_mut(&mut self) -> &mut [u8] {
self.0.as_mut()
}
}
impl Serialize for ByteString {
    /// Serializes as the magic struct `NAME` carrying the buffer's raw
    /// pointer and length, which the serde_v8 serializer recognizes and
    /// converts without copying through serde's data model.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        use serde::ser::SerializeStruct;
        // NOTE(review): `serialize_struct` is told 1 field but two are
        // written (ptr + len) — presumably intentional for the v8 magic
        // protocol; confirm the receiving deserializer expects this shape.
        let mut s = serializer.serialize_struct(NAME, 1)?;
        s.serialize_field(FIELD_PTR, &(self.0.as_ptr() as usize))?;
        s.serialize_field(FIELD_LEN, &self.0.len())?;
        s.end()
    }
}
impl<'de> Deserialize<'de> for ByteString {
    /// Deserializes from the magic struct `NAME`. Only `visit_byte_buf` is
    /// implemented, so deserializers that do not hand over an owned byte
    /// buffer will error via the default `Visitor` methods.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct ValueVisitor {}
        impl<'de> Visitor<'de> for ValueVisitor {
            type Value = ByteString;
            fn expecting(
                &self,
                formatter: &mut std::fmt::Formatter,
            ) -> std::fmt::Result {
                formatter.write_str("a serde_v8::ByteString")
            }
            fn visit_byte_buf<E>(self, v: Vec<u8>) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                Ok(ByteString(v))
            }
        }
        // Empty field list: presumably the serde_v8 deserializer keys off
        // NAME itself and feeds the bytes straight to the visitor — confirm.
        deserializer.deserialize_struct(NAME, &[], ValueVisitor {})
    }
}
|
pub use self::account_info::AccountInfo;
mod account_info;
pub use self::loans_info::LoansInfo;
mod loans_info;
pub use self::loan::Loan;
mod loan;
pub use self::loan_builder::LoanBuilder;
mod loan_builder;
pub use self::session_token::SessionToken;
mod session_token;
|
/// Character that introduces a comment.
pub const COMMENT_CHAR: char = '-';
|
use crate::{console::{get_curser, getch, set_curser}, keyboard::KeySpecial, print, print_string, utility::memset};
// Maximum number of bytes buffered for a single shell command line.
const CONSOLE_MAXCOMMANDBUFFERSIZE: usize = 300;
// Prompt string printed before each command line.
const CONSOLE_PROMPT: &'static str = ">";
// Handler signature: receives the raw bytes of the command line.
type CommandFunc = fn(&[u8]);
// One entry of the (currently disabled) command dispatch table.
#[repr(C, packed(1))]
struct Command {
    pub command: &'static str,
    pub help: &'static str,
    pub command_function: CommandFunc
}
// Placeholder for parsed command parameters; not yet implemented.
struct Parameter {
}
// static COMMAND_TABLE: &[Command] = &[
// Command{ command:"help", help: "Show Help", command_function:sHelp },
// Command{ command:"cls", help:"Clear Screen", command_function:sCls },
// Command{ command:"totalram", help:"Show Total RAM Size", command_function:sTotalRAMSize },
// Command{ command:"shutdown", help:"Show Total RAM Size", command_function:kShutdown },
// ];
/// Shell main loop. Prints the prompt once; the line-editing and dispatch
/// logic is still commented out, so this currently spins forever doing
/// nothing. NOTE(review): the empty `loop {}` busy-spins the CPU — consider
/// halting or blocking on input once `getch` is wired up.
pub fn start_shell(){
    // let mut buffer: [u8; CONSOLE_MAXCOMMANDBUFFERSIZE] = [0u8; CONSOLE_MAXCOMMANDBUFFERSIZE];
    // let mut buffer_index: usize = 0;
    print!("{}", CONSOLE_PROMPT);
    // let mut key: u8;
    loop {
        // key = getch();
        // if key == KeySpecial::Backspace as u8 {
        // if buffer_index > 0 {
        // let curser = get_curser();
        // print_string(curser.0 as i32 - 1, curser.1 as i32, b" ");
        // set_curser(curser.0 - 1, curser.1);
        // buffer_index -= 1;
        // }
        // } else if key == KeySpecial::Enter as u8 {
        // print!("\n");
        // if buffer_index > 0 {
        // // execute_command(buffer[..buffer_index]);
        // }
        // print!("{}", CONSOLE_PROMPT);
        // memset(buffer.as_mut_ptr(), b'\0', CONSOLE_MAXCOMMANDBUFFERSIZE as isize);
        // } else if key == KeySpecial::Lshift as u8 || key == KeySpecial::Rshift as u8
        // || key == KeySpecial::CapsLock as u8 || key == KeySpecial::NumLock as u8 || key == KeySpecial::ScrollLock as u8 {
        // } else {
        // if key == KeySpecial::Tab as u8 { key = b' '; }
        // if buffer_index < CONSOLE_MAXCOMMANDBUFFERSIZE {
        // buffer[buffer_index] = key;
        // print!("{}", key as char);
        // }
        // }
    }
}
|
use chrono::{NaiveDate, NaiveDateTime};
use postgres::{Client, NoTls, Row};
use sea_query::{ColumnDef, Iden, Order, PostgresDriver, PostgresQueryBuilder, Query, Table};
/// End-to-end sea-query + postgres demo: (re)creates the `document` table,
/// inserts one row, then reads the newest row back and prints it.
fn main() {
    let mut client = Client::connect("postgresql://sea:sea@localhost/query", NoTls).unwrap();
    // Schema
    let sql = [
        Table::drop()
            .table(Document::Table)
            .if_exists()
            .build(PostgresQueryBuilder),
        Table::create()
            .table(Document::Table)
            .if_not_exists()
            .col(
                ColumnDef::new(Document::Id)
                    .integer()
                    .not_null()
                    .auto_increment()
                    .primary_key(),
            )
            .col(ColumnDef::new(Document::JsonField).json_binary())
            .col(ColumnDef::new(Document::Timestamp).timestamp())
            .build(PostgresQueryBuilder),
    ]
    .join("; ");
    println!("{}", sql);
    let result = client.batch_execute(&sql).unwrap();
    println!("Create table document: {:?}\n", result);
    // Create
    let document = DocumentStruct {
        id: 1,
        json_field: serde_json::json! {{
            "a": 25.0,
            "b": "whatever",
            "c": {
                "another": "object",
                "bla": 1
            }
        }},
        timestamp: NaiveDate::from_ymd(2020, 1, 1).and_hms(2, 2, 2),
    };
    // `id` is auto-incremented by the database, so only the other two
    // columns are supplied in the INSERT.
    let (sql, values) = Query::insert()
        .into_table(Document::Table)
        .columns(vec![Document::JsonField, Document::Timestamp])
        .values_panic(vec![
            serde_json::to_value(document.json_field).unwrap().into(),
            document.timestamp.into(),
        ])
        .build(PostgresQueryBuilder);
    let result = client.execute(sql.as_str(), &values.as_params());
    println!("Insert into document: {:?}\n", result);
    // Read
    let (sql, values) = Query::select()
        .columns(vec![Document::Id, Document::JsonField, Document::Timestamp])
        .from(Document::Table)
        .order_by(Document::Id, Order::Desc)
        .limit(1)
        .build(PostgresQueryBuilder);
    let rows = client.query(sql.as_str(), &values.as_params()).unwrap();
    println!("Select one from document:");
    for row in rows.into_iter() {
        let item = DocumentStruct::from(row);
        println!("{:?}", item);
    }
    println!();
}
/// Identifier enum for the `document` table and its columns (sea-query `Iden`).
#[derive(Iden)]
enum Document {
    Table,
    Id,
    JsonField,
    Timestamp,
}
/// In-memory row of the `document` table.
#[derive(Debug)]
struct DocumentStruct {
    id: i32,
    json_field: serde_json::Value,
    timestamp: NaiveDateTime,
}
impl From<Row> for DocumentStruct {
    // Maps a postgres row to the struct by column name.
    // NOTE(review): `Row::get` panics on a missing or ill-typed column per
    // the postgres crate docs — acceptable for this demo.
    fn from(row: Row) -> Self {
        Self {
            id: row.get("id"),
            json_field: row.get("json_field"),
            timestamp: row.get("timestamp"),
        }
    }
}
|
use std::{process::exit};
use chiropterm::{*, colors::{LtRed, White}};
use euclid::*;
use chiroptui::*;
// Preferred terminal geometry passed to chiropterm's aspect handling.
const ASPECT_CONFIG: AspectConfig = AspectConfig {
    pref_min_term_size: size2(80, 50), // but expect ~112x60
    pref_max_term_size: size2(256, 256),
};
pub fn main() {
    // TODO: Load terrain from disk, if present
    let mut io = IO::new(
        "Example editor".to_string(),
        ASPECT_CONFIG,
        |_| exit(0) // on-quit callback: terminate the whole process
    );
    main_loop(&mut io);
}
// Builds the demo widget tree (label, input boxes, a button with modal
// behaviour, nested windows, a deck and a bulletin board) and then runs the
// redraw/menu loop until the IO layer quits.
fn main_loop(io: &mut IO) {
    let theme = Theme::W95_FRUITY;
    /*
    theme.window.borders = WindowBorders::DOS {
        active_title_fg: theme.window.color.1,
        inactive_title_fg: Light[2],
    };
    */
    let ui = UI::new(theme);
    let label: Label = Label::new().setup(|l| {
        l.set_text("Please enter a filename (will be created if the file does not exist). PS Bhijn drinks piss.")
    });
    let prompt1: InputBox = InputBox::new().setup(|ib| ib.max_width = Some(20));
    let prompt2: InputBox = InputBox::new().setup(|ib| ib.max_width = Some(20));
    let prompt3: InputBox = InputBox::new().setup(|ib| ib.max_width = Some(2));
    let prompt4: InputBox = InputBox::new().setup(|ib| ib.max_width = Some(2));
    let lbl = label.share();
    // First press rewrites the label; later presses stretch its text and pop
    // a modal screen that reacts to any mouse click (Break) and the A key.
    let button = Button::new().setup(move |b| {
        b.hotkey = Some(Keycode::D);
        b.text = "D - Devour robot".to_owned();
        b.command = Some(Box::new(move |ui, _, _| {
            let mut l_b = lbl.borrow_mut();
            if l_b.unique.get_text().starts_with("P") {
                l_b.unique.set_text("Nyeh!");
                ui.recompute_layout();
                return Signal::Refresh;
            } else {
                let tx = l_b.unique.get_text().replace("e", "eeeeee"); // unique.text += " Nyeh!"
                l_b.unique.set_text(tx);
                ui.recompute_layout();
                return Signal::Modal(Box::new(|io: &mut IO| {
                    io.menu(|out, menu| {
                        let i = menu.on_mouse(|_| Signal::Break);
                        out.brush().region(rect(2, 2, 80, 80)).interactor(i, (255, 255)).putfs("HELLO, ROBOT!");
                        menu.on_key(OnKey::only(Keycode::A).pressed(), |k| {
                            println!("key A: {:?}", k);
                            Signal::Continue
                        })
                    });
                    Signal::Refresh
                }));
            }
        }));
    });
    // Main window contents: label, the four input boxes, a colored canvas
    // strip and the button, padded with spacers.
    let col: Column = Column::new();
    col.setup(|c| {
        c.add(Spacer::new());
        c.add(label.share());
        c.add(Row::new().setup(|r| {
            r.add(prompt1.share());
            r.add(prompt2.share());
            r.add(prompt3.share());
            r.add(prompt4.share());
            // r.add(Spacer::new());
        }));
        c.add(Canvas::new().setup(|c| {
            c.layout_hacks.preferred_width = Some(30);
            c.layout_hacks.preferred_height = Some(2);
            c.set_draw(|b, _| {
                use colors::*;
                b.fill(FSem::new().color((LtRed[2], LtYellow[2])))
            })
        }));
        c.add(button);
        c.add(Spacer::new());
    });
    let win = Window::new();
    win.setup(|w| {
        w.set_title("TITLE BAR!!!");
        w.set(col.share())
    });
    // Outer column stacks the main window with several showcase windows:
    // a border layout, a deck of three windows, and a bulletin board.
    let all0 = Column::new();
    all0.setup(|c| {
        c.add(Spacer::new());
        c.add(win.share());
        c.add(Spacer::new());
        c.add(Window::new().setup(|w| {
            w.set(Border::new().setup(|b| {
                b.set_north(Label::new().setup(|l| l.set_text("NORTH NORTH NORTH NORTH")));
                b.set_west(Label::new().setup(|l| l.set_text("WEST")));
                b.set_center(Canvas::new().setup(|c| {
                    c.set_draw(|b, _| {
                        b.fill(FSem::new().color((LtRed[1], White)));
                        b.putfs("HELLO, SNACK!!!");
                    });
                    c.layout_hacks.preferred_height = Some(4);
                }));
                b.set_east(Label::new().setup(|l| l.set_text("EAST")));
                b.set_south(Label::new().setup(|l| l.set_text("SOUTH SOUTH SOUTH SOUTH")));
            }))
        }));
        c.add(Spacer::new());
        c.add(Deck::new().setup(|d| {
            d.add(Window::new().setup(|w| w.set_title("WINDOW 1")));
            d.add(Window::new().setup(|w| w.set_title("WINDOW 2")));
            d.add(Window::new().setup(|w| {
                w.set_title("WINDOW 3");
                w.set(Label::new().setup(|l| { l.set_text("I'm a bat!"); }));
            }));
        }));
        c.add(Spacer::new());
        c.add(Window::new().setup(|w| {
            w.set(BulletinBoard::new().setup(|bb| {
                bb.add(point2(0, 0), Label::new().setup(|l| { l.set_text("Baby seal") }));
                bb.add(point2(2, 0), Label::new().setup(|l| { l.set_text("t zone!") }));
                bb.add(point2(2, 2), Label::new().setup(|l| { l.set_text("t zone!") }));
            }))
        }));
        c.add(Spacer::new());
    });
    let all = Row::new();
    all.setup(|r| {
        r.add(Spacer::new());
        r.add(all0.share());
        r.add(Spacer::new());
    });
    // Everything is wrapped in a scroll container and drawn each frame over
    // the theme wallpaper, inset two cells from the terminal edges.
    let all2 = Scrollable::new().setup(|sb| sb.set(all));
    io.menu(|out, menu: Menu| {
        out.brush().fill(FSem::new().color(ui.theme().base.wallpaper));
        all2.draw(ui.share(), out.brush().region(out.rect().inflate(-2, -2)), menu)
    });
}
#![feature(const_fn)]
extern crate memento;
pub use memento::arch::cortex_m0::isr::*;
pub use memento::arch::*;
// Exception vector table: start from the architecture defaults and override
// only the initial stack pointer.
// NOTE(review): `static mut` is unsynchronized; presumably this is placed in
// a linker section the hardware reads at reset before any Rust code runs —
// confirm against the memento crate's linker script.
static mut isr: ExceptionVectors = ExceptionVectors {
    initial_sp: &__STACK_START,
    .. ExceptionVectors::DEFAULT
};
// Nothing to do at runtime; the interesting data is the static above.
fn main() {
}
|
pub mod auth;
pub mod image;
pub mod category; |
use backend::Backend;
use result::QueryResult;
use super::{Query, CombinableQuery, QueryBuilder, QueryFragment, BuildQueryResult};
/// SQL `UNION` of two queries, rendered as `left UNION right`.
#[derive(Debug)]
pub struct UnionQuery<L, R> {
    left: L,
    right: R,
}
impl<L, R> UnionQuery<L, R> {
    /// Combines `left` and `right` into a union query.
    pub fn new(left: L, right: R) -> Self {
        UnionQuery { left, right }
    }
}
// The union of two combinable queries has the SQL type both sides share.
impl<L, R> Query for UnionQuery<L, R>
    where L: CombinableQuery,
          R: CombinableQuery<SqlType = L::SqlType>
{
    type SqlType = <L as Query>::SqlType;
}
// A union can itself take part in further unions.
impl<L, R> CombinableQuery for UnionQuery<L, R> where UnionQuery<L, R>: Query {}
impl<L, R, DB> QueryFragment<DB> for UnionQuery<L, R>
    where DB: Backend,
          L: QueryFragment<DB>,
          R: QueryFragment<DB>
{
    // Renders `left UNION right`.
    fn to_sql(&self, out: &mut DB::QueryBuilder) -> BuildQueryResult {
        try!(self.left.to_sql(out));
        out.push_sql(" UNION ");
        try!(self.right.to_sql(out));
        Ok(())
    }
    // Bind parameters are collected in the order they appear in the SQL.
    fn collect_binds(&self, out: &mut DB::BindCollector) -> QueryResult<()> {
        try!(self.left.collect_binds(out));
        try!(self.right.collect_binds(out));
        Ok(())
    }
    // A union is cacheable as a prepared statement only if both sides are.
    fn is_safe_to_cache_prepared(&self) -> bool {
        self.left.is_safe_to_cache_prepared() && self.right.is_safe_to_cache_prepared()
    }
}
impl_query_id!(UnionQuery<L, R>);
|
#![deny(warnings)]
extern crate conduit_proxy;
use std::process;
// Look in lib.rs.
fn main() {
    // Load configuration.
    let config = match conduit_proxy::app::init() {
        Ok(c) => c,
        Err(e) => {
            eprintln!("configuration error: {:#?}", e);
            // 64 = EX_USAGE from sysexits.h: bad configuration or usage.
            process::exit(64)
        }
    };
    conduit_proxy::Main::new(config, conduit_proxy::SoOriginalDst).run();
}
|
//! Kingslayer is a text-based dungeon crawler adventure game and game engine
pub use cli::Cli;
/// The Cli type
pub mod cli;
mod entity;
mod input;
mod player;
mod types;
mod util;
mod world;
|
use rustypy::{PyArg, PyBool, PyList, PyTuple};
use std::iter::FromIterator;
// FFI round-trip helpers exercised from the rustypy test-suite: each one
// unpacks a PyList built on the Python side, asserts its expected contents,
// and returns a freshly built PyList (ownership passes via `into_raw`).
#[no_mangle]
pub unsafe extern "C" fn python_bind_list2(list: *mut PyList) -> *mut PyList {
    // Incoming: list of (i64, (f32, i64)) tuples.
    let converted = unpack_pylist!(list; PyList{PyTuple{(I64, (F32, I64,),)}});
    assert_eq!(
        vec![(50i64, (1.0f32, 30i64)), (25i64, (0.5f32, 40i64))],
        converted
    );
    // Hand back a list of (f64, bool) tuples.
    let v: Vec<PyTuple> = vec![
        pytuple!(PyArg::F64(0.5f64), PyArg::PyBool(PyBool::from(true))),
        pytuple!(PyArg::F64(-0.5f64), PyArg::PyBool(PyBool::from(false))),
    ];
    PyList::from_iter(v).into_raw()
}
#[no_mangle]
pub unsafe extern "C" fn python_bind_nested1_t_n_ls(list: *mut PyList) -> *mut PyList {
    // Incoming: list of lists of (i64, (f32, i64)) tuples.
    let converted = unpack_pylist!(list; PyList{PyList{PyTuple{(I64, (F32, I64,),)}}});
    assert_eq!(
        vec![
            vec![(50i64, (1.0f32, 30i64))],
            vec![(25i64, (0.5f32, 40i64))],
        ],
        converted
    );
    // Rebuild the same nested structure as Python objects and return it.
    let mut v0 = Vec::new();
    for x in converted {
        let mut v1 = Vec::new();
        for (f1, (f2, f3)) in x {
            let t_e = pytuple!(
                PyArg::I64(f1),
                PyArg::PyTuple(pytuple!(PyArg::F32(f2), PyArg::I64(f3)).into_raw())
            );
            v1.push(t_e);
        }
        v0.push(v1);
    }
    PyList::from_iter(v0).into_raw()
}
#[no_mangle]
pub unsafe extern "C" fn python_bind_nested2_t_n_ls(list: *mut PyList) -> *mut PyList {
    // Incoming: list of ([i64], f32) tuples.
    let mut unpacked = unpack_pylist!(list; PyList{PyTuple{({PyList{I64 => i64}}, F32,)}});
    assert_eq!(vec![(vec![1, 2, 3], 0.1), (vec![3, 2, 1], 0.2)], unpacked);
    // Swap the two entries so the Python side can observe the reorder.
    unpacked.swap(0, 1);
    let mut v0 = Vec::new();
    for (f1, f2) in unpacked {
        let e = pytuple!(
            PyArg::PyList(PyList::from_iter(f1).into_raw()),
            PyArg::F32(f2)
        );
        v0.push(e);
    }
    PyList::from_iter(v0).into_raw()
}
|
//! Use `hyper` as a driver for `happi`
//!
//! Implements `happi::Client` for `hyper::Client`.
use futures::{FutureExt, TryFutureExt};
pub use hyper::{client::connect::HttpConnector, Body, Request, Response};
use crate as happi;
// Adapts hyper's client to the happi::Client interface: issue the request,
// convert hyper errors into `happi::Error::Http`, and box the future.
impl happi::Client for hyper::Client<HttpConnector, Body> {
    fn execute(&self,
               req: Request<Body>)
               -> happi::Fut<Result<Response<Body>, happi::Error>> {
        self.request(req).map_err(happi::Error::Http).boxed()
    }
}
#[cfg(test)]
mod test {
    use crate as happi;
    // Round-trips a GET through the `happi::Client` impl against a local
    // mockito server and checks the status code comes back as 200.
    #[test]
    fn it_works() {
        async fn run() {
            let _mock = mockito::mock("GET", "/hello").with_status(200)
                .with_body("foo")
                .create();
            let client = hyper::Client::new();
            let req = hyper::Request::get(format!("{}/hello", mockito::server_url())).body(hyper::Body::empty()).unwrap();
            let res = happi::Client::execute(&client, req).await.unwrap();
            assert_eq!(res.status(), 200)
        }
        tokio_test::block_on(run())
    }
}
|
use std::collections::HashMap;
/// Length of the longest substring of `s` that can be made uniform by
/// replacing at most `k` characters (classic sliding window).
///
/// Fix: the original iterated `0..s.len()` (a BYTE count) while indexing a
/// `Vec<char>`, which panics or miscounts on multi-byte UTF-8 input; the
/// window now runs over `chars.len()`.
pub fn character_replacement(s: String, k: i32) -> i32 {
    let chars: Vec<char> = s.chars().collect();
    let k = k as usize;
    let mut counts: HashMap<char, u64> = HashMap::new();
    // `max_freq` is the highest frequency seen in ANY window so far; it is
    // deliberately never decreased — the window only shrinks when it cannot
    // beat the current best, which keeps the loop O(n).
    let (mut best, mut left, mut max_freq) = (0usize, 0usize, 0u64);
    for right in 0..chars.len() {
        let entry = counts.entry(chars[right]).or_default();
        *entry += 1;
        max_freq = max_freq.max(*entry);
        // Shrink until the replacements needed fit within k.
        while (right - left + 1) - max_freq as usize > k {
            *counts.get_mut(&chars[left]).unwrap() -= 1;
            left += 1;
        }
        best = best.max(right - left + 1);
    }
    best as i32
}
fn main() {
assert_eq!(character_replacement("ABAB".to_string(), 2), 4);
assert_eq!(character_replacement("AABABBA".to_string(), 1), 4);
}
|
use crate::uses::*;
use core::ptr;
use modular_bitfield::{bitfield, BitfieldSpecifier};
use crate::int::idt::IRQ_TIMER;
use super::*;
/// Routing target for an I/O APIC interrupt.
#[derive(Debug, Clone, Copy)]
pub enum IoApicDest {
    /// Deliver to the CPU with the given APIC id.
    To(u8),
    /// Deliver broadly (logical destination mode; see `IrqEntry::from`).
    ToAll,
}
/// One 64-bit redirection-table entry of the I/O APIC.
#[bitfield]
#[repr(u64)]
#[derive(Debug, Clone, Copy)]
pub struct IrqEntry {
    // Interrupt vector delivered to the CPU.
    vec: u8,
    #[bits = 3]
    deliv_mode: DelivMode,
    #[bits = 1]
    dest_mode: DestMode,
    // read only
    #[bits = 1]
    #[skip(setters)]
    deliv_status: DelivStatus,
    #[bits = 1]
    polarity: PinPolarity,
    // read only
    #[bits = 1]
    #[skip(setters)]
    remote_irr: RemoteIrr,
    #[bits = 1]
    trigger_mode: TriggerMode,
    // When set, the interrupt is not delivered.
    masked: bool,
    #[skip] __: B39,
    // Destination APIC id (physical mode) or logical destination value.
    dest: u8,
}
impl IrqEntry {
    // Builds a fixed-delivery, unmasked entry for `vec` aimed at `dest`.
    pub(super) fn from(vec: u8, dest: IoApicDest, polarity: PinPolarity, trigger_mode: TriggerMode) -> Self {
        let out = Self::new()
            .with_deliv_mode(DelivMode::Fixed)
            .with_vec(vec)
            .with_polarity(polarity)
            .with_trigger_mode(trigger_mode);
        match dest {
            IoApicDest::To(dest) => out.with_dest_mode(DestMode::Physical).with_dest(dest),
            // NOTE(review): logical destination `1` presumably addresses all
            // CPUs given how the LAPIC logical ids are set up — confirm.
            IoApicDest::ToAll => out.with_dest_mode(DestMode::Logical).with_dest(1),
        }
    }
    // An entry that delivers nothing (used to park unused IRQ lines).
    pub fn new_masked() -> Self {
        Self::new().with_masked(true)
    }
}
/// Driver for one I/O APIC, accessed through its select/data MMIO window.
pub struct IoApic {
    select: *mut u32,
    reg: *mut u32,
    // holds max irqs - 1
    max_irq_index: u8,
}
impl IoApic {
    // Register indices reachable through the select window.
    const IO_APIC_ID: u32 = 0;
    const IO_APIC_VER: u32 = 1;
    const IO_APIC_ARB: u32 = 2;
    // safety: have to call init before calling any other methods
    pub const unsafe fn new() -> Self {
        IoApic {
            select: null_mut(),
            reg: null_mut(),
            max_irq_index: 0,
        }
    }
    // safety: pass a valid address to from
    pub unsafe fn from(addr: PhysAddr) -> Self {
        let mut out = Self::new();
        out.init(addr);
        out
    }
    // safety: pass a valid address to init
    pub unsafe fn init(&mut self, addr: PhysAddr) {
        let addr = phys_to_virt(addr).as_u64() as usize;
        // Two 32-bit MMIO registers: register-select, and a data window
        // 0x10 bytes above it.
        self.select = addr as *mut u32;
        self.reg = (addr + 0x10) as *mut u32;
        // Bits 16..24 of the version register hold (number of IRQ lines - 1).
        self.max_irq_index = get_bits(self.read_reg(Self::IO_APIC_VER) as usize, 16..24) as u8;
        // Mask every line until a driver installs a real entry.
        for irq in 0..=self.max_irq_index {
            self.set_irq_entry(irq, IrqEntry::new_masked());
        }
    }
    // Maps an IRQ line to its redirection-table register index
    // (entries start at register 0x10 and occupy two registers each).
    fn irq_index(&self, irq: u8) -> Option<u32> {
        if irq > self.max_irq_index {
            None
        } else {
            Some(0x10 + irq as u32 * 2)
        }
    }
    // returns the number of irqs an apic has
    pub fn max_irq_index(&mut self) -> u8 {
        self.max_irq_index
    }
    // Volatile select-then-read through the MMIO window.
    fn read_reg(&mut self, reg: u32) -> u32 {
        unsafe {
            ptr::write_volatile(self.select, reg);
            ptr::read_volatile(self.reg)
        }
    }
    // Volatile select-then-write through the MMIO window.
    fn write_reg(&mut self, reg: u32, data: u32) {
        unsafe {
            ptr::write_volatile(self.select, reg);
            ptr::write_volatile(self.reg, data);
        }
    }
    // returns true if succesfully set irq
    pub fn set_irq_entry(&mut self, irq: u8, entry: IrqEntry) -> bool {
        match self.irq_index(irq) {
            Some(index) => {
                // The 64-bit entry spans two consecutive 32-bit registers.
                let entry: u64 = entry.into();
                self.write_reg(index, get_bits(entry as usize, 0..32) as u32);
                self.write_reg(index + 1, get_bits(entry as usize, 32..64) as u32);
                true
            },
            None => false,
        }
    }
}
// NOTE(review): Send is asserted despite the raw MMIO pointers — sound only
// if callers serialize access to the select/data pair; confirm locking at
// the call sites.
unsafe impl Send for IoApic {}
|
use diesel::prelude::*;
use diesel::result::Error;
use rocket::http::Status;
use rocket_contrib::json::Json;
use serde::Deserialize;
use crate::connection::DbConn;
use crate::schema::tasks;
use crate::task::Task;
#[get("/tasks")]
pub fn tasks_index(conn: DbConn) -> Result<Json<Vec<Task>>, Status> {
let query_result: QueryResult<Vec<Task>> = tasks::table.load::<Task>(&*conn);
query_result
.map(|task| Json(task))
.map_err(|_error| Status::InternalServerError)
}
#[get("/tasks/<id>")]
pub fn tasks_get(id: i32, conn: DbConn) -> Result<Json<Task>, Status> {
let query_result: QueryResult<Task> = tasks::table.find(id).get_result::<Task>(&*conn);
query_result
.map(|task| Json(task))
.map_err(|error| match error {
Error::NotFound => Status::NotFound,
_ => Status::InternalServerError,
})
}
// Row shape used for INSERTs into `tasks`; the id column is left to the DB.
#[derive(Insertable)]
#[table_name = "tasks"]
struct InsertableTask {
    description: String,
    completed: bool,
}
impl InsertableTask {
    // New tasks always start out not completed.
    fn from_task(task: TaskDescriptionData) -> InsertableTask {
        InsertableTask {
            description: task.description,
            completed: false,
        }
    }
}
// JSON request body for creating a task.
#[derive(Deserialize)]
pub struct TaskDescriptionData {
    description: String,
}
#[post("/tasks", format = "application/json", data = "<task>")]
pub fn tasks_post(task: Json<TaskDescriptionData>, conn: DbConn) -> Result<Status, Status> {
    // Insert the new row and answer 201; the created row itself is not
    // returned to the caller.
    let row = InsertableTask::from_task(task.into_inner());
    diesel::insert_into(tasks::table)
        .values(&row)
        .get_result::<Task>(&*conn)
        .map(|_| Status::Created)
        .map_err(|_| Status::InternalServerError)
}
// JSON request body for PATCH; absent fields are left unchanged.
#[derive(Deserialize, AsChangeset)]
#[table_name = "tasks"]
pub struct TaskChangeset {
    completed: Option<bool>,
    description: Option<String>,
}
#[patch("/tasks/<id>", format = "application/json", data = "<task>")]
pub fn tasks_update(
id: i32,
task: Json<TaskChangeset>,
conn: DbConn,
) -> Result<Json<Task>, Status> {
let query_result = diesel::update(tasks::table.find(id))
.set(task.into_inner())
.get_result(&*conn);
query_result
.map(|task| Json(task))
.map_err(|error| match error {
Error::NotFound => Status::NotFound,
_ => Status::InternalServerError,
})
}
/// Deletes the task with `id`.
///
/// Responds 204 on success and 404 when no such row exists. The original
/// issued a SELECT first and then a DELETE, doubling the round trips and
/// racing with concurrent deletes; `delete(...).execute` already reports how
/// many rows went away, so the row count is used instead.
#[delete("/tasks/<id>")]
pub fn tasks_delete(id: i32, conn: DbConn) -> Result<Status, Status> {
    diesel::delete(tasks::table.find(id))
        .execute(&*conn)
        .map_err(|_| Status::InternalServerError)
        .and_then(|deleted| {
            if deleted == 0 {
                // Nothing matched the id: report 404 like the old SELECT did.
                Err(Status::NotFound)
            } else {
                Ok(Status::NoContent)
            }
        })
}
|
//! Working with the text format.
pub use wasm_webidl_bindings_text_parser::*;
/// Parse the given straw proposal text format input into an AST.
pub fn parse(
module: &walrus::Module,
indices_to_ids: &walrus::IndicesToIds,
input: &str,
) -> anyhow::Result<crate::ast::WebidlBindings> {
let mut bindings = crate::ast::WebidlBindings::default();
let mut actions = crate::ast::BuildAstActions::new(&mut bindings, module, indices_to_ids);
parse_with_actions(&mut actions, input)?;
Ok(bindings)
}
|
/// Day-9 driver: prints the first number that is not a sum of two of the
/// previous 25 (part 1) and the sum of the smallest and largest members of
/// a contiguous run adding up to it (part 2).
pub fn run() {
    let input: Vec<u64> = include_str!("input.txt")
        .lines()
        .map(|l| l.parse().expect("Not a number."))
        .collect();
    // 25 is the puzzle's preamble length.
    let invalid = get_first_invalid(input.clone(), 25);
    let mut sum_set = find_continous_set(input.clone(), invalid);
    // Sorting makes min and max the first and last elements.
    sum_set.sort();
    let result = sum_set[0] + sum_set[sum_set.len()-1];
    println!("Day09 - Part 1: {}", invalid);
    println!("Day09 - Part 2: {}", result);
}
/// Returns the first contiguous run of `input` whose elements sum to `sum`;
/// panics when no such run exists.
///
/// Fix: the original only tested the running total *before* adding the next
/// element, so a qualifying run that ended at the final index was never
/// detected. Checking immediately after each addition covers every run.
fn find_continous_set(input: Vec<u64>, sum: u64) -> Vec<u64> {
    let n = input.len();
    for start in 0..n {
        let mut total = 0u64;
        for end in start..n {
            total += input[end];
            if total == sum {
                return input[start..=end].to_vec();
            }
            if total > sum {
                // Values are unsigned, so the total can only keep growing.
                break;
            }
        }
    }
    panic!("Did not find set");
}
/// Returns the first value of `input` that is not the sum of two values
/// among the `step` values directly before it.
///
/// Panics (via out-of-bounds indexing) if every value is valid.
fn get_first_invalid(input: Vec<u64>, step: usize) -> u64 {
    let mut idx = step;
    loop {
        let window = input[(idx - step)..idx].to_vec();
        if !is_valid_number(window, input[idx]) {
            return input[idx];
        }
        idx += 1;
    }
}
/// Reports whether `num` equals the sum of two values taken from distinct
/// positions of `prev`, using a sorted two-pointer scan.
fn is_valid_number(mut prev: Vec<u64>, num: u64) -> bool {
    prev.sort();
    let (mut lo, mut hi) = (0, prev.len() - 1);
    while lo != hi {
        match (prev[lo] + prev[hi]).cmp(&num) {
            std::cmp::Ordering::Equal => return true,
            std::cmp::Ordering::Greater => hi -= 1,
            std::cmp::Ordering::Less => lo += 1,
        }
    }
    false
}
#[cfg(test)]
mod tests {
    use super::*;
    // All fixtures are taken from the Advent of Code 2020 day 9 example
    // (preamble length 5, first invalid value 127).
    #[test]
    fn test_is_valid_number() {
        let valid = is_valid_number(vec![35, 20, 15, 25, 47], 40);
        assert_eq!(true, valid);
    }
    #[test]
    fn test_is_not_valid_number() {
        let valid = is_valid_number(vec![95, 102, 117, 150, 182], 127);
        assert_eq!(false, valid);
    }
    #[test]
    fn test_find_first_invalid() {
        let invalid = get_first_invalid(vec![35, 20, 15, 25, 47, 40, 62, 55, 65, 95, 102, 117, 150, 182, 127, 219, 299, 277, 309, 576], 5);
        assert_eq!(127, invalid);
    }
    // The expected run is [15, 25, 47, 40]; only its endpoints are checked.
    #[test]
    fn test_find_continous_set() {
        let set = find_continous_set(vec![35, 20, 15, 25, 47, 40, 62, 55, 65, 95, 102, 117, 150, 182, 127, 219, 299, 277, 309, 576], 127);
        assert_eq!(15, set[0]);
        assert_eq!(40, set[set.len()-1]);
    }
}
mod action;
mod device;
mod error;
mod handle;
mod instance;
mod intern;
mod physical_device;
pub use crate::{action::*, error::*, instance::*, intern::*, physical_device::*};
use handle::Handle;
pub use intern::Path;
use std::collections::HashMap;
use crate::device::*;
use std::{mem, ptr};
use winapi::shared::hidusage::*;
use winapi::shared::windef::*;
use winapi::um::libloaderapi::*;
use winapi::um::winuser::*;
// Interaction profile identifier for the plain keyboard+mouse desktop backend.
const NORSE_DESKTOP_INTERACTION_PROFILE_NAME: &'static str = "/interaction_profiles/norse/desktop";
// Semantic aliases over the interned `Path` type; they exist purely to
// document which kind of path a signature expects.
type ProfilePath = Path;
type SourcePath = Path;
pub type Subpath = Path;
// e.g. /user/hand/left
type UserPath = Path;
// e.g. /input/mouse/left/click
type InputPath = Path;
impl Instance {
    /// Creates the system handle; infallible in this backend since the
    /// desktop system is a unit marker.
    pub fn create_system(&self) -> Result<System> {
        Ok(System)
    }
    /// Creates a session backed by a Win32 window that receives raw
    /// keyboard and mouse input.
    ///
    /// # Safety
    ///
    /// Calls raw Win32 APIs; `self.class_name` must name a window class
    /// registered for this module instance.
    pub unsafe fn create_session(&mut self, system: &System) -> Result<Session> {
        let instance = GetModuleHandleW(ptr::null());
        // NOTE(review): the returned handle is not checked for null, so a
        // CreateWindowExW failure would propagate silently — confirm.
        let hwnd = CreateWindowExW(
            0,
            self.class_name.as_ptr(),
            std::ptr::null(),
            0,
            CW_USEDEFAULT,
            CW_USEDEFAULT,
            CW_USEDEFAULT,
            CW_USEDEFAULT,
            std::ptr::null_mut(),
            std::ptr::null_mut(),
            instance,
            std::ptr::null_mut(),
        );
        // Subscribe `hwnd` to generic keyboard and mouse raw input with
        // device-change notifications (RIDEV_DEVNOTIFY | RIDEV_INPUTSINK).
        let raw_devices = [
            RAWINPUTDEVICE {
                usUsagePage: HID_USAGE_PAGE_GENERIC,
                usUsage: HID_USAGE_GENERIC_KEYBOARD,
                dwFlags: RIDEV_DEVNOTIFY | RIDEV_INPUTSINK,
                hwndTarget: hwnd,
            },
            RAWINPUTDEVICE {
                usUsagePage: HID_USAGE_PAGE_GENERIC,
                usUsage: HID_USAGE_GENERIC_MOUSE,
                dwFlags: RIDEV_DEVNOTIFY | RIDEV_INPUTSINK,
                hwndTarget: hwnd,
            },
        ];
        // NOTE(review): RegisterRawInputDevices returns a BOOL that is
        // ignored here — confirm failure handling is intentional.
        RegisterRawInputDevices(
            raw_devices.as_ptr(),
            raw_devices.len() as _,
            mem::size_of::<RAWINPUTDEVICE>() as _,
        );
        // Interned virtual user paths for the two desktop devices.
        let user_paths = UserPaths {
            mouse: self.interner.intern("/user/mouse"),
            keyboard: self.interner.intern("/user/keyboard"),
        };
        let mut devices = HashMap::new();
        devices.insert(user_paths.keyboard, Device::new_keyboard());
        devices.insert(user_paths.mouse, Device::new_mouse(&mut self.interner));
        Ok(Session {
            hwnd,
            user_paths,
            devices,
            active_profile: self.profiles.desktop,
        })
    }
}
// Supported hardware form factors; only plain desktops for now.
pub enum FormFactor {
    Desktop,
}
// Placeholder: no session events are defined yet.
pub enum Event {}
// Marker for the single desktop system.
pub struct System;
// Interned paths identifying the two virtual desktop devices.
pub(crate) struct UserPaths {
    pub mouse: UserPath,
    pub keyboard: UserPath,
}
// A running input session: the raw-input window handle plus per-device state.
pub struct Session {
    hwnd: HWND,
    user_paths: UserPaths,
    devices: HashMap<UserPath, Device>,
    active_profile: ProfilePath,
}
|
/// Round-trips a string through percent-encoding and back, asserting both
/// directions produce the expected values.
pub fn prercent_toggle_normal() {
    use percent_encoding::{percent_decode, utf8_percent_encode, AsciiSet, CONTROLS};
    // Characters that must be escaped in a URL fragment, beyond controls.
    const FRAGMENT: AsciiSet = CONTROLS.add(b' ').add(b'"').add(b'<').add(b'>').add(b'`');
    let input = "confident, <>`\"productive systems programming";
    let encoded = utf8_percent_encode(input, &FRAGMENT).collect::<String>();
    assert_eq!(
        encoded,
        "confident,%20%3C%3E%60%22productive%20systems%20programming"
    );
    let decoded = percent_decode(encoded.as_bytes()).decode_utf8().unwrap();
    assert_eq!(input, decoded);
    println!("before: {}\nafter: {}", input, encoded);
}
/// Demonstrates application/x-www-form-urlencoded encoding and decoding.
pub fn str_toggle_x_www_form_urlencoded() {
    use url::form_urlencoded::{byte_serialize, parse};
    let urlencoded = byte_serialize("What is ❤?".as_bytes()).collect::<String>();
    // Re-join each (key, value) pair back into plain text.
    let decoded = parse(urlencoded.as_bytes())
        .map(|(key, val)| [key, val].concat())
        .collect::<String>();
    println!("urlencoded:'{}'", urlencoded);
    println!("decoded:'{}'", decoded);
}
/// Encodes a byte string as uppercase hex and decodes it back, printing
/// both the raw bytes and a char-by-char string rendering.
pub fn str_to_0x() {
    use data_encoding::HEXUPPER;
    let original = b"The quick brown fox jumps over the lazy dog.";
    let encoded = HEXUPPER.encode(original);
    let decoded = HEXUPPER.decode(encoded.as_bytes()).unwrap();
    println!("ori: {:?}\ndecoded: {}", decoded, encoded);
    // Shadow with a readable String built from the decoded bytes.
    let decoded = decoded.iter().map(|&b| b as char).collect::<String>();
    println!("ori: {:?}\ndecoded: {}", decoded, encoded);
}
/// Encodes a string as base64 and decodes it back.
pub fn str_to_base64() {
    use base64;
    let plain = "hello rustaceans";
    let b64 = base64::encode(plain.as_bytes());
    let roundtrip = base64::decode(b64.as_bytes());
    println!("ori: {:?}\ndecoded: {}", roundtrip, b64);
}
// Smoke test: each demo asserts its own round-trip internally, so simply
// running them all verifies the encodings.
#[test]
pub fn test() {
    prercent_toggle_normal();
    str_toggle_x_www_form_urlencoded();
    str_to_0x();
    str_to_base64();
}
|
#[cfg(test)]
mod basic_integration_tests {
    use dualib::app;
    use std::process::Command;
    // Path of the binary under test relative to the workspace root.
    static EXEC_NAME: &'static str = "./target/debug/dua";
    // Asserts that a Vec<u8> capture equals the given UTF-8 string.
    macro_rules! assert_vu8_str_eq {
        ( $x:expr, $y:expr ) => {
            {
                assert_eq!(std::str::from_utf8($x).unwrap(), $y)
            }
        };
    }
    // -s and -d are mutually exclusive: app() must fail with code 1 and a
    // diagnostic on the error stream.
    #[test]
    fn given_summarize_and_depth_options_then_it_should_fail() {
        let mut output = Vec::<u8>::new();
        let mut error = Vec::<u8>::new();
        match app::app(&vec![EXEC_NAME, "-s", "-d", "0", "../test-data"], &mut output, &mut error) {
            Ok(_) => unreachable!(),
            Err(x) => assert_eq!(x, 1)
        };
        assert_vu8_str_eq!(&error, "depth and summarize cannot be used together\n");
    }
    // -s with one entry must produce byte-identical output to -d 0.
    #[test]
    fn given_summarize_option_with_one_entry_then_it_should_be_the_same_as_depth_0() {
        let mut output = Vec::<u8>::new();
        let mut error = Vec::<u8>::new();
        let mut output2 = Vec::<u8>::new();
        let mut error2 = Vec::<u8>::new();
        app::app(&vec![EXEC_NAME, "-s", "../test-data"], &mut output, &mut error).unwrap();
        app::app(&vec![EXEC_NAME, "-d", "0", "../test-data"], &mut output2, &mut error2).unwrap();
        assert_eq!(output, output2);
        assert_vu8_str_eq!(&error, "");
    }
    // NOTE(review): despite its name this test asserts *empty* output for
    // -d 0 — confirm whether it should assert exactly one result line.
    #[test]
    fn given_summarize_option_with_one_entry_then_it_should_only_return_one_result() {
        let mut output = Vec::<u8>::new();
        let mut error = Vec::<u8>::new();
        println!("{:?}", std::env::current_dir().unwrap());
        app::app(&vec![EXEC_NAME, "../test-data", "-d", "0"], &mut output, &mut error).unwrap();
        assert_vu8_str_eq!(&output, "");
        assert_vu8_str_eq!(&error, "");
    }
}
|
use std::rc::Rc;
use std::cell::RefCell;
use std::collections::HashMap;
use module::{ModulePlans, NetworkTarget};
use net::{ClientId, InPacket, OutPacket};
use ship::{ShipId, ShipNetworked, ShipRef};
use sim::SimEvents;
#[cfg(feature = "client")]
use sim::SimEffects;
#[cfg(feature = "client")]
use asset_store::AssetStore;
// Simulation rate: number of ticks per second (one tick lasts 1/20 s).
pub static TICKS_PER_SECOND: u32 = 20;
// Shared battle state: the authoritative ship list plus two lookup maps
// (by ship id and by owning client id) kept in sync with it.
pub struct BattleContext {
    pub ships: HashMap<ShipId, ShipRef>,
    pub ships_client_id: HashMap<ClientId, ShipRef>,
    pub ships_list: Vec<ShipRef>,
}
impl BattleContext {
pub fn new(ships: Vec<ShipRef>) -> BattleContext {
let mut ships_map = HashMap::new();
for ship in ships.iter() {
ships_map.insert(ship.borrow().id, ship.clone());
}
let mut ships_client_id_map = HashMap::new();
for ship in ships.iter() {
match ship.borrow().client_id {
Some(client_id) => { ships_client_id_map.insert(client_id, ship.clone()); },
None => {},
}
}
BattleContext {
ships: ships_map,
ships_client_id: ships_client_id_map,
ships_list: ships,
}
}
pub fn get_ship<'a>(&'a self, ship_id: ShipId) -> &'a ShipRef {
match self.ships.get(&ship_id) {
Some(ship) => ship,
None => panic!("No ship with ID {}", ship_id),
}
}
pub fn get_ship_by_client_id<'a>(&'a self, client_id: ClientId) -> &'a ShipRef {
match self.ships_client_id.get(&client_id) {
Some(ship) => ship,
None => panic!("No ship with client ID {}", client_id),
}
}
pub fn add_ship(&mut self, ship: ShipRef) {
self.ships_list.push(ship.clone());
self.ships.insert(ship.borrow().id, ship.clone());
let client_id = ship.borrow().client_id;
if let Some(client_id) = client_id {
self.ships_client_id.insert(client_id, ship);
}
}
pub fn add_ships(&mut self, ships: Vec<ShipRef>) {
for ship in ships {
self.add_ship(ship);
}
}
pub fn add_networked_ship(&mut self, ship: ShipNetworked) -> ShipRef {
let (ship, targets) = ship.to_ship();
let ship = Rc::new(RefCell::new(ship));
self.add_ship(ship.clone());
ship.borrow_mut().set_targets(self, &targets);
ship
}
pub fn add_networked_ships(&mut self, ships: Vec<ShipNetworked>) {
let ships: Vec<(ShipRef, Vec<(Option<NetworkTarget>, Option<NetworkTarget>)>)> =
ships.into_iter().map(
|s| {
let (ship, targets) = s.to_ship();
let ship = Rc::new(RefCell::new(ship));
self.add_ship(ship.clone());
(ship, targets)
}
).collect();
for (ship, targets) in ships {
ship.borrow_mut().set_targets(self, &targets);
}
}
pub fn remove_ship(&mut self, ship_id: ShipId) {
self.on_ship_removed(ship_id);
self.ships_list.retain(|ship| ship.borrow().id != ship_id);
// TODO optimize this
// Rebuild hash maps
self.ships = HashMap::new();
for ship in self.ships_list.iter() {
self.ships.insert(ship.borrow().id, ship.clone());
}
self.ships_client_id = HashMap::new();
for ship in self.ships_list.iter() {
match ship.borrow().client_id {
Some(client_id) => { self.ships_client_id.insert(client_id, ship.clone()); },
None => {},
}
}
}
pub fn server_preprocess(&mut self) {
for ship in self.ships_list.iter() {
ship.borrow_mut().server_preprocess();
}
}
pub fn before_simulation(&mut self, events: &mut SimEvents) {
for ship in self.ships_list.iter() {
ship.borrow().before_simulation(events, ship);
}
}
#[cfg(feature = "client")]
pub fn add_plan_effects(&self, asset_store: &AssetStore, effects: &mut SimEffects) {
for ship in self.ships_list.iter() {
ship.borrow().add_plan_effects(asset_store, effects, ship);
}
}
#[cfg(feature = "client")]
pub fn add_simulation_effects(&self, asset_store: &AssetStore, effects: &mut SimEffects) {
for ship in self.ships_list.iter() {
ship.borrow().add_simulation_effects(asset_store, effects, ship);
}
}
pub fn after_simulation(&self) {
for ship in self.ships_list.iter() {
ship.borrow_mut().after_simulation();
}
}
pub fn on_ship_removed(&self, ship_id: ShipId) {
for ship in self.ships_list.iter() {
ship.borrow_mut().on_ship_removed(ship_id);
}
}
pub fn apply_module_plans(&self) {
for ship in self.ships_list.iter() {
ship.borrow_mut().apply_module_plans();
}
}
pub fn write_results(&self, packet: &mut OutPacket) {
packet.write(&(self.ships.len() as u32));
for ship in self.ships_list.iter() {
packet.write(&ship.borrow().id);
ship.borrow().write_results(packet);
}
}
pub fn read_results(&self, packet: &mut InPacket) {
let num_ships: u32 = packet.read().unwrap();
for _ in 0 .. num_ships {
let ship_id = packet.read().unwrap();
let ship = self.get_ship(ship_id);
ship.borrow_mut().read_results(self, packet);
}
}
}
// Packets sent from client to server
#[derive(Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum ServerPacketId {
    Plan, // Player's plans
}
// Packets sent from server to client
#[derive(Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub enum ClientPacketId {
    SimResults, // Calculated simulation results from server
}
#![deny(warnings)]
use embedded_graphics::{
mono_font::MonoTextStyleBuilder,
prelude::*,
primitives::{Circle, Line, PrimitiveStyleBuilder},
text::{Baseline, Text, TextStyleBuilder},
};
use embedded_hal::prelude::*;
use epd_waveshare::{
color::*,
epd4in2::{Display4in2, Epd4in2},
graphics::DisplayRotation,
prelude::*,
};
use linux_embedded_hal::{
spidev::{self, SpidevOptions},
sysfs_gpio::Direction,
Delay, Pin, Spidev,
};
// activate spi, gpio in raspi-config
// needs to be run with sudo because of some sysfs_gpio permission problems and follow-up timing problems
// see https://github.com/rust-embedded/rust-sysfs-gpio/issues/5 and follow-up issues
/// Demo for the Waveshare 4.2" e-paper display on a Raspberry Pi:
/// exercises all four rotations, draws a clock face and text, then runs a
/// quick-refresh "moving Hello World" loop before putting the panel to sleep.
fn main() -> Result<(), std::io::Error> {
    // Configure SPI
    // Settings are taken from
    let mut spi = Spidev::open("/dev/spidev0.0").expect("spidev directory");
    let options = SpidevOptions::new()
        .bits_per_word(8)
        .max_speed_hz(4_000_000)
        .mode(spidev::SpiModeFlags::SPI_MODE_0)
        .build();
    spi.configure(&options).expect("spi configuration");
    // Configure Digital I/O Pin to be used as Chip Select for SPI
    // NOTE(review): pin number 26 vs. the "BCM7 CE0" comment — confirm wiring.
    let cs = Pin::new(26); //BCM7 CE0
    cs.export().expect("cs export");
    // Busy-wait until sysfs has actually exported the pin.
    while !cs.is_exported() {}
    cs.set_direction(Direction::Out).expect("CS Direction");
    cs.set_value(1).expect("CS Value set to 1");
    let busy = Pin::new(5); //pin 29
    busy.export().expect("busy export");
    while !busy.is_exported() {}
    busy.set_direction(Direction::In).expect("busy Direction");
    //busy.set_value(1).expect("busy Value set to 1");
    let dc = Pin::new(6); //pin 31 //bcm6
    dc.export().expect("dc export");
    while !dc.is_exported() {}
    dc.set_direction(Direction::Out).expect("dc Direction");
    dc.set_value(1).expect("dc Value set to 1");
    let rst = Pin::new(16); //pin 36 //bcm16
    rst.export().expect("rst export");
    while !rst.is_exported() {}
    rst.set_direction(Direction::Out).expect("rst Direction");
    rst.set_value(1).expect("rst Value set to 1");
    let mut delay = Delay {};
    let mut epd4in2 =
        Epd4in2::new(&mut spi, cs, busy, dc, rst, &mut delay, None).expect("eink initalize error");
    println!("Test all the rotations");
    let mut display = Display4in2::default();
    // Each draw lands in the same in-memory buffer; one frame update below
    // then shows all four strings at once.
    display.set_rotation(DisplayRotation::Rotate0);
    draw_text(&mut display, "Rotate 0!", 5, 50);
    display.set_rotation(DisplayRotation::Rotate90);
    draw_text(&mut display, "Rotate 90!", 5, 50);
    display.set_rotation(DisplayRotation::Rotate180);
    draw_text(&mut display, "Rotate 180!", 5, 50);
    display.set_rotation(DisplayRotation::Rotate270);
    draw_text(&mut display, "Rotate 270!", 5, 50);
    epd4in2.update_frame(&mut spi, display.buffer(), &mut delay)?;
    epd4in2
        .display_frame(&mut spi, &mut delay)
        .expect("display frame new graphics");
    delay.delay_ms(5000u16);
    println!("Now test new graphics with default rotation and some special stuff");
    display.clear(Color::White).ok();
    // draw a analog clock
    let style = PrimitiveStyleBuilder::new()
        .stroke_color(Color::Black)
        .stroke_width(1)
        .build();
    let _ = Circle::with_center(Point::new(64, 64), 80)
        .into_styled(style)
        .draw(&mut display);
    let _ = Line::new(Point::new(64, 64), Point::new(0, 64))
        .into_styled(style)
        .draw(&mut display);
    let _ = Line::new(Point::new(64, 64), Point::new(80, 80))
        .into_styled(style)
        .draw(&mut display);
    // draw white on black background
    let style = MonoTextStyleBuilder::new()
        .font(&embedded_graphics::mono_font::ascii::FONT_6X10)
        .text_color(Color::White)
        .background_color(Color::Black)
        .build();
    let text_style = TextStyleBuilder::new().baseline(Baseline::Top).build();
    let _ = Text::with_text_style("It's working-WoB!", Point::new(175, 250), style, text_style)
        .draw(&mut display);
    // use bigger/different font
    let style = MonoTextStyleBuilder::new()
        .font(&embedded_graphics::mono_font::ascii::FONT_10X20)
        .text_color(Color::White)
        .background_color(Color::Black)
        .build();
    let _ = Text::with_text_style("It's working-WoB!", Point::new(50, 200), style, text_style)
        .draw(&mut display);
    // a moving `Hello World!`
    let limit = 10;
    // Quick LUT trades ghosting for faster refreshes during the animation.
    epd4in2
        .set_lut(&mut spi, &mut delay, Some(RefreshLut::Quick))
        .unwrap();
    epd4in2.clear_frame(&mut spi, &mut delay).unwrap();
    for i in 0..limit {
        //println!("Moving Hello World. Loop {} from {}", (i + 1), limit);
        draw_text(&mut display, " Hello World! ", 5 + i * 12, 50);
        epd4in2
            .update_frame(&mut spi, display.buffer(), &mut delay)
            .unwrap();
        epd4in2
            .display_frame(&mut spi, &mut delay)
            .expect("display frame new graphics");
        delay.delay_ms(1_000u16);
    }
    println!("Finished tests - going to sleep");
    epd4in2.sleep(&mut spi, &mut delay)
}
/// Renders `text` at (`x`, `y`) as white-on-black 6x10 text into the
/// display's in-memory buffer.
fn draw_text(display: &mut Display4in2, text: &str, x: i32, y: i32) {
    let char_style = MonoTextStyleBuilder::new()
        .font(&embedded_graphics::mono_font::ascii::FONT_6X10)
        .text_color(Color::White)
        .background_color(Color::Black)
        .build();
    let layout = TextStyleBuilder::new().baseline(Baseline::Top).build();
    // Drawing into the memory buffer cannot meaningfully fail; discard the result.
    let _ = Text::with_text_style(text, Point::new(x, y), char_style, layout).draw(display);
}
|
use crate::{
alphabet::Alphabet,
nfa::{standard::StandardNFA, standard_eps::StandardEpsilonNFA, NFA},
range_set::Range,
state::State,
};
use core::fmt;
/// An alphabet symbol extended with the distinguished epsilon (empty-move)
/// label, used to label NFA edges.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum EpsilonEquiped<A: Alphabet> {
    Epsilon,
    Alpha(A),
}
impl<A: Alphabet> EpsilonEquiped<A> {
fn to_usize(self) -> usize {
use EpsilonEquiped::*;
match self {
Epsilon => 0,
Alpha(sym) => sym.to_usize() + 1,
}
}
}
impl<A: Alphabet> fmt::Debug for EpsilonEquiped<A> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use EpsilonEquiped::*;
match *self {
Epsilon => write!(f, "ϵ"),
Alpha(sym) => write!(f, "{:?}", sym),
}
}
}
/// Borrowed view over either NFA flavour, so one graphviz renderer can
/// handle both the epsilon and epsilon-free automata.
pub enum NFADisplay<'d, A: Alphabet, I: State> {
    WithEps(&'d StandardEpsilonNFA<A, I>),
    WithoutEps(&'d StandardNFA<A, I>),
}
// Conversions so either NFA reference can be rendered via `.into()`.
impl<'d, A: Alphabet, I: State> From<&'d StandardEpsilonNFA<A, I>> for NFADisplay<'d, A, I> {
    fn from(src: &'d StandardEpsilonNFA<A, I>) -> Self {
        NFADisplay::WithEps(src)
    }
}
impl<'d, A: Alphabet, I: State> From<&'d StandardNFA<A, I>> for NFADisplay<'d, A, I> {
    fn from(src: &'d StandardNFA<A, I>) -> Self {
        NFADisplay::WithoutEps(src)
    }
}
impl<'d, A: Alphabet, I: State> NFADisplay<'d, A, I> {
fn all_states<'a: 'd>(&'a self) -> &'d Range<I> {
use NFADisplay::*;
match self {
WithEps(src) => src.all_states(),
WithoutEps(src) => src.all_states(),
}
}
}
// Graph topology for graphviz: nodes are NFA states, edges are the
// (from, label, to) transition triples.
impl<'d, 'a, A, I> dot::GraphWalk<'a, I, (I, EpsilonEquiped<A>, I)> for NFADisplay<'d, A, I>
where
    A: Alphabet,
    I: State,
{
    /// Every state in the automaton becomes a graph node.
    fn nodes(&self) -> dot::Nodes<'a, I> {
        self.all_states().into_iter().collect::<Vec<_>>().into()
    }
    /// Enumerates all transitions; the epsilon variant additionally chains
    /// each state's epsilon moves after its concrete-symbol moves.
    fn edges(&'a self) -> dot::Edges<'a, (I, EpsilonEquiped<A>, I)> {
        use NFADisplay::*;
        let all_states = self.all_states().into_iter();
        match self {
            WithEps(src) => all_states
                .flat_map(|state| {
                    A::value_iter()
                        .flat_map(move |sym| {
                            src.lookup_concrete(state, sym)
                                .map(move |next| (state, EpsilonEquiped::Alpha(sym), next))
                        })
                        .chain(
                            src.lookup_epsilon(state)
                                .map(move |next| (state, EpsilonEquiped::Epsilon, next)),
                        )
                })
                .collect::<Vec<_>>()
                .into(),
            WithoutEps(src) => all_states
                .flat_map(|state| {
                    A::value_iter().flat_map(move |sym| {
                        src.lookup(state, sym)
                            .map(move |next| (state, EpsilonEquiped::Alpha(sym), next))
                    })
                })
                .collect::<Vec<_>>()
                .into(),
        }
    }
    /// An edge's source is the first tuple element.
    fn source(&self, e: &(I, EpsilonEquiped<A>, I)) -> I {
        e.0
    }
    /// An edge's target is the last tuple element.
    fn target(&self, e: &(I, EpsilonEquiped<A>, I)) -> I {
        e.2
    }
}
impl<'d, 'a, A, I> dot::Labeller<'a, I, (I, EpsilonEquiped<A>, I)> for NFADisplay<'d, A, I>
where
A: Alphabet,
I: State,
{
fn graph_id(&'a self) -> dot::Id<'a> {
dot::Id::new("nfa_graph").unwrap()
}
fn node_id(&'a self, n: &I) -> dot::Id<'a> {
dot::Id::new(format!("N{:?}", *n)).unwrap()
}
fn node_label(&'a self, node: &I) -> dot::LabelText<'a> {
use NFADisplay::*;
let (is_start, is_accept) = match self {
WithEps(src) => {
let is_start = src.start_state() == node;
let is_accept = src.accept_states().contains(node);
(is_start, is_accept)
},
WithoutEps(src) => {
let is_start = src.start_states().contains(node);
let is_accept = src.accept_states().contains(node);
(is_start, is_accept)
},
};
if is_start || is_accept {
dot::LabelText::HtmlStr(format!("<font color=\"white\">{:?}</font>", node).into())
} else {
dot::LabelText::LabelStr(format!("{:?}", node).into())
}
}
fn edge_label<'b>(&'b self, e: &(I, EpsilonEquiped<A>, I)) -> dot::LabelText<'b> {
dot::LabelText::LabelStr(format!("{:?}", e.1).into())
}
fn edge_style(&'a self, _e: &(I, EpsilonEquiped<A>, I)) -> dot::Style {
dot::Style::Bold
}
fn edge_color(&'a self, e: &(I, EpsilonEquiped<A>, I)) -> Option<dot::LabelText<'a>> {
use NFADisplay::*;
let num_symbols = match self {
WithEps(_) => A::num_symbols() + 1,
WithoutEps(_) => A::num_symbols(),
};
if 2 < num_symbols && num_symbols <= 8 {
Some(dot::LabelText::LabelStr(
format!("/accent{}/{}", num_symbols, e.1.to_usize() + 1).into(),
))
} else if num_symbols == 2 {
let val = e.1.to_usize();
let color = if val == 0 { "/piyg3/1" } else { "/piyg3/3" };
Some(dot::LabelText::LabelStr(color.into()))
} else {
None
}
}
fn node_style(&'a self, node: &I) -> dot::Style {
use NFADisplay::*;
let (is_start, is_accept) = match self {
WithEps(src) => {
let is_start = src.start_state() == node;
let is_accept = src.accept_states().contains(node);
(is_start, is_accept)
},
WithoutEps(src) => {
let is_start = src.start_states().contains(node);
let is_accept = src.accept_states().contains(node);
(is_start, is_accept)
},
};
match (is_start, is_accept) {
(true, true) => dot::Style::Filled,
(_, true) => dot::Style::Filled,
(true, _) => dot::Style::Filled,
(false, false) => dot::Style::Solid,
}
}
fn node_color(&'a self, node: &I) -> Option<dot::LabelText<'a>> {
use NFADisplay::*;
let (is_start, is_accept) = match self {
WithEps(src) => {
let is_start = src.start_state() == node;
let is_accept = src.accept_states().contains(node);
(is_start, is_accept)
},
WithoutEps(src) => {
let is_start = src.start_states().contains(node);
let is_accept = src.accept_states().contains(node);
(is_start, is_accept)
},
};
let shape = match (is_start, is_accept) {
(true, true) => "purple",
(_, true) => "blue",
(true, _) => "red",
(false, false) => "black",
};
Some(dot::LabelText::LabelStr(shape.into()))
}
fn node_shape(&'a self, _node: &I) -> Option<dot::LabelText<'a>> {
Some(dot::LabelText::LabelStr("circle".into()))
}
}
|
// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use crocksdb_ffi;
use libc::{c_char, c_void};
use librocksdb_sys::{DBEnv, DBInfoLogLevel as InfoLogLevel, DBLogger};
use std::ffi::{CStr, CString};
use std::str;
/// Sink for RocksDB log lines; implementations receive each formatted
/// line together with its severity level.
pub trait Logger: Send + Sync {
    fn logv(&self, log_level: InfoLogLevel, log: &str);
}
/// FFI callback invoked when RocksDB destroys the logger: reclaims and
/// drops the boxed `L` handed out by `new_logger`.
extern "C" fn destructor<L: Logger>(ctx: *mut c_void) {
    unsafe {
        // SAFETY-NOTE(review): assumes `ctx` originated from `Box::into_raw`
        // of an `L` in `new_logger` and is never used again after this call.
        let _ = Box::from_raw(ctx as *mut L);
    }
}
/// FFI callback that forwards one formatted log line to the wrapped `Logger`.
extern "C" fn logv<L: Logger>(ctx: *mut c_void, log_level: InfoLogLevel, log: *const c_char) {
    unsafe {
        let logger = &*(ctx as *mut L);
        let log = CStr::from_ptr(log);
        // Lossy conversion: log lines are not guaranteed to be valid UTF-8.
        logger.logv(log_level, &log.to_string_lossy());
    }
}
/// Wraps `l` in a RocksDB logger handle.
///
/// Ownership of `l` transfers to the C side; it is reclaimed and dropped
/// by `destructor::<L>` when RocksDB destroys the logger.
pub fn new_logger<L: Logger>(l: L) -> *mut DBLogger {
    // Box the concrete `L` rather than `Box<dyn Logger>`: the context
    // pointer is round-tripped through `*mut c_void` back to `*mut L` in
    // the callbacks, so it must be a thin pointer to exactly `L` (a fat
    // trait-object pointer would silently lose its vtable in the cast).
    let p = Box::new(l);
    unsafe {
        crocksdb_ffi::crocksdb_logger_create(
            Box::into_raw(p) as *mut c_void,
            destructor::<L>,
            logv::<L>,
        )
    }
}
/// Creates a RocksDB-native logger writing to file `fname` using `env`.
///
/// NOTE(review): `env` is taken by value and a pointer to this stack copy
/// is handed to C — confirm the C side copies the env and does not retain
/// the pointer past this call.
pub fn create_env_logger(fname: &str, mut env: DBEnv) -> *mut DBLogger {
    let name = CString::new(fname.as_bytes()).unwrap();
    unsafe { crocksdb_ffi::crocksdb_create_env_logger(name.as_ptr(), &mut env) }
}
|
use std::fs;
use clap::Parser;
use goblin::Object;
/// Appends every entry of `directory_path` to `total_paths`.
///
/// Returns `None` on success, or `Some(error)` when the directory itself
/// cannot be read. Individual entries that fail to resolve are skipped.
///
/// # Arguments
///
/// * `directory_path` - The path which to collect all files in.
/// * `total_paths` - The vector which is populated with the files in the directory.
fn collect_all_files(directory_path: &str, total_paths: &mut Vec<std::path::PathBuf>) -> Option<std::io::Error> {
    let entries = match fs::read_dir(directory_path) {
        Ok(entries) => entries,
        Err(error) => return Some(error),
    };
    // `flatten` silently drops unreadable entries, mirroring the original
    // `if let Ok(...)` behavior.
    for entry in entries.flatten() {
        total_paths.push(entry.path());
    }
    None
}
/// A program which can query a directory of files, find the binaries, and search for a specified Win API import.
#[derive(Parser, Debug)]
#[clap(version, about, long_about = None)]
struct Args {
    /// Directory where to query for PE file imports
    #[clap(value_name = "Directory")]
    input_directory: String,
    /// WinAPI import to search for
    #[clap(value_name = "Import Name")]
    import_name: String,
    /// Print extra output while parsing
    // NOTE(review): `verbose` is not currently read anywhere in `main` —
    // confirm whether the per-file error output should be gated on it.
    #[clap(short, long)]
    verbose: bool
}
fn main() {
let args = Args::parse();
let api_query = args.import_name;
let mut total_paths : Vec<std::path::PathBuf> = vec![];
if let Some(error) = collect_all_files("C:\\Windows\\System32\\Drivers", &mut total_paths) {
panic!("[!] Error: {error}");
}
for path in &total_paths {
if let Ok(buffer) = fs::read(path) {
let object = Object::parse(&buffer);
match object {
Ok(object) => {
match object {
Object::PE(pe) => {
for import in &pe.imports {
if import.name.eq(&api_query) {
println!("[i] Found API ({}) match in {}", api_query, path.display());
}
}
},
Object::Elf(_) => (),
Object::Mach(_) => (),
Object::Archive(_) => (),
Object::Unknown(_) => ()
}
},
Err(error) => {
println!("[!] Error on {}: {}. Skipping...", path.display(), error);
}
}
}
}
}
|
mod easy;
mod hard;
mod medium;
mod sword_offer;
mod common;
/// Dispatches to one problem set; edit `default` to pick which group of
/// solutions runs.
pub fn main() {
    let default = 4;
    match default {
        // Easy problems
        1 => invoke(easy::main),
        // Medium problems
        2 => invoke(medium::main),
        // Hard problems
        3 => invoke(hard::main),
        // "Sword Offer" (剑指 offer) interview series
        4 => invoke(sword_offer::main),
        _ => {}
    }
}
/// Runs `method` between decorative banner lines so each problem set's
/// output is visually delimited.
fn invoke(method: fn()) {
    let banner = "+++++++++++++++++++++++++++++++++++++++++++++++";
    println!();
    println!("力扣: 打印开始");
    println!("{}", banner);
    method();
    println!("{}", banner);
    println!("力扣: 打印结束");
    println!();
}
use crate::{
buffers::Acker,
event::{self, Event},
sinks::util::{
http::{HttpRetryLogic, HttpService},
retries::FixedRetryPolicy,
BatchServiceSink, Buffer, SinkExt,
},
};
use base64;
use std::collections::HashMap;
use std::time::Duration;
use futures::{Future, Sink};
use http::Method;
use hyper::{Client, Uri};
use serde::{Deserialize, Serialize};
use tower::ServiceBuilder;
// URL probed by `healthcheck` to verify GCP reachability.
const HEALTHCHECK_ENDPOINT: &'static str = "https://www.googleapis.com/auth/cloud-platform";
/// User-facing configuration for the GCP Pub/Sub sink.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
#[serde(deny_unknown_fields)]
pub struct GCPPubsubSinkConfig {
    // NOTE(review): `project_id` is not referenced when building the
    // publish URL — confirm whether `topic` is expected to be fully qualified.
    project_id: String,
    topic: String,
    // Events per HTTP request; `gcp_pubsub_sink` defaults this to 0.
    batch_size: Option<usize>,
    // Tower Request configurations since we're using
    // HTTP Rest APIs.
    // TODO: GRPC interfaces requires rust binding to protos
    pub request_timeout: Option<u64>,
    pub request_rate_limit_duration_secs: Option<u64>,
    pub request_rate_limit_num: Option<u64>,
    pub request_in_flight_limit: Option<usize>,
    pub request_retry_attempts: Option<usize>,
    pub request_retry_backoff_secs: Option<u64>,
}
// NOTE(review): appears unused in this file — confirm it is still needed.
pub struct GCPPubsubSink;
/// JSON shape of a Pub/Sub message: base64 payload plus string attributes.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct PubsubMessage {
    pub data: Vec<u8>,
    pub attributes: HashMap<String, String>,
}
#[typetag::serde(name = "gcp_pubsub")]
impl crate::topology::config::SinkConfig for GCPPubsubSinkConfig {
    /// Builds the router sink together with its healthcheck future.
    fn build(&self, acker: Acker) -> Result<(super::RouterSink, super::Healthcheck), String> {
        let sink = gcp_pubsub_sink(self.clone(), acker);
        let check = healthcheck()?;
        Ok((sink, check))
    }
}
/// Builds a future that verifies the GCP endpoint is reachable and
/// responds with a successful HTTP status.
fn healthcheck() -> Result<super::Healthcheck, String> {
    let client = Client::new();
    let fut = client
        .get(Uri::from_static(HEALTHCHECK_ENDPOINT))
        .map_err(|e| format!("GCP healthcheck failed: {}", e))
        .and_then(|response| {
            // Resolves the previous FIXME: connectivity alone is not
            // enough — require a 2xx status as well.
            if response.status().is_success() {
                Ok(())
            } else {
                Err(format!(
                    "GCP healthcheck failed with status: {}",
                    response.status()
                ))
            }
        });
    Ok(Box::new(fut))
}
/// Assembles the Pub/Sub sink: an HTTP publish service wrapped with
/// retry, rate limiting, concurrency limiting, timeout, and batching.
fn gcp_pubsub_sink(config: GCPPubsubSinkConfig, acker: Acker) -> super::RouterSink {
    // Tower middleware knobs, with the same defaults as before.
    let retry_attempts = config.request_retry_attempts.unwrap_or(100);
    let retry_backoff_secs = config.request_retry_backoff_secs.unwrap_or(1);
    let in_flight_limit = config.request_in_flight_limit.unwrap_or(5);
    let rate_limit_num = config.request_rate_limit_num.unwrap_or(5);
    let rate_limit_duration = config.request_rate_limit_duration_secs.unwrap_or(1);
    let timeout = config.request_timeout.unwrap_or(0);
    let batch_size = config.batch_size.unwrap_or(0usize);
    let policy = FixedRetryPolicy::new(
        retry_attempts,
        Duration::from_secs(retry_backoff_secs),
        HttpRetryLogic,
    );
    // The publish URL depends only on the topic, so format it once up
    // front instead of rebuilding the string on every request.
    let publish_url = format!("https://pubsub.googleapis.com/v1/{}:publish", config.topic);
    let gcp_pubsub_service = HttpService::new(move |body: Vec<u8>| {
        let mut builder = hyper::Request::builder();
        builder.method(Method::POST);
        builder.uri(&publish_url);
        builder.header("Content-Type", "application/json");
        builder.body(body).unwrap()
    });
    let service = ServiceBuilder::new()
        .concurrency_limit(in_flight_limit)
        .rate_limit(rate_limit_num, Duration::from_secs(rate_limit_duration))
        .retry(policy)
        .timeout(Duration::from_secs(timeout))
        .service(gcp_pubsub_service);
    // NOTE(review): events are serialized directly here rather than via
    // `encode_event`/`PubsubMessage` — confirm which wire shape is intended.
    let sink = BatchServiceSink::new(service, acker)
        .batched_with_min(Buffer::new(false), batch_size, Duration::from_secs(timeout))
        .with(move |event: Event| {
            let mut body: Vec<u8> = vec![];
            serde_json::to_writer(&mut body, &event.as_log().all_fields()).unwrap();
            Ok(body)
        });
    Box::new(sink)
}
fn encode_event(event: &Event) -> PubsubMessage {
let log = event.as_log();
let payload = base64::encode(
&log.get(&event::MESSAGE)
.map(|v| v.as_bytes().to_vec())
.unwrap_or(Vec::new()),
);
if (log.is_structured()) {
let all = log
.explicit_fields()
.map(|(k, v)| (k.as_ref().to_string(), v.to_string_lossy()))
.collect::<HashMap<String, String>>();
PubsubMessage {
data: payload.as_bytes().to_vec(),
attributes: all,
}
} else {
PubsubMessage {
data: payload.as_bytes().to_vec(),
attributes: HashMap::new(),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Unstructured events: payload is base64 of the raw message, no attributes.
    #[test]
    fn pubsub_encode_event_non_structured() {
        let raw_message = "hello cruel world".to_string();
        let b64_message = "aGVsbG8gY3J1ZWwgd29ybGQ=".to_string();
        let payload = encode_event(&raw_message.clone().into());
        assert_eq!(payload.data, b64_message.as_bytes().to_vec());
    }
    // Structured events: explicit fields surface as message attributes.
    #[test]
    fn pubsub_encode_event_structured() {
        let raw_message = "hello cruel world".to_string();
        let b64_message = "aGVsbG8gY3J1ZWwgd29ybGQ=".to_string();
        let mut event = Event::from(raw_message);
        event.as_mut_log().insert_explicit("k".into(), "v".into());
        event
            .as_mut_log()
            .insert_explicit("foo".into(), "bar".into());
        let payload = encode_event(&event);
        let expected: HashMap<String, String> = [
            ("k".to_string(), "v".to_string()),
            ("foo".to_string(), "bar".to_string()),
        ]
        .iter()
        .cloned()
        .collect();
        assert_eq!(payload.attributes, expected);
        assert_eq!(payload.data, b64_message.as_bytes().to_vec())
    }
}
|
use neon::prelude::*;
use rayon;
mod gridstore;
use gridstore::*;
mod fuzzy_phrase;
use crate::fuzzy_phrase::*;
// Neon module entry point: configures rayon and exposes the gridstore and
// fuzzy-phrase classes/functions to JavaScript.
register_module!(mut m, {
    // set thread count to 16 regardless of number of cores
    rayon::ThreadPoolBuilder::new().num_threads(16).build_global().unwrap();
    // Grid storage classes and coalescing entry points.
    m.export_class::<JsGridStoreBuilder>("GridStoreBuilder")?;
    m.export_class::<JsGridStore>("GridStore")?;
    m.export_class::<JsGridKeyStoreKeyIterator>("GridStoreKeyIterator")?;
    m.export_function("coalesce", js_coalesce)?;
    m.export_function("stackable", js_stackable)?;
    m.export_function("stackAndCoalesce", js_stack_and_coalesce)?;
    // Fuzzy phrase matching classes.
    m.export_class::<JsFuzzyPhraseSetBuilder>("FuzzyPhraseSetBuilder")?;
    m.export_class::<JsFuzzyPhraseSet>("FuzzyPhraseSet")?;
    Ok(())
});
|
use criterion::{criterion_group, criterion_main, Criterion};
use scc::HashMap;
use std::convert::TryInto;
use std::time::Instant;
/// Benchmarks insertion into a freshly created (cold) hash map; the map
/// is dropped outside the timed region.
fn insert_cold(c: &mut Criterion) {
    c.bench_function("HashMap: insert, cold", |b| {
        b.iter_custom(|iters| {
            let map: HashMap<u64, u64> = HashMap::default();
            let begin = Instant::now();
            for key in 0..iters {
                assert!(map.insert(key, key).is_ok());
            }
            let took = begin.elapsed();
            drop(map);
            took
        })
    });
}
/// Benchmarks insertion after pre-reserving capacity for 2x the inserts,
/// so no array growth happens inside the timed region.
fn insert_array_warmed_up(c: &mut Criterion) {
    c.bench_function("HashMap: insert, array warmed up", |b| {
        b.iter_custom(|iters| {
            let map: HashMap<u64, u64> = HashMap::default();
            let ticket = map.reserve((iters * 2).try_into().unwrap());
            assert!(ticket.is_some());
            let begin = Instant::now();
            for key in 0..iters {
                assert!(map.insert(key, key).is_ok());
            }
            let took = begin.elapsed();
            drop(ticket);
            drop(map);
            took
        })
    });
}
/// Benchmarks insertion after both reserving capacity and exercising every
/// slot once (insert + remove), so internal structures are fully warmed.
fn insert_fully_warmed_up(c: &mut Criterion) {
    c.bench_function("HashMap: insert, fully warmed up", |b| {
        b.iter_custom(|iters| {
            let map: HashMap<u64, u64> = HashMap::default();
            let ticket = map.reserve((iters * 2).try_into().unwrap());
            assert!(ticket.is_some());
            // Warm-up pass: touch every key once, outside the timed region.
            for key in 0..iters {
                assert!(map.insert(key, key).is_ok());
                assert!(map.remove(&key).is_some());
            }
            let begin = Instant::now();
            for key in 0..iters {
                assert!(map.insert(key, key).is_ok());
            }
            let took = begin.elapsed();
            drop(ticket);
            drop(map);
            took
        })
    });
}
/// Benchmarks read access to a single pre-inserted entry.
fn read(c: &mut Criterion) {
    let map: HashMap<usize, usize> = HashMap::default();
    assert!(map.insert(1, 1).is_ok());
    c.bench_function("HashMap: read", |b| {
        b.iter(|| {
            map.read(&1, |_, value| assert_eq!(*value, 1));
        })
    });
}
// Register all HashMap benchmarks under one group and generate `main`.
criterion_group!(
    hash_map,
    insert_cold,
    insert_array_warmed_up,
    insert_fully_warmed_up,
    read
);
criterion_main!(hash_map);
|
use kerla_runtime::address::UserVAddr;
use crate::result::{Errno, Result};
use crate::syscalls::SyscallHandler;
use crate::{ctypes::*, process::current_process};
use crate::user_buffer::UserBufWriter;
impl<'a> SyscallHandler<'a> {
    /// `getcwd(2)`: copy the current working directory's absolute path into
    /// the user buffer `buf` of size `len`, NUL-terminated.
    ///
    /// Returns `ERANGE` when the buffer cannot hold the path *plus* its
    /// terminating NUL byte (POSIX requires counting the NUL). On success
    /// the buffer address is returned, mirroring Linux's convention.
    pub fn sys_getcwd(&mut self, buf: UserVAddr, len: c_size) -> Result<isize> {
        let cwd = current_process()
            .root_fs()
            .lock()
            .cwd_path()
            .resolve_absolute_path();
        // +1 accounts for the NUL written below; without it, a buffer of
        // exactly the path length passed this check and then failed (or
        // truncated) on the NUL write instead of reporting ERANGE.
        if (len as usize) < cwd.as_str().as_bytes().len() + 1 {
            return Err(Errno::ERANGE.into());
        }
        let mut writer = UserBufWriter::from_uaddr(buf, len as usize);
        writer.write_bytes(cwd.as_str().as_bytes())?;
        writer.write(0u8)?;
        Ok(buf.as_isize())
    }
}
|
/// A 2D RGBA texture uploaded to the GPU, together with the sampler and
/// bind group needed to reference it from a fragment shader.
pub struct Texture {
    // Kept only to hold the GPU resources alive for the bind group's lifetime.
    _diffuse_texture: wgpu::Texture,
    _diffuse_texture_view: wgpu::TextureView,
    _diffuse_sampler: wgpu::Sampler,
    // Binding 0 = sampled texture view, binding 1 = sampler (see layout below).
    diffuse_bind_group: wgpu::BindGroup,
}
/// Layout shared by every `Texture` bind group: a non-multisampled 2D
/// sampled texture at binding 0 and a sampler at binding 1, both visible
/// to the fragment stage only.
pub static BIND_GROUP_LAYOUT_DESCRIPTOR: wgpu::BindGroupLayoutDescriptor =
    wgpu::BindGroupLayoutDescriptor {
        bindings: &[
            wgpu::BindGroupLayoutBinding {
                binding: 0,
                visibility: wgpu::ShaderStage::FRAGMENT,
                ty: wgpu::BindingType::SampledTexture {
                    multisampled: false,
                    dimension: wgpu::TextureViewDimension::D2,
                },
            },
            wgpu::BindGroupLayoutBinding {
                binding: 1,
                visibility: wgpu::ShaderStage::FRAGMENT,
                ty: wgpu::BindingType::Sampler,
            },
        ],
    };
impl Texture {
    /// Reads an image file from disk and uploads it as a GPU texture.
    ///
    /// I/O failures are wrapped into `ImageError::IoError` so the function
    /// has a single error type.
    pub fn load_from_file<P: AsRef<std::path::Path>>(
        path: P,
        device: &wgpu::Device,
        queue: &mut wgpu::Queue,
        layout: &wgpu::BindGroupLayout,
    ) -> Result<Texture, image::error::ImageError> {
        let bytes = match std::fs::read(path) {
            Ok(bytes) => bytes,
            Err(e) => return Err(image::error::ImageError::IoError(e)),
        };
        Self::from_buffer(device, queue, layout, &bytes)
    }
    /// Decodes an in-memory image, uploads it to the GPU via a staging
    /// buffer, and builds the sampler + bind group.
    ///
    /// The image must already decode to RGBA8; other pixel formats are
    /// rejected with a decoding error rather than converted.
    pub fn from_buffer(
        device: &wgpu::Device,
        queue: &mut wgpu::Queue,
        layout: &wgpu::BindGroupLayout,
        bytes: &[u8],
    ) -> Result<Texture, image::error::ImageError> {
        use image::error::{DecodingError, ImageError, ImageFormatHint};
        let diffuse_image = image::load_from_memory(bytes)?;
        let diffuse_rgba = match diffuse_image.as_rgba8() {
            Some(image) => image,
            None => {
                return Err(ImageError::Decoding(DecodingError::new(
                    ImageFormatHint::Name("RGBA8".to_owned()),
                    "Textures must be in RGBA8 format (32-bit).".to_owned(),
                )))
            }
        };
        let dimensions = diffuse_rgba.dimensions();
        let size = wgpu::Extent3d {
            width: dimensions.0,
            height: dimensions.1,
            depth: 1,
        };
        // Destination texture: sampled in shaders, copy target for upload.
        let diffuse_texture = device.create_texture(&wgpu::TextureDescriptor {
            size,
            array_layer_count: 1,
            mip_level_count: 1,
            sample_count: 1,
            dimension: wgpu::TextureDimension::D2,
            format: wgpu::TextureFormat::Rgba8UnormSrgb,
            usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST,
        });
        // Stage the pixels in a CPU-visible buffer, then record and submit a
        // buffer-to-texture copy.
        let diffuse_buffer = device
            .create_buffer_mapped(diffuse_rgba.len(), wgpu::BufferUsage::COPY_SRC)
            .fill_from_slice(&diffuse_rgba);
        let mut encoder =
            device.create_command_encoder(&wgpu::CommandEncoderDescriptor { todo: 0 });
        encoder.copy_buffer_to_texture(
            wgpu::BufferCopyView {
                buffer: &diffuse_buffer,
                offset: 0,
                // 4 bytes per RGBA8 pixel, tightly packed rows.
                row_pitch: 4 * dimensions.0,
                image_height: dimensions.1,
            },
            wgpu::TextureCopyView {
                texture: &diffuse_texture,
                mip_level: 0,
                array_layer: 0,
                origin: wgpu::Origin3d::ZERO,
            },
            size,
        );
        queue.submit(&[encoder.finish()]);
        let diffuse_texture_view = diffuse_texture.create_default_view();
        // Clamp addressing with bilinear magnification; no mipmaps were
        // generated above, so the mip filter settings are effectively moot.
        let diffuse_sampler = device.create_sampler(&wgpu::SamplerDescriptor {
            address_mode_u: wgpu::AddressMode::ClampToEdge,
            address_mode_v: wgpu::AddressMode::ClampToEdge,
            address_mode_w: wgpu::AddressMode::ClampToEdge,
            mag_filter: wgpu::FilterMode::Linear,
            min_filter: wgpu::FilterMode::Nearest,
            mipmap_filter: wgpu::FilterMode::Nearest,
            lod_min_clamp: -100.0,
            lod_max_clamp: 100.0,
            compare_function: wgpu::CompareFunction::Always,
        });
        // Bindings must mirror BIND_GROUP_LAYOUT_DESCRIPTOR.
        let diffuse_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
            layout,
            bindings: &[
                wgpu::Binding {
                    binding: 0,
                    resource: wgpu::BindingResource::TextureView(&diffuse_texture_view),
                },
                wgpu::Binding {
                    binding: 1,
                    resource: wgpu::BindingResource::Sampler(&diffuse_sampler),
                },
            ],
        });
        Ok(Texture {
            _diffuse_texture: diffuse_texture,
            _diffuse_texture_view: diffuse_texture_view,
            _diffuse_sampler: diffuse_sampler,
            diffuse_bind_group,
        })
    }
    /// Creates the bind group layout matching `BIND_GROUP_LAYOUT_DESCRIPTOR`.
    pub fn create_bind_group_layout(device: &wgpu::Device) -> wgpu::BindGroupLayout {
        device.create_bind_group_layout(&BIND_GROUP_LAYOUT_DESCRIPTOR)
    }
    /// The bind group to set when drawing with this texture.
    pub fn bind_group(&self) -> &wgpu::BindGroup {
        &self.diffuse_bind_group
    }
}
|
/// Placeholder for `+CME ERROR` result codes; variants are still to be
/// filled in (see the derive sketch in the comment below).
pub enum CME {
}
// Idea:
// #[derive(ATATErr)]
// #[at_err("+CME ERROR")]
// pub enum CmeError {
// #[at_arg(0, "Phone failure")]
// PhoneFailure,
// }
|
pub mod interpolation;
|
use crate::op::*;
use crate::value::{Object, Value};
/// A compiled bytecode chunk: the raw instruction stream plus a parallel
/// per-byte source-line table and a constant pool.
#[derive(Clone, Debug)]
pub struct Chunk {
    // Constant pool, indexed by a single operand byte (so max 256 entries).
    pub constants: Vec<Value>,
    // One source-line entry per byte in `buffer` (kept parallel).
    pub lines: Vec<usize>,
    // The raw opcode/operand bytes.
    pub buffer: Vec<u8>,
}
impl Chunk {
    /// Creates an empty chunk.
    pub fn new() -> Chunk {
        Chunk {
            constants: Vec::new(),
            lines: Vec::new(),
            buffer: Vec::new(),
        }
    }
    /// Appends a single byte, recording the source line it came from.
    pub fn push_byte(&mut self, v: u8, line: usize) {
        self.buffer.push(v);
        self.lines.push(line);
    }
    /// Appends a byte slice, attributing every byte to the same source line.
    pub fn push_bytes(&mut self, v: &[u8], line: usize) {
        self.buffer.extend_from_slice(v);
        // Keep `lines` parallel to `buffer`: one entry per appended byte.
        self.lines.extend(std::iter::repeat(line).take(v.len()));
    }
    /// Adds a constant to the pool and returns its index as an operand byte.
    ///
    /// # Panics
    /// Panics if the pool already holds 256 constants. The previous code
    /// silently truncated the index with `as u8`, emitting operands that
    /// referenced the wrong constant.
    pub fn push_const(&mut self, v: Value) -> u8 {
        self.constants.push(v);
        let idx = self.constants.len() - 1;
        assert!(
            idx <= u8::MAX as usize,
            "constant pool overflow: more than 256 constants in one chunk"
        );
        idx as u8
    }
}
/// Pretty-prints every instruction in `chunk` under a `== name ==` header,
/// then recursively disassembles any function chunks discovered inside
/// `Closure` constants along the way.
pub fn disassemble_chunk(into: &mut impl std::fmt::Write, chunk: &Chunk, name: &str) -> Result<(), std::fmt::Error> {
    writeln!(into, "== {} ==", name)?;
    let mut discovered = Vec::<(String, Chunk)>::new();
    let mut cursor = 0;
    loop {
        if cursor >= chunk.buffer.len() {
            break;
        }
        // Each call reports how many bytes the instruction consumed.
        cursor += disassemble_instruction(into, chunk, cursor, &mut discovered)?;
    }
    for (inner_name, inner_chunk) in &discovered {
        disassemble_chunk(into, inner_chunk, inner_name)?;
    }
    Ok(())
}
/// Decodes and pretty-prints the single instruction at `offset`, returning
/// the number of bytes it occupied (opcode + operands).
///
/// `Closure` constants push their function chunks onto `nested` for later
/// disassembly; their inline upvalue descriptors (2 bytes each) are printed
/// here and counted as extra operand bytes.
pub fn disassemble_instruction(
    into: &mut impl std::fmt::Write,
    chunk: &Chunk,
    offset: usize,
    nested: &mut Vec<(String, Chunk)>,
) -> Result<usize, std::fmt::Error> {
    write!(into, "{:04} ", offset)?;
    // Print `|` instead of repeating an unchanged source line number.
    if offset > 0 && chunk.lines[offset] == chunk.lines[offset - 1] {
        write!(into, "   | ")?;
    } else {
        write!(into, "{:4} ", chunk.lines[offset])?;
    }
    let op = Opcode::decode_unchecked(chunk.buffer[offset]);
    write!(into, "{}", op)?;
    use Opcode::*;
    match op {
        // Ops whose single operand indexes the constant pool.
        Constant => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        Closure => {
            let value = &chunk.constants[chunk.buffer[offset + 1] as usize];
            writeln!(into, "\t{}", value)?;
            if let Value::Object(object) = value {
                if let Object::Function(func) = &(*object.borrow()) {
                    // Queue the function's chunk for recursive disassembly.
                    nested.push((func.name.clone(), func.chunk.clone()));
                    let upvalues = func.num_upvalues as usize;
                    // Upvalue descriptors follow the constant operand:
                    // (is_local, index) byte pairs.
                    let mut local_offset = offset + 1;
                    for _ in 0..upvalues {
                        let is_local = chunk.buffer[local_offset + 1];
                        let index = chunk.buffer[local_offset + 2];
                        writeln!(
                            into,
                            "{:04} | \t\t{} {}",
                            local_offset + 1,
                            if is_local == 1 { "local" } else { "upvalue" },
                            index
                        )?;
                        local_offset += 2;
                    }
                    // Opcode + constant operand + 2 bytes per upvalue.
                    return Ok(op.operands() + 1 + upvalues as usize * 2);
                }
            }
        }
        Class => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        Method => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        DefineGlobal => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        GetGlobal => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        SetGlobal => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        // Ops whose operand is a raw slot index.
        GetLocal => writeln!(into, "\t[{}]", chunk.buffer[offset + 1])?,
        SetLocal => writeln!(into, "\t[{}]", chunk.buffer[offset + 1])?,
        GetUpvalue => writeln!(into, "\t[{}]", chunk.buffer[offset + 1])?,
        SetUpvalue => writeln!(into, "\t[{}]", chunk.buffer[offset + 1])?,
        GetProp => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        SetProp => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        GetSuper => writeln!(into, "\t{}", chunk.constants[chunk.buffer[offset + 1] as usize])?,
        Call => writeln!(into, "\t\targs {}", chunk.buffer[offset + 1])?,
        // Jump targets are 16-bit relative offsets stored in native order.
        JumpIfFalse => writeln!(
            into,
            "\t+{}",
            u16::from_ne_bytes([chunk.buffer[offset + 1], chunk.buffer[offset + 2]])
        )?,
        Jump => writeln!(
            into,
            "\t\t+{}",
            u16::from_ne_bytes([chunk.buffer[offset + 1], chunk.buffer[offset + 2]])
        )?,
        Loop => writeln!(
            into,
            "\t\t-{}",
            u16::from_ne_bytes([chunk.buffer[offset + 1], chunk.buffer[offset + 2]])
        )?,
        // Everything else has no operands to print.
        _ => writeln!(into)?,
    };
    Ok(op.operands() + 1)
}
|
use std::collections::HashMap;
#[cfg(test)]
mod tests {
    use super::*;
    /// Sanity checks for `decode_bits` on inputs small enough to verify by
    /// hand. (The previous version printed a long sample and then ran
    /// `assert_eq!(false, true)`, so it failed unconditionally.)
    #[test]
    fn examples() {
        // An unbroken run of ones (no pauses) is treated as a single dot.
        assert_eq!(decode_bits("1"), ".");
        // Rate 1: dot, intra-letter pause, dash => ".-" (letter A).
        assert_eq!(decode_bits("10111"), ".-");
        // Rate 1: dash, pause, dash within one letter => "--" (letter M).
        assert_eq!(decode_bits("1110111"), "--");
    }
}
// mod preloaded;
// use preloaded::MORSE_CODE;
// MORSE_CODE is `HashMap<String, String>`. e.g. ".-" -> "A".
// error-prone when input is "01110"
/// Decodes a sampled bit string into morse dots and dashes.
///
/// The transmission rate (samples per time unit) is inferred from the run
/// lengths of ones and zeros: a candidate rate `i` is accepted when every
/// run is exactly `i`, `3*i`, or `7*i` samples long. Leading/trailing zeros
/// are silence and are trimmed first.
///
/// Fixed here: the function printed its input to stdout on every call —
/// leftover debug output — and carried blocks of commented-out scaffolding.
///
/// NOTE(review): the rate search starts at 2 and silently falls back to 1,
/// so ambiguous inputs such as "01110" (per the original author's comment)
/// may decode at the wrong rate — behavior unchanged by this cleanup.
pub fn decode_bits(encoded: &str) -> String {
    // Surrounding silence carries no information.
    let encoded = encoded.trim_matches('0');
    // No pauses at all: the whole signal is one dot, whatever the rate.
    if !encoded.contains("0") {
        return ".".to_string();
    }
    // Upper bound for the rate search below.
    let max_zeros = encoded.chars().filter(|c| *c=='0').count() as u32;
    // Run lengths of zeros and ones; at a valid rate each run is 1, 3 or 7
    // time units long. The filters drop the empty fragments `split` yields
    // between adjacent identical characters.
    let zero_length:Vec<u32> = encoded.split('1').filter(|&x| {
        x.len() != 0
    }).map(|x| x.len() as u32).collect();
    let one_length:Vec<u32> = encoded.split('0').filter(|&x| {
        x.len() != 0
    }).map(|x| x.len() as u32).collect();
    let mut transmission_rate = 1;
    for i in 2..max_zeros+1 {
        if zero_length.iter().all(|&x|
            {
                let n = x/i;
                let rem = x%i;
                rem == 0 && (n==1 || n==3 || n==7)
            }) && one_length.iter().all(|&x|
            {
                let n = x/i;
                let rem = x%i;
                rem == 0 && (n==1 || n==3 || n==7)
            }){
            transmission_rate = i;
            break;
        }
    }
    // Downsample to one character per time unit, then translate:
    // "1" -> dot, "111" -> dash, "000" -> letter gap, "0000000" -> word gap.
    let res :String = encoded.chars().step_by(transmission_rate as usize).collect();
    res.split("0000000").map(|word| {
        word.split("000").map(|ch| {
            ch.split("0").map(|d2d| {
                if d2d == "1"{
                    "."
                }else{
                    "-"
                }
            }).collect::<Vec<&str>>().join("")
        }).collect::<Vec<String>>().join(" ")
    }).collect::<Vec<String>>().join(" ")
}
// A simple morse decoder: translates dots/dashes back to text via a lookup
// table.
fn decode_morse(encoded: &str) -> String {
    // Stand-in for the kata's preloaded MORSE_CODE table; with this empty
    // map every letter decodes to "". Comment this line out when submitting
    // so the real table is used instead.
    let MORSE_CODE:HashMap<String,String> = HashMap::new();
    let encoded = encoded.trim();
    // Outer split: word separator; inner split: letter separator.
    // NOTE(review): both split patterns appear here as a single space —
    // confirm the word separator is actually three spaces, as standard
    // morse spacing (and the example code below) requires.
    encoded.split(" ").map(|x| {
        // One word at a time.
        x.split(" ").map(|word| {
            if let Some(w) = MORSE_CODE.get(word) {
                w
            }else{
                ""
            }
        }).collect::<Vec<&str>>().join("")
    }).collect::<Vec<String>>().join(" ")
}
/* Example Code
pub fn decode_bits(encoded: &str) -> String {
// Trim excess zeros at the start and end
let encoded = encoded.trim_matches('0');
// Get the length of a time unit by finding the shortest sequence of zeros or ones,
// this will represent a time unit of one which equals a dot
let rate = {
let rate_ones = encoded
.split("0")
.filter_map(|ones| (!ones.is_empty()).then(|| ones.len()))
.min()
.unwrap_or(usize::MAX);
let rate_zeros = encoded
.split("1")
.filter_map(|zeros| (!zeros.is_empty()).then(|| zeros.len()))
.min()
.unwrap_or(usize::MAX);
rate_zeros.min(rate_ones)
};
// Parse the encoded message
encoded
.chars() // Iterate through the characters
.step_by(rate) // Only parse every n-th code
.collect::<String>() // Collect it into a string
// Begin converting from 1/0 to dot/dash
.replace("111", "-") // Dash
.replace("1", ".") // Dot
.replace("0000000", " ") // Word seperator
.replace("000", " ") // Letter seperator
.replace("0", "") // Dot/Dash seperator
}
pub fn decode_morse(encoded: &str) -> String {
encoded
.trim()
.split(" ")
.map(|word| {
word.split(" ")
.filter_map(|letter| MORSE_CODE.get(letter).map(|letter| letter.clone()))
.collect::<String>()
})
.collect::<Vec<String>>()
.join(" ")
}
*/ |
use std::{
cmp,
ops::{Add, AddAssign, Neg, Sub, SubAssign},
};
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
// Copied from core lib /src/internal_macros.rs
/// Forwards a unary operator impl from `&T` to the existing `T` impl
/// (requires `T: Copy`, since the body dereferences).
macro_rules! forward_ref_unop {
    (impl $imp:ident, $method:ident for $t:ty) => {
        impl $imp for &$t {
            type Output = <$t as $imp>::Output;
            #[inline]
            fn $method(self) -> <$t as $imp>::Output {
                $imp::$method(*self)
            }
        }
    };
}
/// Forwards a binary operator impl to the by-value `T op U` impl for the
/// three reference combinations `&T op U`, `T op &U`, and `&T op &U`.
macro_rules! forward_ref_binop {
    (impl $imp:ident, $method:ident for $t:ty, $u:ty) => {
        impl<'a> $imp<$u> for &'a $t {
            type Output = <$t as $imp<$u>>::Output;
            #[inline]
            fn $method(self, other: $u) -> <$t as $imp<$u>>::Output {
                $imp::$method(*self, other)
            }
        }
        impl $imp<&$u> for $t {
            type Output = <$t as $imp<$u>>::Output;
            #[inline]
            fn $method(self, other: &$u) -> <$t as $imp<$u>>::Output {
                $imp::$method(self, *other)
            }
        }
        impl $imp<&$u> for &$t {
            type Output = <$t as $imp<$u>>::Output;
            #[inline]
            fn $method(self, other: &$u) -> <$t as $imp<$u>>::Output {
                $imp::$method(*self, *other)
            }
        }
    };
}
/// Forwards an assignment operator (`T op= &U`) to the by-value
/// `T op= U` impl.
macro_rules! forward_ref_op_assign {
    (impl $imp:ident, $method:ident for $t:ty, $u:ty) => {
        impl $imp<&$u> for $t {
            #[inline]
            fn $method(&mut self, other: &$u) {
                $imp::$method(self, *other);
            }
        }
    };
}
/// This type represents a pointer offset, for shifts or offset instructions.
/// It is basically a `usize` with an extra sign bit.
#[derive(Clone, Copy, Debug)]
enum PtrOffset {
    /// Move the data pointer left by the given amount (expected non-zero;
    /// `From<usize>` normalizes 0 to `Zero`).
    Backward(usize),
    /// No movement.
    Zero,
    /// Move the data pointer right by the given amount (expected non-zero).
    Forward(usize),
}
impl Default for PtrOffset {
fn default() -> Self {
PtrOffset::Zero
}
}
/// Lifts a raw magnitude into a forward offset, normalizing `0` to `Zero`
/// so the non-zero invariant of `Forward` holds.
impl From<usize> for PtrOffset {
    fn from(val: usize) -> Self {
        match val {
            0 => Self::Zero,
            n => Self::Forward(n),
        }
    }
}
/// Negation flips the direction of travel; `Zero` is its own negation.
impl Neg for PtrOffset {
    type Output = PtrOffset;
    fn neg(self) -> Self::Output {
        match self {
            Self::Forward(n) => Self::Backward(n),
            Self::Backward(n) => Self::Forward(n),
            Self::Zero => Self::Zero,
        }
    }
}
forward_ref_unop!(impl Neg, neg for PtrOffset);
/// Signed addition of two offsets. Magnitudes use checked arithmetic, so a
/// `usize` overflow panics instead of wrapping silently.
impl Add for PtrOffset {
    type Output = Self;
    fn add(self, rhs: Self) -> Self::Output {
        match (self, rhs) {
            // Zero is the identity on either side.
            (Self::Zero, Self::Zero) => Self::Zero,
            (other, Self::Zero) => other,
            (Self::Zero, other) => other,
            // Same direction: magnitudes accumulate.
            (Self::Forward(a), Self::Forward(b)) => Self::Forward(a.checked_add(b).unwrap()),
            (Self::Backward(a), Self::Backward(b)) => Self::Backward(a.checked_add(b).unwrap()),
            // Opposite directions: the larger magnitude decides the sign.
            (Self::Forward(a), Self::Backward(b)) => match a.cmp(&b) {
                cmp::Ordering::Less => Self::Backward(b - a),
                cmp::Ordering::Equal => Self::Zero,
                cmp::Ordering::Greater => Self::Forward(a - b),
            },
            (Self::Backward(a), Self::Forward(b)) => match a.cmp(&b) {
                cmp::Ordering::Less => Self::Forward(b - a),
                cmp::Ordering::Equal => Self::Zero,
                cmp::Ordering::Greater => Self::Backward(a - b),
            },
        }
    }
}
forward_ref_binop!(impl Add, add for PtrOffset, PtrOffset);
/// Adds a raw magnitude by first lifting it into a `PtrOffset`.
impl Add<usize> for PtrOffset {
    type Output = Self;
    fn add(self, rhs: usize) -> Self::Output {
        Self::add(self, Self::from(rhs))
    }
}
forward_ref_binop!(impl Add, add for PtrOffset, usize);
/// Subtraction is addition of the negated right-hand side.
impl Sub for PtrOffset {
    type Output = Self;
    fn sub(self, rhs: Self) -> Self::Output {
        self + rhs.neg()
    }
}
forward_ref_binop!(impl Sub, sub for PtrOffset, PtrOffset);
impl Sub<usize> for PtrOffset {
type Output = PtrOffset;
fn sub(self, rhs: usize) -> Self::Output {
self - Self::from(rhs)
}
}
forward_ref_binop!(impl Sub, sub for PtrOffset, usize);
impl AddAssign for PtrOffset {
fn add_assign(&mut self, rhs: Self) {
*self = *self + rhs;
}
}
forward_ref_op_assign!(impl AddAssign, add_assign for PtrOffset, PtrOffset);
/// In-place addition of a raw magnitude (delegates to `Add<usize>`).
impl AddAssign<usize> for PtrOffset {
    fn add_assign(&mut self, rhs: usize) {
        *self = *self + rhs;
    }
}
forward_ref_op_assign!(impl AddAssign, add_assign for PtrOffset, usize);
impl SubAssign for PtrOffset {
fn sub_assign(&mut self, rhs: Self) {
*self = *self - rhs;
}
}
forward_ref_op_assign!(impl SubAssign, sub_assign for PtrOffset, PtrOffset);
/// In-place subtraction of a raw magnitude (delegates to `Sub<usize>`).
impl SubAssign<usize> for PtrOffset {
    fn sub_assign(&mut self, rhs: usize) {
        *self = *self - rhs;
    }
}
forward_ref_op_assign!(impl SubAssign, sub_assign for PtrOffset, usize);
/// Two offsets are equal when they share both direction and magnitude.
impl PartialEq for PtrOffset {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (Self::Backward(a), Self::Backward(b)) => a == b,
            (Self::Forward(a), Self::Forward(b)) => a == b,
            (Self::Zero, Self::Zero) => true,
            _ => false,
        }
    }
}
impl Eq for PtrOffset {}
impl PartialOrd for PtrOffset {
    // Delegates to the total order below, as consistency requires.
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for PtrOffset {
    /// Total order on the number line:
    /// `Backward(big) < Backward(small) < Zero < Forward(small) < Forward(big)`.
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        match (self, other) {
            (PtrOffset::Backward(_), PtrOffset::Zero | PtrOffset::Forward(_))
            | (PtrOffset::Zero, PtrOffset::Forward(_)) => cmp::Ordering::Less,
            // Swapped operands on purpose: a larger backward magnitude is a
            // smaller (more negative) offset.
            (PtrOffset::Backward(l0), PtrOffset::Backward(r0)) => r0.cmp(l0),
            (PtrOffset::Zero, PtrOffset::Zero) => cmp::Ordering::Equal,
            (PtrOffset::Forward(l0), PtrOffset::Forward(r0)) => l0.cmp(r0),
            (PtrOffset::Forward(_), PtrOffset::Zero | PtrOffset::Backward(_))
            | (PtrOffset::Zero, PtrOffset::Backward(_)) => cmp::Ordering::Greater,
        }
    }
}
impl ToTokens for PtrOffset {
    // Emits the arithmetic suffix applied to `ptr`, e.g.
    // `.checked_add(3).unwrap()`; `Zero` emits nothing so `ptr #offset`
    // degenerates to plain `ptr` in the generated code.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (match self {
            PtrOffset::Backward(val) => quote! {.checked_sub(#val).unwrap() },
            PtrOffset::Zero => quote! {},
            PtrOffset::Forward(val) => quote! { .checked_add(#val).unwrap() },
        })
        .to_tokens(tokens);
    }
}
/// This enum represents all the possible instruction types.
/// The AST we use is a `Vec<Instruction>`.
#[derive(Clone, PartialEq, Eq, Debug)]
enum Instruction {
    /// Marker emitted once at the start; expands to the interpreter prelude.
    ProgramStart(ProgramStart),
    /// `<`/`>` runs collapsed into one signed pointer move.
    ShiftPtr(ShiftPtr),
    /// `[<]`/`[>]`-style scan loops, lowered to memchr/memrchr.
    ScanLoop(ScanLoop),
    /// `+`/`-` runs collapsed into one wrapping add.
    IncrementCell(IncrementCell),
    /// Absolute store produced by clear loops and constant folding.
    SetCell(SetCell),
    /// Multiply-accumulate produced by multiply loops (construction TODO).
    MultiplyToCell(MultiplyToCell),
    /// `,` — read one byte from stdin.
    ReadToCell(ReadToCell),
    /// `.` — print the cell at a (possibly shifted) offset.
    PrintCell(PrintCell),
    /// `.` whose value is known at compile time.
    PrintValue(PrintValue),
    /// `[...]` — while-nonzero loop around nested instructions.
    Loop(Loop),
    /// `#` — debug dump of interpreter state.
    Debug(Debug),
}
impl ToTokens for Instruction {
    /// Delegates code generation to the wrapped concrete instruction.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        match self {
            Instruction::ProgramStart(inner) => inner.to_tokens(tokens),
            Instruction::ShiftPtr(inner) => inner.to_tokens(tokens),
            Instruction::ScanLoop(inner) => inner.to_tokens(tokens),
            Instruction::IncrementCell(inner) => inner.to_tokens(tokens),
            Instruction::SetCell(inner) => inner.to_tokens(tokens),
            Instruction::MultiplyToCell(inner) => inner.to_tokens(tokens),
            Instruction::ReadToCell(inner) => inner.to_tokens(tokens),
            Instruction::PrintCell(inner) => inner.to_tokens(tokens),
            Instruction::PrintValue(inner) => inner.to_tokens(tokens),
            Instruction::Loop(inner) => inner.to_tokens(tokens),
            Instruction::Debug(inner) => inner.to_tokens(tokens),
        }
    }
}
/// A special instruction for the start of the program.
#[derive(Clone, Copy, PartialEq, Eq, Default, Debug)]
struct ProgramStart {}
impl ToTokens for ProgramStart {
    /// Emits the interpreter prelude: a tinyvec-backed tape with inline
    /// capacity 30000 starting at one zeroed cell, the data pointer, the
    /// stdin/stdout handles, and the one-byte buffer shared by `,` reads.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (quote! {
            let mut memory = ::esolangs::brainfuck::macro_reexports::tinyvec::tiny_vec!([u8; 30000]);
            memory.resize(1, 0);
            let mut ptr: usize = 0;
            let mut input = ::std::io::stdin();
            let mut output = ::std::io::stdout();
            let mut buf = [0u8];
        })
        .to_tokens(tokens);
    }
}
/// `<`, `>`, and combinations thereof, collapsed into one signed move.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ShiftPtr {
    offset: PtrOffset,
}
impl ToTokens for ShiftPtr {
    /// Emits a pointer adjustment. Forward moves also grow the tape so the
    /// target cell is addressable; backward moves rely on the emitted
    /// `checked_sub(..).unwrap()` to panic on running off the left edge.
    /// A `Zero` offset emits nothing at all.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let offset = self.offset;
        match offset {
            PtrOffset::Backward(_) => {
                (quote! {
                    ptr = ptr #offset;
                })
                .to_tokens(tokens);
            }
            PtrOffset::Forward(_) => {
                (quote! {
                    ptr = ptr #offset;
                    memory.resize(::core::cmp::max(memory.len(), ptr.checked_add(1).unwrap()), 0);
                })
                .to_tokens(tokens);
            }
            PtrOffset::Zero => {}
        }
    }
}
/// Scan loops such as `[<]` / `[>]`: move the pointer cell by cell until
/// one holding `value` is found. (The previous doc comment was copied from
/// `ShiftPtr` by mistake.)
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ScanLoop {
    // Cell value being scanned for (currently always 0 at construction).
    value: u8,
    // true = scan towards higher addresses (memchr), false = memrchr.
    forward: bool,
}
impl ToTokens for ScanLoop {
    /// Emits a memchr/memrchr-based scan. A forward scan with no match
    /// extends the tape by one zeroed cell and lands on it; a backward scan
    /// `unwrap()`s because falling off the left edge is a program error.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let value = self.value;
        if self.forward {
            (quote! {
                if let Some(offset) = ::esolangs::brainfuck::macro_reexports::memchr::memchr(#value, &(&memory)[ptr..]) {
                    ptr += offset;
                } else {
                    ptr = memory.len();
                    memory.resize(memory.len().checked_add(1).unwrap(), 0);
                }
            }).to_tokens(tokens);
        } else {
            (quote! {
                ptr = ::esolangs::brainfuck::macro_reexports::memchr::memrchr(#value, &(&memory)[..=ptr]).unwrap();
            }).to_tokens(tokens);
        }
    }
}
// `+`, `-`, and derivatives thereof.
// Subtraction is implemented with wrapping addition.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct IncrementCell {
    // Amount to add, mod 256 (`-` is encoded as +255).
    amount: u8,
    // Cell to modify, relative to the current pointer.
    offset: PtrOffset,
}
impl ToTokens for IncrementCell {
    /// Emits a wrapping in-place add; a net amount of 0 emits nothing.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let amount = self.amount;
        let offset = self.offset;
        if amount != 0 {
            (quote! {
                memory[ptr #offset] = memory[ptr #offset].wrapping_add(#amount);
            })
            .to_tokens(tokens);
        }
    }
}
// `[-]`, `[-]+`, etc.
// Also produced by multiply loops.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SetCell {
    // Absolute value to store.
    value: u8,
    // Cell to write, relative to the current pointer.
    offset: PtrOffset,
}
impl ToTokens for SetCell {
    /// Emits an unconditional store into the addressed cell.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let value = self.value;
        let offset = self.offset;
        (quote! {
            memory[ptr #offset] = #value;
        })
        .to_tokens(tokens);
    }
}
// Produced by multiply loops like `[->++<]`
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct MultiplyToCell {
    source_offset: PtrOffset,
    target_offset: PtrOffset,
    coefficient: u8,
}
impl ToTokens for MultiplyToCell {
    /// Emits `target += source * coefficient` in brainfuck's wrapping u8
    /// arithmetic. Coefficient 0 emits nothing; 1 emits a plain addition.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let source_offset = self.source_offset;
        let target_offset = self.target_offset;
        let coef = self.coefficient;
        // Bug fix: the general case previously emitted a plain `*`, which
        // panics on u8 overflow in debug builds; cell arithmetic must wrap
        // (the additions already use `wrapping_add`).
        (match coef {
            0 => quote! { },
            1 => quote! { memory[ptr #target_offset] = memory[ptr #source_offset].wrapping_add(memory[ptr #target_offset]) },
            coef => quote! { memory[ptr #target_offset] = memory[ptr #source_offset].wrapping_mul(#coef).wrapping_add(memory[ptr #target_offset]) },
        })
        .to_tokens(tokens);
    }
}
// `,`
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ReadToCell {
    // Cell to store the byte in, relative to the current pointer.
    offset: PtrOffset,
}
impl ToTokens for ReadToCell {
    /// Emits a one-byte read from stdin into the addressed cell; on EOF or
    /// read failure the cell is left unchanged.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let offset = self.offset;
        (quote! {
            if ::std::io::Read::read_exact(&mut input, &mut buf).is_ok() {
                memory[ptr #offset] = buf[0];
            }
        })
        .to_tokens(tokens);
    }
}
// `.` — print the addressed cell.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct PrintCell {
    // Cell to print, relative to the current pointer.
    offset: PtrOffset,
}
impl ToTokens for PrintCell {
    /// Emits a write of the cell byte to stdout, retrying once if the write
    /// was interrupted, then flushing (other write errors are ignored).
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let offset = self.offset;
        (quote! {
            if let Err(err) = ::std::io::Write::write(&mut output, &[memory[ptr #offset]]) {
                if err.kind() == ::std::io::ErrorKind::Interrupted {
                    let _ = ::std::io::Write::write(&mut output, &[memory[ptr #offset]]);
                }
            }
            let _ = ::std::io::Write::flush(&mut output);
        })
        .to_tokens(tokens);
    }
}
// `.` when the value to be printed is known at compile time.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct PrintValue {
    // The byte to emit, baked into the generated code.
    value: u8,
}
impl ToTokens for PrintValue {
    /// Emits a write of the constant byte to stdout, retrying once if
    /// interrupted, then flushing (other write errors are ignored).
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let value = self.value;
        (quote! {
            if let Err(err) = ::std::io::Write::write(&mut output, &[#value]) {
                if err.kind() == ::std::io::ErrorKind::Interrupted {
                    let _ = ::std::io::Write::write(&mut output, &[#value]);
                }
            }
            let _ = ::std::io::Write::flush(&mut output);
        })
        .to_tokens(tokens);
    }
}
// `[stuff]`
#[derive(Clone, PartialEq, Eq, Debug)]
struct Loop {
    // The loop body, already parsed (and possibly optimized).
    inner: Vec<Instruction>,
}
impl ToTokens for Loop {
    /// Emits the standard brainfuck while-current-cell-nonzero loop wrapped
    /// around the generated body.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        let mut inner_tokens = TokenStream::new();
        for instr in &self.inner {
            instr.to_tokens(&mut inner_tokens);
        }
        (quote! {
            while memory[ptr] != 0 {
                #inner_tokens
            }
        })
        .to_tokens(tokens);
    }
}
// `#` — dump interpreter state for debugging.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Debug {}
impl ToTokens for Debug {
    /// Emits a dump of the pointer and tape contents.
    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
        // Bug fix: `memory` is a TinyVec, which implements `Debug` but not
        // `Display`, so the previous `{}` placeholder made every program
        // containing `#` fail to compile. Use `{:?}` instead.
        (quote! {
            println!("Pointer: {} | Memory: {:?}", ptr, memory);
        })
        .to_tokens(tokens);
    }
}
/// The parsed (and optionally optimized) brainfuck program.
#[derive(Clone, Default, PartialEq, Eq, Debug)]
pub struct Ast {
    // Flat list; the first element is always `ProgramStart`.
    instructions: Vec<Instruction>,
}
impl Ast {
    /// Runs the coalescing peephole optimizer over the instruction list.
    pub(crate) fn optimize(&mut self) {
        self.instructions = optimize_instructions(&self.instructions);
    }
    /// Parses raw source bytes into an unoptimized AST.
    pub(crate) fn from_bytes(bytes: &[u8]) -> Self {
        Self {
            instructions: parse_bytes(bytes),
        }
    }
}
/// Parses raw brainfuck source into instructions, prepending the
/// `ProgramStart` marker. Unknown bytes are comments and are skipped.
fn parse_bytes(bytes: &[u8]) -> Vec<Instruction> {
    // Recursive worker: appends the instructions for `bytes` (one loop
    // nesting level) to `instructions`.
    fn parse_bytes_inner(instructions: &mut Vec<Instruction>, bytes: &[u8]) {
        let mut i: usize = 0;
        while i < bytes.len() {
            match bytes[i] {
                b'>' => {
                    instructions.push(Instruction::ShiftPtr(ShiftPtr {
                        offset: PtrOffset::Forward(1),
                    }));
                    i += 1;
                }
                b'<' => {
                    instructions.push(Instruction::ShiftPtr(ShiftPtr {
                        offset: PtrOffset::Backward(1),
                    }));
                    i += 1;
                }
                b'+' => {
                    instructions.push(Instruction::IncrementCell(IncrementCell {
                        amount: 1,
                        offset: PtrOffset::Zero,
                    }));
                    i += 1;
                }
                b'-' => {
                    // `-` is +255: subtraction as wrapping addition.
                    instructions.push(Instruction::IncrementCell(IncrementCell {
                        amount: 255,
                        offset: PtrOffset::Zero,
                    }));
                    i += 1;
                }
                b'.' => {
                    instructions.push(Instruction::PrintCell(PrintCell {
                        offset: PtrOffset::Zero,
                    }));
                    i += 1;
                }
                b',' => {
                    instructions.push(Instruction::ReadToCell(ReadToCell {
                        offset: PtrOffset::Zero,
                    }));
                    i += 1;
                }
                b'#' => {
                    instructions.push(Instruction::Debug(Debug {}));
                    i += 1;
                }
                b'[' => {
                    // Collect the matching-bracket span and recurse on it.
                    // The final `]` also lands in `inner_bytes`, where it is
                    // harmlessly skipped by the `_` arm below.
                    // NOTE(review): an unbalanced `[` makes `bytes[i]` index
                    // past the end and panic; acceptable for a compile-time
                    // macro, but a proper error would be friendlier.
                    let mut nesting: usize = 1;
                    let mut inner_bytes = vec![];
                    i += 1;
                    while nesting > 0 {
                        inner_bytes.push(bytes[i]);
                        match bytes[i] {
                            b'[' => nesting += 1,
                            b']' => nesting -= 1,
                            _ => (),
                        }
                        i += 1;
                    }
                    let mut inner_vec = Vec::with_capacity(inner_bytes.len());
                    parse_bytes_inner(&mut inner_vec, &inner_bytes);
                    instructions.push(Instruction::Loop(Loop { inner: inner_vec }));
                }
                // Any other byte is a comment character.
                _ => i += 1,
            }
        }
    }
    let mut instructions: Vec<Instruction> = Vec::with_capacity(bytes.len() + 1);
    instructions.push(Instruction::ProgramStart(ProgramStart {}));
    parse_bytes_inner(&mut instructions, bytes);
    instructions
}
// Enum for instructions that read from or write to a cell.
// Holds mutable borrows so the optimizer can edit the matched instruction
// in place.
#[derive(PartialEq, Eq, Debug)]
enum InstrAccessingCell<'a> {
    ProgramStart(&'a mut ProgramStart),
    IncrementCell(&'a mut IncrementCell),
    SetCell(&'a mut SetCell),
    MultiplyToCell(&'a mut MultiplyToCell),
    ReadToCell(&'a mut ReadToCell),
    PrintCell(&'a mut PrintCell),
    Loop(&'a mut Loop),
    ScanLoop(&'a mut ScanLoop),
}
/// Generates `From<&mut T>` into the `InstrAccessingCell` variant of the
/// same name for each listed instruction type.
macro_rules! impl_from_instr_accessing {
    ($($instr_ty:ident),*) => {$(
        impl<'a> From<&'a mut $instr_ty> for InstrAccessingCell<'a> {
            fn from(instr: &'a mut $instr_ty) -> Self {
                InstrAccessingCell::$instr_ty(instr)
            }
        }
    )*};
}
// ProgramStart/MultiplyToCell/ScanLoop variants are constructed manually
// where needed, so they are not listed here.
impl_from_instr_accessing!(IncrementCell, SetCell, ReadToCell, PrintCell, Loop);
// Used during optimization: when adding each new instruction to the optimize output,
// we first search for whether there is a possible optimization involving a previous operation involving
// the same cell.
/// Walks `prev_instrs` backwards for the most recent instruction touching
/// the cell at `offset` (relative to the current pointer position).
///
/// `offset` is re-based across every intervening `ShiftPtr`. Returns `None`
/// when an instruction the optimizer cannot see through (`Debug`,
/// `PrintValue`, or a `Loop`/`ScanLoop` at a different cell) blocks the
/// search.
fn last_instr_accessing_cell(
    prev_instrs: &mut [Instruction],
    mut offset: PtrOffset,
) -> Option<(usize, InstrAccessingCell)> {
    let mut last_accessing: Option<(usize, InstrAccessingCell)> = None;
    for prev_instr in prev_instrs.iter_mut().enumerate().rev() {
        match prev_instr {
            // Pointer moved between here and the candidate: re-express the
            // target cell relative to the older pointer position.
            (_, Instruction::ShiftPtr(shift)) => offset -= shift.offset,
            // The program start is always instruction 0 and implies every
            // cell is known to be zero.
            (i, Instruction::ProgramStart(start)) => {
                last_accessing = Some((i, InstrAccessingCell::ProgramStart(start)));
                assert_eq!(i, 0);
                break;
            }
            // Conservatively stop at instructions the optimizer must not
            // reorder across.
            (_, Instruction::Debug(_) | Instruction::PrintValue(_)) => {
                last_accessing = None;
                break;
            }
            (i, Instruction::IncrementCell(incr)) => {
                if offset == incr.offset {
                    last_accessing = Some((i, InstrAccessingCell::IncrementCell(incr)));
                    break;
                }
            }
            (i, Instruction::SetCell(set)) => {
                if offset == set.offset {
                    last_accessing = Some((i, InstrAccessingCell::SetCell(set)));
                    break;
                }
            }
            (i, Instruction::MultiplyToCell(mul)) => {
                // Matches on either the read (source) or write (target) cell.
                if offset == mul.source_offset || offset == mul.target_offset {
                    last_accessing = Some((i, InstrAccessingCell::MultiplyToCell(mul)));
                    break;
                }
            }
            (i, Instruction::ReadToCell(read)) => {
                if offset == read.offset {
                    last_accessing = Some((i, InstrAccessingCell::ReadToCell(read)));
                    break;
                }
            }
            (i, Instruction::PrintCell(print)) => {
                if offset == print.offset {
                    last_accessing = Some((i, InstrAccessingCell::PrintCell(print)));
                    break;
                }
            }
            // Loops leave the current cell at 0 but may clobber any other
            // cell, so a non-zero offset aborts the search.
            (i, Instruction::Loop(loop_instr)) => {
                if offset == PtrOffset::Zero {
                    last_accessing = Some((i, InstrAccessingCell::Loop(loop_instr)));
                } else {
                    last_accessing = None;
                    // TODO check loop interior?
                }
                break;
            }
            // Scan loops move the pointer an unknown distance; only the
            // landing cell's value is known.
            (i, Instruction::ScanLoop(scan)) => {
                if offset == PtrOffset::Zero {
                    last_accessing = Some((i, InstrAccessingCell::ScanLoop(scan)));
                } else {
                    last_accessing = None;
                }
                break;
            }
        }
    }
    last_accessing
}
/// Finds the most recent `ShiftPtr` that a new shift can merge into.
/// Only `PrintValue` (which touches neither cells nor the pointer) may sit
/// between the two shifts; anything else aborts the search.
fn last_coalescable_ptr_shift_idx(
    prev_instrs: &mut [Instruction],
) -> Option<(usize, &mut ShiftPtr)> {
    let mut shift_idx = None;
    for prev_instr in prev_instrs.iter_mut().enumerate().rev() {
        match prev_instr {
            (idx, Instruction::ShiftPtr(shift)) => {
                shift_idx = Some((idx, shift));
                break;
            }
            // Safe to hop over: independent of the pointer.
            (_, Instruction::PrintValue(_)) => {}
            _ => {
                shift_idx = None;
                break;
            }
        };
    }
    shift_idx
}
/// One peephole pass: instructions are re-emitted one at a time into
/// `output`, and each new instruction may merge with, replace, or cancel a
/// previous instruction that touches the same cell.
fn optimize_coalescing_pass(input: &[Instruction]) -> Vec<Instruction> {
    /// Appends `instr` to `output`, coalescing with earlier instructions
    /// where a safe rewrite is known.
    fn optimized_add_instr(output: &mut Vec<Instruction>, instr: Instruction) {
        match instr {
            Instruction::ProgramStart(_) => {
                // Must be the very first instruction.
                assert_eq!(output.len(), 0);
                output.push(instr);
            }
            Instruction::IncrementCell(incr) => {
                // TODO switch to @ bindings on Rust 1.56
                let amount = incr.amount;
                let offset = incr.offset;
                if let Some((idx, prev_instr)) = last_instr_accessing_cell(output, incr.offset) {
                    match prev_instr {
                        // Cell is known-zero at program start: the increment
                        // becomes an absolute store.
                        InstrAccessingCell::ProgramStart(_) => {
                            output.push(Instruction::SetCell(SetCell {
                                value: amount,
                                offset,
                            }));
                        }
                        // Two increments on the same cell fold into one; a
                        // net change of zero deletes the instruction.
                        InstrAccessingCell::IncrementCell(prev_incr) => {
                            prev_incr.amount = prev_incr.amount.wrapping_add(amount);
                            if prev_incr.amount == 0 {
                                output.remove(idx);
                            }
                        }
                        // Folding into a store keeps it a store.
                        InstrAccessingCell::SetCell(prev_set) => {
                            prev_set.value = prev_set.value.wrapping_add(amount);
                        }
                        // After a loop the current cell is zero.
                        InstrAccessingCell::Loop(_) => output.push(Instruction::SetCell(SetCell {
                            value: amount,
                            offset,
                        })),
                        // After a scan loop the current cell equals the
                        // scanned-for value.
                        InstrAccessingCell::ScanLoop(scan) => {
                            let value = scan.value;
                            output.push(Instruction::SetCell(SetCell {
                                // Bug fix: this was `amount + value`, which
                                // panics on u8 overflow in debug builds; cell
                                // arithmetic must wrap like everywhere else.
                                value: amount.wrapping_add(value),
                                offset,
                            }))
                        }
                        _ => output.push(instr),
                    }
                } else {
                    output.push(instr);
                }
            }
            Instruction::SetCell(set) => {
                // A store makes earlier writes to the same cell dead: strip
                // them, and drop the store itself when the cell is already
                // known to hold the value.
                // NOTE(review): removing a MultiplyToCell here is only sound
                // when it matched via its *target* offset; a match via the
                // source offset is a read, not a dead write. No code path
                // currently constructs MultiplyToCell, so this is latent —
                // revisit before enabling multiply-loop detection.
                let mut prev_value: Option<u8> = None;
                while let Some((idx, prev_instr)) = last_instr_accessing_cell(output, set.offset) {
                    match prev_instr {
                        InstrAccessingCell::IncrementCell(_)
                        | InstrAccessingCell::MultiplyToCell(_)
                        | InstrAccessingCell::SetCell(_) => {
                            output.remove(idx);
                        }
                        InstrAccessingCell::ProgramStart(_) | InstrAccessingCell::Loop(_) => {
                            prev_value = Some(0);
                            break;
                        }
                        InstrAccessingCell::ScanLoop(scan) => {
                            let value = scan.value;
                            prev_value = Some(value);
                            break;
                        }
                        _ => break,
                    }
                }
                match prev_value {
                    // The store is redundant: the cell already holds `value`.
                    Some(val) if val == set.value => {}
                    _ => output.push(instr),
                }
            }
            Instruction::MultiplyToCell(_) => output.push(instr), //TODO
            Instruction::PrintCell(print) => {
                // A print of a statically-known cell becomes a constant print.
                match last_instr_accessing_cell(output, print.offset) {
                    Some((_, InstrAccessingCell::SetCell(set))) => {
                        let value = set.value;
                        output.push(Instruction::PrintValue(PrintValue { value }));
                    }
                    Some((
                        _,
                        InstrAccessingCell::ProgramStart(_) | InstrAccessingCell::Loop(_),
                    )) => {
                        output.push(Instruction::PrintValue(PrintValue { value: 0 }));
                    }
                    Some((_, InstrAccessingCell::ScanLoop(scan))) => {
                        let value = scan.value;
                        output.push(Instruction::PrintValue(PrintValue { value }));
                    }
                    _ => output.push(instr),
                }
            }
            Instruction::ShiftPtr(shift) => {
                // Adjacent pointer shifts combine into one signed move.
                if let Some((_, prev_shift)) = last_coalescable_ptr_shift_idx(output) {
                    prev_shift.offset += shift.offset;
                } else {
                    output.push(instr);
                }
            }
            Instruction::Loop(mut loop_instr) => {
                loop_instr.inner = optimize_instructions(&loop_instr.inner);
                let mut is_loop = true;
                if loop_instr.inner.len() == 1 {
                    // Recognize single-instruction idioms: `[-]`-style clear
                    // loops and `[<]`/`[>]` scan loops.
                    match loop_instr.inner[0] {
                        Instruction::IncrementCell(incr) => {
                            if incr.amount == 0 {
                                // A no-op body never terminates the loop;
                                // keep the loop but empty its body.
                                loop_instr.inner.clear();
                            } else {
                                // Any odd-or-even stepping increment loop
                                // eventually hits zero: it is a cell clear.
                                is_loop = false;
                                optimized_add_instr(
                                    output,
                                    Instruction::SetCell(SetCell {
                                        value: 0,
                                        offset: PtrOffset::Zero,
                                    }),
                                );
                            }
                        }
                        Instruction::ShiftPtr(shift) => match shift.offset {
                            // TODO detect more kinds of scan loops
                            PtrOffset::Backward(1usize) => {
                                is_loop = false;
                                optimized_add_instr(
                                    output,
                                    Instruction::ScanLoop(ScanLoop {
                                        value: 0,
                                        forward: false,
                                    }),
                                );
                            }
                            PtrOffset::Forward(1usize) => {
                                is_loop = false;
                                optimized_add_instr(
                                    output,
                                    Instruction::ScanLoop(ScanLoop {
                                        value: 0,
                                        forward: true,
                                    }),
                                );
                            }
                            _ => {}
                        },
                        _ => (),
                    }
                }
                if is_loop {
                    // Drop the loop entirely when the current cell is known
                    // to be zero on entry: the body could never run.
                    match last_instr_accessing_cell(output, PtrOffset::Zero) {
                        Some((_, prev_instr)) => match prev_instr {
                            InstrAccessingCell::SetCell(set) => {
                                if set.value != 0 {
                                    output.push(Instruction::Loop(loop_instr));
                                }
                            }
                            InstrAccessingCell::Loop(_)
                            | InstrAccessingCell::ProgramStart(_)
                            | InstrAccessingCell::ScanLoop(ScanLoop { value: 0, .. }) => {}
                            _ => output.push(Instruction::Loop(loop_instr)),
                        },
                        _ => output.push(Instruction::Loop(loop_instr)),
                    }
                }
            }
            Instruction::ScanLoop(scan) => {
                // A scan is a no-op when the current cell already holds the
                // scanned-for value.
                match last_instr_accessing_cell(output, PtrOffset::Zero) {
                    Some((_, prev_instr)) => match prev_instr {
                        InstrAccessingCell::SetCell(SetCell { value, .. })
                        | InstrAccessingCell::ScanLoop(ScanLoop { value, .. })
                            if value == &scan.value => {}
                        InstrAccessingCell::Loop(_) | InstrAccessingCell::ProgramStart(_)
                            if 0 == scan.value => {}
                        _ => output.push(instr),
                    },
                    _ => output.push(instr),
                }
            }
            _ => output.push(instr),
        }
    }
    let mut output: Vec<Instruction> = Vec::with_capacity(input.len());
    for instr in input {
        optimized_add_instr(&mut output, instr.clone());
    }
    output
}
/// Placeholder for a future instruction-reordering optimization pass.
/// Currently panics via `todo!()` if called.
fn optimize_reordering_pass() {
    todo!();
}
/// Runs the coalescing pass repeatedly until the instruction stream
/// reaches a fixed point (a pass that produces no further changes).
fn optimize_instructions(input: &[Instruction]) -> Vec<Instruction> {
    let mut current = input.to_vec();
    loop {
        let next = optimize_coalescing_pass(&current);
        if next == current {
            return next;
        }
        current = next;
    }
}
impl ToTokens for Ast {
    /// Expands the whole AST into a single `{ ... }` block expression
    /// containing every instruction's generated code in order.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let mut body = TokenStream::new();
        self.instructions
            .iter()
            .for_each(|instr| instr.to_tokens(&mut body));
        let block = quote! {
            {
                #body
            }
        };
        block.to_tokens(tokens);
    }
}
|
use crate::HittableList;
use crate::Material;
use crate::Onb;
use crate::Ray;
use crate::Vec3;
use crate::AABB;
use std::sync::Arc;
extern crate rand;
use rand::Rng;
const INFINITY: f64 = 1e15;
/// Anything a ray can intersect.
pub trait Object {
    /// Returns the closest hit of ray `r` with parameter t in `(t_min, t_max)`,
    /// or `None` on a miss.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord>;
    /// Axis-aligned bounding box, or `None` when the object has none.
    fn bounding_box(&self) -> Option<AABB>;
    /// PDF of sampling direction `_d` from origin `_o` toward this object.
    /// Default panics — only shapes usable as sampled lights override it.
    fn pdf_value(&self, _o: Vec3, _d: Vec3) -> f64 {
        panic!("unimplemented!")
    }
    /// A random direction from `_v` toward this object (for light sampling).
    /// Default panics — only shapes usable as sampled lights override it.
    fn random(&self, _v: Vec3) -> Vec3 {
        panic!("unimplemented!")
    }
}
/// Data recorded at a ray/object intersection.
pub struct HitRecord<'a> {
    /// Point of intersection.
    pub p: Vec3,
    /// Surface normal at `p`, oriented against the incoming ray.
    pub normal: Vec3,
    /// Ray parameter of the hit.
    pub t: f64,
    /// True when the ray hit the outward-facing side of the surface.
    pub front_face: bool,
    /// Material of the object that was hit, borrowed from the object.
    pub mat: Option<&'a dyn Material>,
    /// Texture coordinate u of the hit point.
    pub u: f64,
    /// Texture coordinate v of the hit point.
    pub v: f64,
}
impl<'a> HitRecord<'a> {
    /// Maps a point `p` on the unit sphere to (u, v) texture coordinates
    /// using its spherical angles (longitude from atan2, latitude from asin).
    pub fn get_sphere_uv(p: Vec3) -> UV {
        use std::f64::consts::PI;
        let phi = p.z.atan2(p.x);
        let theta = p.y.asin();
        UV::new(1.0 - (phi + PI) / (2.0 * PI), (theta + PI / 2.0) / PI)
    }
}
/// A simple texture-coordinate pair.
pub struct UV {
    u: f64,
    v: f64,
}
impl UV {
    /// Builds a coordinate pair from the `u` and `v` values.
    pub fn new(a: f64, b: f64) -> Self {
        UV { u: a, v: b }
    }
}
/// Sphere primitive: a center, a radius and the material it is shaded with.
pub struct Sphere<T: Material> {
    pub center: Vec3,
    pub radius: f64,
    pub mat: T,
}
impl<T: Material> Sphere<T> {
    /// Constructs a sphere from its center, radius and material.
    pub fn new(center: Vec3, radius: f64, mat: T) -> Self {
        Sphere {
            center,
            radius,
            mat,
        }
    }
}
impl<T: Material> Object for Sphere<T> {
    /// Ray/sphere intersection via the quadratic formula.
    ///
    /// Solves |origin + t*dir - center|^2 = radius^2 and returns the nearest
    /// root inside `(t_min, t_max)`, or `None` when the ray misses.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let oc = r.beg - self.center;
        let a = r.dir.length_squared();
        let half_b: f64 = oc * r.dir; // Vec3 * Vec3 is the dot product here
        let c = oc.length_squared() - self.radius * self.radius;
        let discriminant = half_b * half_b - a * c;
        if discriminant <= 0.0 {
            return Option::None;
        }
        let root = discriminant.sqrt();
        // Try the nearer root first, then the farther one. This replaces the
        // two verbatim-duplicated branches of the original implementation.
        for temp in [(-half_b - root) / a, (-half_b + root) / a].iter().copied() {
            if temp < t_max && temp > t_min {
                let outward_normal: Vec3 = (r.at(temp) - self.center) / self.radius;
                let front_face = (outward_normal * r.dir) < 0.0;
                let uv_ = HitRecord::get_sphere_uv(outward_normal);
                return Option::Some(HitRecord {
                    p: r.at(temp),
                    // Normal always points against the incoming ray.
                    normal: if front_face {
                        outward_normal
                    } else {
                        -outward_normal
                    },
                    t: temp,
                    front_face,
                    mat: Option::Some(&self.mat),
                    u: uv_.u,
                    v: uv_.v,
                });
            }
        }
        Option::None
    }
    /// Axis-aligned box exactly enclosing the sphere.
    fn bounding_box(&self) -> Option<AABB> {
        Option::Some(AABB::new(
            self.center - Vec3::new(self.radius, self.radius, self.radius),
            self.center + Vec3::new(self.radius, self.radius, self.radius),
        ))
    }
    /// PDF for sampling the sphere as a light: the reciprocal of the solid
    /// angle of the cone from `o` subtending the sphere.
    /// NOTE(review): assumes `o` lies outside the sphere — the sqrt argument
    /// goes negative (NaN) otherwise; confirm callers guarantee this.
    fn pdf_value(&self, o: Vec3, d: Vec3) -> f64 {
        let rec = self.hit(Ray::new(o, d), 0.001, INFINITY);
        match rec {
            None => 0.0,
            Some(_r) => {
                let cotma =
                    (1.0 - self.radius * self.radius / ((self.center - o).length_squared())).sqrt();
                let solid_ang = 2.0 * std::f64::consts::PI * (1.0 - cotma);
                1.0 / solid_ang
            }
        }
    }
    /// Random direction from `v` toward the sphere, in the ONB around the
    /// center direction.
    fn random(&self, v: Vec3) -> Vec3 {
        let di = self.center - v;
        let dis_sq = di.length_squared();
        let uvw = Onb::build_from_w(di);
        uvw.local(Vec3::random_to_sphere(self.radius, dis_sq))
    }
}
/// Axis-aligned rectangle in the plane z = k, spanning [x0,x1] x [y0,y1].
#[derive(Clone, Debug, Copy)]
pub struct XYRect<T: Material> {
    mp: T,
    x0: f64,
    x1: f64,
    y0: f64,
    y1: f64,
    k: f64,
}
impl<T: Material> XYRect<T> {
    /// Builds the rectangle from its x-range, y-range, plane offset `k`
    /// and material.
    pub fn new(x0: f64, x1: f64, y0: f64, y1: f64, k: f64, material: T) -> Self {
        Self {
            mp: material,
            x0,
            x1,
            y0,
            y1,
            k,
        }
    }
}
impl<T: Material> Object for XYRect<T> {
    /// Ray / rectangle intersection in the plane z = k.
    fn hit(&self, r: Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        // Solve beg.z + t*dir.z = k for t, then range-check x and y.
        let t_ = (self.k - r.beg.z) / r.dir.z;
        if t_ < t0 || t_ > t1 {
            return Option::None;
        }
        let x = r.beg.x + r.dir.x * t_;
        let y = r.beg.y + r.dir.y * t_;
        if x < self.x0 || x > self.x1 || y < self.y0 || y > self.y1 {
            return Option::None;
        }
        let outward_normal = Vec3::new(0.0, 0.0, 1.0);
        Option::Some(HitRecord {
            p: r.at(t_),
            normal: {
                if (r.dir * outward_normal) < 0.0 {
                    outward_normal
                } else {
                    -outward_normal
                }
            },
            t: t_,
            front_face: (r.dir * outward_normal) < 0.0,
            mat: Option::Some(&self.mp),
            u: (x - self.x0) / (self.x1 - self.x0),
            v: (y - self.y0) / (self.y1 - self.y0),
        })
    }
    /// Pads the flat z dimension by 0.0001 so the box is never degenerate.
    fn bounding_box(&self) -> Option<AABB> {
        Option::Some(AABB::new(
            Vec3::new(self.x0, self.y0, self.k - 0.0001),
            Vec3::new(self.x1, self.y1, self.k + 0.0001),
        ))
    }
    /// Area-based PDF for sampling this rectangle as a light.
    fn pdf_value(&self, o: Vec3, d: Vec3) -> f64 {
        let rec = self.hit(Ray::new(o, d), 0.001, INFINITY);
        match rec {
            None => 0.0,
            Some(rec) => {
                let area = (self.x1 - self.x0) * (self.y1 - self.y0);
                let dis = rec.t * rec.t * d.length_squared();
                let co = (d * rec.normal / d.length()).abs();
                dis / (co * area)
            }
        }
    }
    /// Uniform random point on the rectangle, returned as a direction from `v`.
    fn random(&self, v: Vec3) -> Vec3 {
        let mut rng = rand::thread_rng();
        // BUG FIX: this rect lies in the plane z = k, so the sample point is
        // (x, y, k). The original built (x, k, y), putting the plane offset
        // in the y slot — correct for an XZ rect, not an XY rect.
        let ran = Vec3::new(
            rng.gen_range(self.x0, self.x1),
            rng.gen_range(self.y0, self.y1),
            self.k,
        );
        ran - v
    }
}
/// Axis-aligned rectangle in the plane y = k, spanning [x0,x1] x [z0,z1].
#[derive(Clone, Debug, Copy)]
pub struct XZRect<T: Material> {
    mp: T,
    x0: f64,
    x1: f64,
    z0: f64,
    z1: f64,
    k: f64,
}
impl<T: Material> XZRect<T> {
    /// Builds the rectangle from its x-range, z-range, plane offset `k`
    /// and material.
    pub fn new(x0: f64, x1: f64, z0: f64, z1: f64, k: f64, material: T) -> Self {
        Self {
            mp: material,
            x0,
            x1,
            z0,
            z1,
            k,
        }
    }
}
impl<T: Material> Object for XZRect<T> {
    /// Ray / rectangle intersection in the plane y = k.
    fn hit(&self, r: Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        let t_hit = (self.k - r.beg.y) / r.dir.y;
        if t_hit < t0 || t_hit > t1 {
            return None;
        }
        let x = r.beg.x + r.dir.x * t_hit;
        let z = r.beg.z + r.dir.z * t_hit;
        if x < self.x0 || x > self.x1 || z < self.z0 || z > self.z1 {
            return None;
        }
        let outward_normal = Vec3::new(0.0, 1.0, 0.0);
        let front_face = (r.dir * outward_normal) < 0.0;
        Some(HitRecord {
            p: r.at(t_hit),
            // Normal is flipped to face against the incoming ray.
            normal: if front_face {
                outward_normal
            } else {
                -outward_normal
            },
            t: t_hit,
            front_face,
            mat: Some(&self.mp),
            u: (x - self.x0) / (self.x1 - self.x0),
            v: (z - self.z0) / (self.z1 - self.z0),
        })
    }
    /// Pads the flat y dimension by 0.0001 so the box is never degenerate.
    fn bounding_box(&self) -> Option<AABB> {
        Some(AABB::new(
            Vec3::new(self.x0, self.k - 0.0001, self.z0),
            Vec3::new(self.x1, self.k + 0.0001, self.z1),
        ))
    }
    /// Area-based PDF for sampling this rectangle as a light.
    fn pdf_value(&self, o: Vec3, d: Vec3) -> f64 {
        match self.hit(Ray::new(o, d), 0.001, INFINITY) {
            None => 0.0,
            Some(rec) => {
                let area = (self.x1 - self.x0) * (self.z1 - self.z0);
                let dist_sq = rec.t * rec.t * d.length_squared();
                let cosine = (d * rec.normal / d.length()).abs();
                dist_sq / (cosine * area)
            }
        }
    }
    /// Uniform random point on the rectangle (y = k plane), as a direction
    /// from `v`.
    fn random(&self, v: Vec3) -> Vec3 {
        let mut rng = rand::thread_rng();
        let point = Vec3::new(
            rng.gen_range(self.x0, self.x1),
            self.k,
            rng.gen_range(self.z0, self.z1),
        );
        point - v
    }
}
/// Axis-aligned rectangle in the plane x = k, spanning [y0,y1] x [z0,z1].
/// (Name keeps the original `YZRrect` spelling for source compatibility.)
#[derive(Clone, Debug, Copy)]
pub struct YZRrect<T: Material> {
    mp: T,
    y0: f64,
    y1: f64,
    z0: f64,
    z1: f64,
    k: f64,
}
impl<T: Material> YZRrect<T> {
    /// Builds the rectangle from its y-range, z-range, plane offset `k`
    /// and material.
    pub fn new(y0: f64, y1: f64, z0: f64, z1: f64, k: f64, material: T) -> Self {
        Self {
            mp: material,
            y0,
            y1,
            z0,
            z1,
            k,
        }
    }
}
impl<T: Material> Object for YZRrect<T> {
    /// Ray / rectangle intersection in the plane x = k.
    fn hit(&self, r: Ray, t0: f64, t1: f64) -> Option<HitRecord> {
        // Solve beg.x + t*dir.x = k for t, then range-check y and z.
        let t_ = (self.k - r.beg.x) / r.dir.x;
        if t_ < t0 || t_ > t1 {
            return Option::None;
        }
        let y = r.beg.y + r.dir.y * t_;
        let z = r.beg.z + r.dir.z * t_;
        if y < self.y0 || y > self.y1 || z < self.z0 || z > self.z1 {
            return Option::None;
        }
        let outward_normal = Vec3::new(1.0, 0.0, 0.0);
        Option::Some(HitRecord {
            p: r.at(t_),
            normal: {
                if (r.dir * outward_normal) < 0.0 {
                    outward_normal
                } else {
                    -outward_normal
                }
            },
            t: t_,
            front_face: (r.dir * outward_normal) < 0.0,
            mat: Option::Some(&self.mp),
            u: (y - self.y0) / (self.y1 - self.y0),
            v: (z - self.z0) / (self.z1 - self.z0),
        })
    }
    /// Pads the flat x dimension by 0.0001 so the box is never degenerate.
    fn bounding_box(&self) -> Option<AABB> {
        Option::Some(AABB::new(
            Vec3::new(self.k - 0.0001, self.y0, self.z0),
            Vec3::new(self.k + 0.0001, self.y1, self.z1),
        ))
    }
    /// Area-based PDF for sampling this rectangle as a light.
    fn pdf_value(&self, o: Vec3, d: Vec3) -> f64 {
        let rec = self.hit(Ray::new(o, d), 0.001, INFINITY);
        match rec {
            None => 0.0,
            Some(rec) => {
                let area = (self.y1 - self.y0) * (self.z1 - self.z0);
                let dis = rec.t * rec.t * d.length_squared();
                let co = (d * rec.normal / d.length()).abs();
                dis / (co * area)
            }
        }
    }
    /// Uniform random point on the rectangle, returned as a direction from `v`.
    fn random(&self, v: Vec3) -> Vec3 {
        let mut rng = rand::thread_rng();
        // BUG FIX: this rect lies in the plane x = k, so the sample point is
        // (k, y, z). The original built (y, k, z), placing the plane offset
        // in the y slot — correct for an XZ rect, not a YZ rect.
        let ran = Vec3::new(
            self.k,
            rng.gen_range(self.y0, self.y1),
            rng.gen_range(self.z0, self.z1),
        );
        ran - v
    }
}
/// Axis-aligned box assembled from six axis-aligned rectangle sides.
/// NOTE: this type shadows `std::boxed::Box` inside this module.
pub struct Box {
    pub box_min: Vec3,
    pub box_max: Vec3,
    pub sides: HittableList,
}
impl Box {
    /// Builds the box spanning `mi`..`ma`, cloning material `p` onto each
    /// of the six faces.
    pub fn new<U: Material + Clone + 'static>(mi: Vec3, ma: Vec3, p: U) -> Self {
        let mut sides = HittableList::new();
        // z = max and z = min faces
        sides.add(Arc::new(XYRect::new(mi.x, ma.x, mi.y, ma.y, ma.z, p.clone())));
        sides.add(Arc::new(XYRect::new(mi.x, ma.x, mi.y, ma.y, mi.z, p.clone())));
        // y = max and y = min faces
        sides.add(Arc::new(XZRect::new(mi.x, ma.x, mi.z, ma.z, ma.y, p.clone())));
        sides.add(Arc::new(XZRect::new(mi.x, ma.x, mi.z, ma.z, mi.y, p.clone())));
        // x = max and x = min faces
        sides.add(Arc::new(YZRrect::new(mi.y, ma.y, mi.z, ma.z, ma.x, p.clone())));
        sides.add(Arc::new(YZRrect::new(mi.y, ma.y, mi.z, ma.z, mi.x, p)));
        Self {
            box_min: mi,
            box_max: ma,
            sides,
        }
    }
}
impl Object for Box {
    /// Delegates ray intersection to the six rectangle sides.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        self.sides.hit(r, t_min, t_max)
    }
    /// The box is its own bounding volume.
    fn bounding_box(&self) -> Option<AABB> {
        Some(AABB::new(self.box_min, self.box_max))
    }
}
/// Wrapper that shifts another object by a constant offset.
pub struct Translate<T: Object> {
    pub ptr: T,
    pub offset: Vec3,
}
impl<T: Object> Translate<T> {
    /// Wraps object `p`, translated by vector `v`.
    pub fn new(p: T, v: Vec3) -> Self {
        Translate { ptr: p, offset: v }
    }
}
impl<T: Object> Object for Translate<T> {
    /// Intersects by shifting the ray into the wrapped object's frame, then
    /// shifting the hit point back out.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let moved = Ray::new(r.beg - self.offset, r.dir);
        self.ptr.hit(moved, t_min, t_max).map(|mut rec| {
            rec.p += self.offset;
            // Re-orient the normal against the ray direction. The wrapped
            // hit already orients it, so this is normally a no-op; kept for
            // exact parity with the original logic.
            if moved.dir * rec.normal >= 0.0 {
                rec.normal = -rec.normal;
            }
            rec
        })
    }
    /// Translates the wrapped object's bounding box by the same offset.
    fn bounding_box(&self) -> Option<AABB> {
        self.ptr
            .bounding_box()
            .map(|b| AABB::new(b.min + self.offset, b.max + self.offset))
    }
}
/// Wrapper that rotates another object around the Y axis by a fixed angle.
pub struct RotateY<T: Object> {
    ptr: T,
    sin_theta: f64,
    cos_theta: f64,
    hasbox: bool,
    bbox: AABB,
}
impl<T: Object> RotateY<T> {
    /// Builds the rotated wrapper and precomputes the rotated bounding box.
    ///
    /// Panics (via `unwrap`) if the wrapped object has no bounding box.
    pub fn new(p: T, angle: f64) -> Self {
        let radians = angle * std::f64::consts::PI / 180.0;
        let co = radians.cos();
        let si = radians.sin();
        let get = p.bounding_box();
        let tt = get.unwrap();
        // BUG FIX: `f64::min`/`f64::max` return their result; the original
        // called them on immutable bindings and discarded the values, so the
        // computed box stayed at (+inf, -inf). Accumulate into mutable
        // extrema instead.
        let mut mi = Vec3::new(INFINITY, INFINITY, INFINITY);
        let mut ma = Vec3::new(-INFINITY, -INFINITY, -INFINITY);
        for i in 0..2 {
            for j in 0..2 {
                for k in 0..2 {
                    // Visit every corner of the original box...
                    let x = tt.max.x * i as f64 + (1.0 - i as f64) * tt.min.x;
                    let y = tt.max.y * j as f64 + (1.0 - j as f64) * tt.min.y;
                    let z = tt.max.z * k as f64 + (1.0 - k as f64) * tt.min.z;
                    // ...rotate it about the Y axis...
                    let newx = x * co + z * si;
                    let newz = -si * x + co * z;
                    let tes = Vec3::new(newx, y, newz);
                    // ...and grow the axis-aligned extents to cover it.
                    mi.x = mi.x.min(tes.x);
                    ma.x = ma.x.max(tes.x);
                    mi.y = mi.y.min(tes.y);
                    ma.y = ma.y.max(tes.y);
                    mi.z = mi.z.min(tes.z);
                    ma.z = ma.z.max(tes.z);
                }
            }
        }
        Self {
            ptr: p,
            sin_theta: si,
            cos_theta: co,
            // The unwrap above already panicked when there was no box, so
            // this is always true here; kept for field compatibility.
            hasbox: get.is_some(),
            bbox: AABB::new(mi, ma),
        }
    }
}
impl<T: Object> Object for RotateY<T> {
    /// Intersects by rotating the ray into object space, hitting the wrapped
    /// object, then rotating the hit point and normal back to world space.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let mut ori = r.beg;
        let mut di = r.dir;
        // Inverse rotation applied to the ray origin and direction.
        ori.x = self.cos_theta * r.beg.x - self.sin_theta * r.beg.z;
        ori.z = self.sin_theta * r.beg.x + self.cos_theta * r.beg.z;
        di.x = self.cos_theta * r.dir.x - self.sin_theta * r.dir.z;
        di.z = self.sin_theta * r.dir.x + self.cos_theta * r.dir.z;
        let ror = Ray::new(ori, di);
        let ww = self.ptr.hit(ror, t_min, t_max);
        match ww {
            None => None,
            Some(k) => {
                let mut p = k.p;
                let mut nor = k.normal;
                // Forward rotation applied to the hit point and normal.
                p.x = self.cos_theta * k.p.x + self.sin_theta * k.p.z;
                p.z = -self.sin_theta * k.p.x + self.cos_theta * k.p.z;
                nor.x = self.cos_theta * k.normal.x + self.sin_theta * k.normal.z;
                nor.z = -self.sin_theta * k.normal.x + self.cos_theta * k.normal.z;
                let mut ret = k;
                ret.p = p;
                // Keep the normal facing against the (object-space) ray.
                ret.normal = {
                    if ror.dir * nor < 0.0 {
                        nor
                    } else {
                        -nor
                    }
                };
                Option::Some(ret)
            }
        }
    }
    /// Returns the precomputed rotated box, if the wrapped object had one.
    fn bounding_box(&self) -> Option<AABB> {
        if self.hasbox {
            Some(self.bbox)
        } else {
            None
        }
    }
}
/// Wrapper that inverts the `front_face` flag of every hit on the inner
/// object (used e.g. to make lights one-sided).
pub struct FlipFace<T: Object> {
    ptr: T,
}
impl<T: Object> FlipFace<T> {
    /// Wraps object `a`.
    pub fn new(a: T) -> Self {
        Self { ptr: a }
    }
}
impl<T: Object> Object for FlipFace<T> {
    /// Forwards the hit, negating `front_face` on the record.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        self.ptr.hit(r, t_min, t_max).map(|mut rec| {
            rec.front_face = !rec.front_face;
            rec
        })
    }
    /// Same bounds as the wrapped object.
    fn bounding_box(&self) -> Option<AABB> {
        self.ptr.bounding_box()
    }
}
|
use gcp_bigquery_client::error::BQError;
use thiserror::Error;
use url;
/// Errors that can occur while reading from a BigQuery source. Every variant
/// is a transparent wrapper, so messages come from the underlying error.
#[derive(Error, Debug)]
pub enum BigQuerySourceError {
    #[error(transparent)]
    ConnectorXError(#[from] crate::errors::ConnectorXError),
    #[error(transparent)]
    BQError(#[from] BQError),
    #[error(transparent)]
    BigQueryUrlError(#[from] url::ParseError),
    #[error(transparent)]
    BigQueryStdError(#[from] std::io::Error),
    #[error(transparent)]
    BigQueryJsonError(#[from] serde_json::Error),
    #[error(transparent)]
    BigQueryParseFloatError(#[from] std::num::ParseFloatError),
    #[error(transparent)]
    BigQueryParseIntError(#[from] std::num::ParseIntError),
    /// Any other errors that are too trivial to be put here explicitly.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
|
// Copyright 2020 David Li
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use protobuf_codegen::Customize;
use protoc_grpcio;
/// Build script: generates Rust gRPC bindings for `proto/route_guide.proto`
/// into `src/`, with serde derives enabled on the generated messages.
fn main() {
    let proto_root = "proto";
    // Re-run codegen whenever anything under the proto directory changes.
    println!("cargo:rerun-if-changed={}", proto_root);
    let customize = Customize {
        serde_derive: Some(true),
        ..Default::default()
    };
    protoc_grpcio::compile_grpc_protos(
        &["route_guide.proto"],
        &[proto_root],
        "src",
        Some(customize),
    )
    .expect("Failed to compile route_guide.proto!");
}
|
use std::ops::{Deref, DerefMut};
use super::*;
/// The Vector type is a sequence of Values that can be accessed in constant time
/// (although insertions and deletions are linear time).
pub struct Vector {
    // Backing storage; exposed through the Deref/DerefMut impls below.
    elements: Vec<Value>,
}
impl Vector {
pub fn with_capacity(gc: &mut GarbageCollector, capacity: usize) -> GcPointer<Self> {
gc.allocate(Self {
elements: Vec::with_capacity(capacity),
})
}
pub fn from_cons(
gc: &mut GarbageCollector,
val: Value,
) -> Result<GcPointer<Self>, ConsToNativeError> {
Ok(gc.allocate(Self {
elements: Cons::to_vec(val)?,
}))
}
pub fn from_iter(
gc: &mut GarbageCollector,
iter: impl Iterator<Item = Value>,
) -> GcPointer<Self> {
let mut this = Self::with_capacity(gc, 0);
for item in iter {
this.push(item);
}
this
}
pub fn to_cons(&self, gc: &mut GarbageCollector) -> Value {
let mut result = Value::encode_null_value();
for i in (0..self.len()).rev() {
result = Value::encode_object_value(Cons::new(gc, self.elements[i], result));
}
result
}
}
// The GC must be able to visit every Value held by the vector.
unsafe impl Trace for Vector {
    fn trace(&self, visitor: &mut Tracer) {
        // Delegates to the Vec's trace, which visits each element.
        self.elements.trace(visitor);
    }
}
// Marker impl allowing Vector to live in a GC cell.
impl GcCell for Vector {}
// Expose the full Vec API (len, push, iteration, ...) directly on Vector.
impl Deref for Vector {
    type Target = Vec<Value>;
    fn deref(&self) -> &Self::Target {
        &self.elements
    }
}
impl DerefMut for Vector {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.elements
    }
}
|
use std::collections::VecDeque;
use std::cell::RefCell;
use crate::interpreter::cache;
use crate::ast::expressions::{self, primitives};
use crate::ast::lexer::tokens;
use crate::ast::parser;
use crate::ast::rules;
use crate::ast::stack;
/// Index-access expression, e.g. `object.name` or `object["key"]`.
pub struct Indexing {
    pub object: Box<dyn expressions::Expression>,
    pub index: Box<dyn expressions::Expression>,
    /// We need interior mutability to update cache
    pub cache: RefCell<cache::Cache>
}
impl expressions::Expression for Indexing {}
impl ::std::fmt::Debug for Indexing {
    // The cache is intentionally omitted from the debug output.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        write!(f, "Indexing {{ object: {:?}, index: {:?} }}", self.object, self.index)
    }
}
impl Indexing {
    /// .Name indexing
    pub fn new_object(stack: &mut stack::Stack) {
        // Remove dot from stack
        let (name, _dot) = stack_unpack!(stack, single, single);
        stack.push_single(name);
        Indexing::new(stack)
    }
    /// ["String"] indexing
    pub fn new_table(stack: &mut stack::Stack) {
        rules::remove_enclosing_brackets(stack);
        Indexing::new(stack)
    }
    /// Creates indexing for its prefix: pops (index, object) off the stack
    /// and pushes the combined node with an empty cache.
    pub fn new(stack: &mut stack::Stack) {
        let (index, object) = stack_unpack!(stack, single, single);
        stack.push_single(Box::new(Indexing { object, index, cache: RefCell::new(cache::Cache::default()) }));
    }
    /// Folds a repetition of index expressions into nested `Indexing` nodes,
    /// left to right, so `a.b.c` becomes `Indexing(Indexing(a, b), c)`.
    pub fn new_indexing_chain(stack: &mut stack::Stack) {
        let (chain, mut object) = stack_unpack!(stack, repetition, single);
        for index in chain.into_iter() {
            object = Box::new(Indexing { object, index, cache: RefCell::new(cache::Cache::default()) })
        }
        stack.push_single(object)
    }
}
/// A single field of a table constructor: `[k] = v`, `k = v`, or a bare value.
#[derive(Debug)]
pub struct TableField {
    /// `None` for positional (array-style) fields.
    pub key: Option<Box<dyn expressions::Expression>>,
    pub value: Box<dyn expressions::Expression>,
}
impl expressions::Expression for TableField {}
impl TableField {
    /// Parses a bare identifier used as a field name and pushes it as a
    /// string primitive; returns false (no shift) for any other token.
    pub fn name_rule(parser: &mut parser::Parser, stack: &mut stack::Stack) -> bool {
        if let Some(tokens::Token {
            token: tokens::TokenType::Id(string),
            ..
        }) = parser.peek().cloned()
        {
            parser.shift();
            stack.push_single(Box::new(primitives::String(string)));
            true
        } else {
            false
        }
    }
    // terminal!(Keyword::LSBRACKET), exp, terminal!(Keyword::RSBRACKET), terminal!(Keyword::EQUAL), exp
    /// `[key] = value` field: pops value, '=', ']', key, '[' and pushes the field.
    pub fn new_table_index(stack: &mut stack::Stack) {
        let (value, _assign, _rb, key, _lb) =
            stack_unpack!(stack, single, single, single, single, single);
        stack.push_single(Box::new(TableField {
            key: Some(key),
            value,
        }))
    }
    // variables::Id::rule, terminal!(Keyword::EQUAL), exp
    /// `name = value` field: pops value, '=', key and pushes the field.
    pub fn new_object_index(stack: &mut stack::Stack) {
        let (value, _assign, key) = stack_unpack!(stack, single, single, single);
        stack.push_single(Box::new(TableField {
            key: Some(key),
            value,
        }))
    }
    // exp
    /// Positional field: wraps a bare expression with no key.
    pub fn new_value(stack: &mut stack::Stack) {
        let value = stack.pop_single();
        stack.push_single(Box::new(TableField { key: None, value }))
    }
    /// Sequence of fields: appends the just-parsed field to the running
    /// repetition, creating the repetition on the first field.
    pub fn new_list_name(stack: &mut stack::Stack) {
        let field = stack.pop_single();
        match stack.peek() {
            // If we already had fields before
            stack::Element::Repetition(_) => {
                let mut fieldlist = stack.pop_repetition();
                fieldlist.push_back(field);
                stack.push_repetition(fieldlist)
            }
            // First field
            _ => stack.push_repetition(VecDeque::from(vec![field])),
        }
    }
}
/// Table constructor expression holding its fields in source order.
#[derive(Debug)]
pub struct Table(pub VecDeque<Box<dyn expressions::Expression>>);
impl expressions::Expression for Table {}
impl Table {
    // tableconstructor ::= ‘{’ [fieldlist] ‘}’
    /// Pops the closing bracket, the optional fieldlist and the opening
    /// bracket, then pushes a `Table` node (empty when no fields were parsed).
    pub fn new(stack: &mut stack::Stack) {
        let _rbr = stack.pop_single();
        // If had some fieldlist
        if let stack::Element::Repetition(_) = stack.peek() {
            let (fieldlist, _lbr) = stack_unpack!(stack, repetition, single);
            stack.push_single(Box::new(Table(fieldlist)))
        } else {
            let _lbr = stack.pop_single();
            stack.push_single(Box::new(Table(VecDeque::new())))
        }
    }
}
|
pub mod calibration;
pub mod detection;
pub mod errors;
pub mod game;
pub mod graphics;
pub mod utils;
|
use super::{chunk_header::*, chunk_type::*, *};
use bytes::{Buf, BufMut, Bytes, BytesMut};
use std::fmt;
///chunkSelectiveAck represents an SCTP Chunk of type SACK
///
///This chunk is sent to the peer endpoint to acknowledge received DATA
///chunks and to inform the peer endpoint of gaps in the received
///subsequences of DATA chunks as represented by their TSNs.
///0 1 2 3
///0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Type = 3 |Chunk Flags | Chunk Length |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Cumulative TSN Ack |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Advertised Receiver Window Credit (a_rwnd) |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Number of Gap Ack Blocks = N | Number of Duplicate TSNs = X |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Gap Ack Block #1 Start | Gap Ack Block #1 End |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| |
///| ... |
///| |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Gap Ack Block #N Start | Gap Ack Block #N End |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Duplicate TSN 1 |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| |
///| ... |
///| |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
///| Duplicate TSN X |
///+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#[derive(Debug, Default, Copy, Clone)]
pub(crate) struct GapAckBlock {
    /// First TSN of the gap block, as an offset from the cumulative TSN ack.
    pub(crate) start: u16,
    /// Last TSN of the gap block, as an offset from the cumulative TSN ack.
    pub(crate) end: u16,
}
/// makes gapAckBlock printable
impl fmt::Display for GapAckBlock {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{} - {}", self.start, self.end)
    }
}
/// In-memory representation of a SACK chunk's payload.
#[derive(Default, Debug)]
pub(crate) struct ChunkSelectiveAck {
    pub(crate) cumulative_tsn_ack: u32,
    pub(crate) advertised_receiver_window_credit: u32,
    pub(crate) gap_ack_blocks: Vec<GapAckBlock>,
    pub(crate) duplicate_tsn: Vec<u32>,
}
/// makes chunkSelectiveAck printable
impl fmt::Display for ChunkSelectiveAck {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Write straight to the formatter instead of accumulating an
        // intermediate String with `res += format!(...)`, which allocated a
        // fresh String per gap block. Output is identical.
        write!(
            f,
            "SACK cumTsnAck={} arwnd={} dupTsn={:?}",
            self.cumulative_tsn_ack, self.advertised_receiver_window_credit, self.duplicate_tsn
        )?;
        for gap in &self.gap_ack_blocks {
            write!(f, "\n gap ack: {}", gap)?;
        }
        Ok(())
    }
}
/// Fixed-size part of the SACK payload: cumulative TSN ack (4 bytes) +
/// a_rwnd (4) + gap-ack-block count (2) + duplicate-TSN count (2).
pub(crate) const SELECTIVE_ACK_HEADER_SIZE: usize = 12;
impl Chunk for ChunkSelectiveAck {
    /// Builds the chunk header (type SACK, no flags) for this payload.
    fn header(&self) -> ChunkHeader {
        ChunkHeader {
            typ: CT_SACK,
            flags: 0,
            value_length: self.value_length() as u16,
        }
    }
    /// Parses a SACK chunk: fixed header fields, then N gap ack blocks and
    /// X duplicate TSNs as declared by the two count fields.
    fn unmarshal(raw: &Bytes) -> Result<Self, Error> {
        let header = ChunkHeader::unmarshal(raw)?;
        if header.typ != CT_SACK {
            return Err(Error::ErrChunkTypeNotSack);
        }
        if raw.len() < CHUNK_HEADER_SIZE + SELECTIVE_ACK_HEADER_SIZE {
            return Err(Error::ErrSackSizeNotLargeEnoughInfo);
        }
        // NOTE(review): this slice trusts header.value_length(); if it
        // exceeded raw.len() the slice would panic before the size check
        // below — confirm value_length is validated upstream.
        let reader = &mut raw.slice(CHUNK_HEADER_SIZE..CHUNK_HEADER_SIZE + header.value_length());
        let cumulative_tsn_ack = reader.get_u32();
        let advertised_receiver_window_credit = reader.get_u32();
        let gap_ack_blocks_len = reader.get_u16() as usize;
        let duplicate_tsn_len = reader.get_u16() as usize;
        // Each gap ack block and each duplicate TSN occupies 4 bytes.
        if raw.len()
            != CHUNK_HEADER_SIZE
                + SELECTIVE_ACK_HEADER_SIZE
                + (4 * gap_ack_blocks_len + 4 * duplicate_tsn_len)
        {
            return Err(Error::ErrSackSizeNotMatchPredicted);
        }
        let mut gap_ack_blocks = vec![];
        let mut duplicate_tsn = vec![];
        for _ in 0..gap_ack_blocks_len {
            let start = reader.get_u16();
            let end = reader.get_u16();
            gap_ack_blocks.push(GapAckBlock { start, end });
        }
        for _ in 0..duplicate_tsn_len {
            duplicate_tsn.push(reader.get_u32());
        }
        Ok(ChunkSelectiveAck {
            cumulative_tsn_ack,
            advertised_receiver_window_credit,
            gap_ack_blocks,
            duplicate_tsn,
        })
    }
    /// Serializes header then payload fields in wire order; returns the
    /// total number of bytes now in `writer`.
    fn marshal_to(&self, writer: &mut BytesMut) -> Result<usize, Error> {
        self.header().marshal_to(writer)?;
        writer.put_u32(self.cumulative_tsn_ack);
        writer.put_u32(self.advertised_receiver_window_credit);
        writer.put_u16(self.gap_ack_blocks.len() as u16);
        writer.put_u16(self.duplicate_tsn.len() as u16);
        for g in &self.gap_ack_blocks {
            writer.put_u16(g.start);
            writer.put_u16(g.end);
        }
        for t in &self.duplicate_tsn {
            writer.put_u32(*t);
        }
        Ok(writer.len())
    }
    // SACK chunks have no additional validity constraints.
    fn check(&self) -> Result<(), Error> {
        Ok(())
    }
    /// Payload length: fixed header + 4 bytes per gap block + 4 per dup TSN.
    fn value_length(&self) -> usize {
        SELECTIVE_ACK_HEADER_SIZE + self.gap_ack_blocks.len() * 4 + self.duplicate_tsn.len() * 4
    }
    fn as_any(&self) -> &(dyn Any + Send + Sync) {
        self
    }
}
|
use structopt::StructOpt;
use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
// Command-line arguments for the word finder. Plain `//` comments are used
// here on purpose: `///` doc comments on StructOpt fields would become
// --help text and change the program's output.
#[derive(StructOpt)]
struct Cli {
    // Path to the newline-separated word list.
    #[structopt(short = "w", long = "words", parse(from_os_str), default_value = "/usr/share/dict/words")]
    words_file: std::path::PathBuf,
    // Print progress banners around the results.
    #[structopt(short = "v", long = "verbose")]
    verbose: bool,
    // Character every solution must contain.
    primary_character: char,
    // Additional characters solutions may use.
    other_characters: String,
}
/// Entry point: reads the word list and prints every word that contains the
/// primary character and uses only the allowed characters.
fn main() {
    let args = Cli::from_args();
    if args.verbose {
        println!("Searching for solutions...");
        println!();
    }
    // The primary character is always allowed, so fold it into the set.
    let mut allowed_string = args.other_characters;
    allowed_string.push(args.primary_character);
    match read_lines(&args.words_file) {
        Ok(lines) => {
            // `flatten` skips unreadable lines, as the original nested
            // `if let Ok(..)` did.
            for word in lines.flatten() {
                if word_matches(&word, args.primary_character, &allowed_string) {
                    println!("{}", word);
                }
            }
        }
        // The original silently ignored an unopenable word file; report it.
        Err(e) => eprintln!("Could not open {:?}: {}", args.words_file, e),
    }
    if args.verbose {
        println!();
        println!("Done!");
    }
}
// The output is wrapped in a Result to allow matching on errors
// Returns an Iterator to the Reader of the lines of the file.
// Opens `filename` and returns a buffered line iterator over its contents.
// The Result allows the caller to match on open errors.
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
    P: AsRef<Path>,
{
    File::open(filename).map(|file| io::BufReader::new(file).lines())
}
/// Returns true when `word` is a valid solution: at least four characters
/// long (byte length — fine for ASCII word lists), contains the mandatory
/// `primary_character`, and uses only characters from `allowed_characters`.
///
/// Takes `&str` instead of `&String` (idiomatic and more general); existing
/// `&String` call sites still compile via deref coercion.
fn word_matches(word: &str, primary_character: char, allowed_characters: &str) -> bool {
    word.len() >= 4
        && word.contains(primary_character)
        && word.chars().all(|c| allowed_characters.contains(c))
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn word_matches_too_short() {
        // Words shorter than four characters never match.
        assert!(!word_matches(&"foo".to_string(), 'f', &"fo".to_string()));
    }
    #[test]
    fn word_matches_does_not_contain_primary_character() {
        assert!(!word_matches(&"foobar".to_string(), 't', &"fotbar".to_string()));
    }
    #[test]
    fn word_matches_contains_disallowed_characters() {
        // the 'r' in "foobar" is not part of the allowed set.
        assert!(!word_matches(&"foobar".to_string(), 'o', &"fotba".to_string()));
    }
    #[test]
    fn word_matches_passes() {
        assert!(word_matches(&"foobar".to_string(), 'o', &"fobar".to_string()));
    }
}
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
use std::path::PathBuf;
use anyhow::Error;
use reverie::process::Command;
use reverie::process::Mount;
use reverie::process::Namespace;
use reverie::process::Output;
use reverie::process::Stdio;
use reverie::Subscription;
use reverie::Tool;
use reverie_ptrace::GdbConnection;
/// Configuration for launching a program under the Reverie gdbserver.
pub struct GdbServerCommand {
    // NB: ideally we could also attach to an existing pid, but this is not
    // supported by reverie yet..
    /// Path of the binary to launch and trace.
    program_to_run: PathBuf,
    /// Arguments forwarded to the traced program.
    program_args: Vec<String>,
    /// GDB remote-protocol connection the server will use.
    connection: GdbConnection,
}
/// Minimal pass-through tool with no state; subscribes to all events so the
/// gdbserver can observe everything the guest does.
#[derive(Default)]
struct TestTool;
impl Tool for TestTool {
    type GlobalState = ();
    type ThreadState = ();
    fn subscriptions(_cfg: &()) -> Subscription {
        Subscription::all()
    }
}
/// Spawns `command` under the tracer with a gdbserver attached, waits for it
/// to finish, and returns the captured output.
async fn run(command: Command, connection: GdbConnection) -> Result<Output, Error> {
    let guest = reverie_ptrace::TracerBuilder::<TestTool>::new(command)
        .gdbserver(connection)
        .spawn()
        .await?;
    let (output, _global_state) = guest.wait_with_output().await?;
    Ok(output)
}
impl GdbServerCommand {
    /// Builds a gdbserver command from the program path, its arguments, and
    /// the connection the GDB stub should listen on.
    pub fn new<A, P, S>(program_to_run: P, program_args: A, connection: GdbConnection) -> Self
    where
        P: Into<PathBuf>,
        A: IntoIterator<Item = S>,
        S: AsRef<str>,
    {
        GdbServerCommand {
            program_to_run: program_to_run.into(),
            program_args: program_args
                .into_iter()
                .map(|s| String::from(s.as_ref()))
                .collect(),
            connection,
        }
    }
    /// run gdbserver under namespace
    pub async fn output(self) -> Result<Output, Error> {
        let mut command = Command::new(&self.program_to_run);
        command.args(&self.program_args);
        // Isolate the guest: fresh PID namespace, fake root, private /proc,
        // fixed host/domain names; capture stdout/stderr for the caller.
        command
            .unshare(Namespace::PID)
            .map_root()
            .hostname("hermetic-container.local")
            .domainname("local")
            .mount(Mount::proc())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped());
        run(command, self.connection).await
    }
}
|
//! Module containing functions executed by the thread in charge of updating the output report every 1 second
use std::collections::HashSet;
use std::fs::File;
use std::io::{BufWriter, Seek, SeekFrom, Write};
use std::sync::{Arc, Condvar, Mutex};
use std::thread;
use std::time::Duration;
use crate::gui::types::status::Status;
use crate::utils::formatted_strings::get_report_path;
use crate::InfoTraffic;
/// The calling thread enters in a loop in which it sleeps for 1 second and then
/// updates the output report containing detailed traffic information
pub fn sleep_and_write_report_loop(
    current_capture_id: &Arc<Mutex<u16>>,
    info_traffic_mutex: &Arc<Mutex<InfoTraffic>>,
    status_pair: &Arc<(Mutex<Status>, Condvar)>,
) {
    let cvar = &status_pair.1;
    let path_report = get_report_path();
    let mut capture_id = *current_capture_id.lock().unwrap();
    let mut output =
        BufWriter::new(File::create(path_report.clone()).expect("Error creating output file\n\r"));
    write_report_header(&mut output);
    loop {
        // sleep 1 second between report refreshes
        thread::sleep(Duration::from_secs(1));
        // A new capture id means a new capture was started: recreate the file.
        let current_capture_id_lock = current_capture_id.lock().unwrap();
        if *current_capture_id_lock != capture_id {
            capture_id = *current_capture_id_lock;
            output = BufWriter::new(
                File::create(path_report.clone()).expect("Error creating output file\n\r"),
            );
            write_report_header(&mut output);
        }
        drop(current_capture_id_lock);
        let mut status = status_pair.0.lock().expect("Error acquiring mutex\n\r");
        if *status == Status::Running {
            drop(status);
            let mut info_traffic = info_traffic_mutex
                .lock()
                .expect("Error acquiring mutex\n\r");
            // Rewrite only the rows whose connections saw traffic in the last
            // interval. Each row lives at a fixed file offset: 3 header lines
            // of 166 bytes, then 206 bytes per row.
            // NOTE(review): assumes every formatted row is exactly 206 bytes;
            // verify against the code that formats `key_val`.
            for index in &info_traffic.addresses_last_interval {
                let key_val = info_traffic.map.get_index(*index).unwrap();
                let seek_pos = 166 * 3 + 206 * (*index) as u64;
                output.seek(SeekFrom::Start(seek_pos)).unwrap();
                writeln!(output, "{}{}", key_val.0, key_val.1)
                    .expect("Error writing output file\n\r");
            }
            info_traffic.addresses_last_interval = HashSet::new(); // empty set
            drop(info_traffic);
            output.flush().expect("Error writing output file\n\r");
        } else {
            // status is Init: block on the condvar until the state changes
            while *status == Status::Init {
                status = cvar.wait(status).expect("Error acquiring mutex\n\r");
            }
        }
    }
}
/// Writes the three-line column header (rule, titles, rule) that precedes
/// the per-connection rows of the report file. Extracted because the same
/// three `writeln!` calls were duplicated verbatim at file creation and at
/// every capture restart.
fn write_report_header(output: &mut BufWriter<File>) {
    writeln!(output, "---------------------------------------------------------------------------------------------------------------------------------------------------------------------").expect("Error writing output file\n\r");
    writeln!(output, "| Src IP address | Src port | Dst IP address | Dst port | Layer 4 | Layer 7 | Packets | Bytes | Initial timestamp | Final timestamp |").expect("Error writing output file\n\r");
    writeln!(output, "---------------------------------------------------------------------------------------------------------------------------------------------------------------------").expect("Error writing output file\n\r");
}
|
/*Rust通过修改注册表启用或禁用任务管理器
Published: 2018-03-21
By Yieldone
tags: Rust
开发全屏应用的时候,除了需要禁用一些ALT+F4,Win+Tab,Alt+Tab外,任务管理器也应该禁用,了解一番后,发现Ctrl+ALT+DEL组合键是Ring0级别,很难屏蔽,不能通过简单的HOOK方式让其失效,于是研究了一个最简单的方法,通过修改注册表启用禁用任务管理器
路径:HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Policies\\System
这个注册表路径里的DisableTaskmgr字段,REG_DWORD类型,如果其值为1,则禁用任务管理器,为0则启动任务管理器(修改后即时生效)
这篇文章是C语言的例子:Windows API 教程(十) 注册表操作
Rust有个名为winreg的crate,可以方便的操作注册表
*/
extern crate winreg;
use std::path::Path;
use winreg::RegKey;
use winreg::enums::*;
fn main() {
    // Open (or create) the per-user policy key that controls Task Manager.
    let hkcu = RegKey::predef(HKEY_CURRENT_USER);
    let path = Path::new("Software\\Microsoft\\Windows\\CurrentVersion\\Policies").join("System");
    let key = hkcu.create_subkey(&path).unwrap();
    // DisableTaskmgr: 1 disables Task Manager, 0 re-enables it (takes effect immediately).
    key.set_value("DisableTaskmgr", &1u32).unwrap();
    // BUG FIX: the value was just written as REG_DWORD (`&1u32`), so it must be
    // read back as `u32`. Reading it as `String` makes winreg's `FromRegValue`
    // return a type-mismatch error and the `unwrap()` panics at runtime.
    let val: u32 = key.get_value("DisableTaskmgr").unwrap();
    println!("DisableTaskmgr = {}", val);
}
// 上边的代码运行会禁用任务管理器,将&1u32修改为&0u32是启动任务管理器 |
#[allow(unused_imports)]
use nom::*;
use ast::{Ast, TypeInfo};
use parser::identifier::identifier;
/// _ts indicates that the parser combinator is a getting a type signature
// Top-level entry point: accepts any of the known type signatures, with
// optional surrounding whitespace. The concrete keywords are tried first so
// a user-defined (custom) type name cannot shadow them.
named!(pub type_signature<TypeInfo>,
    ws!(alt!(number_ts | string_ts | bool_ts | array_ts | custom_ts ))
);
// Matches the keyword "Number" and yields `TypeInfo::Number`.
named!(number_ts<TypeInfo>,
    value!(
        TypeInfo::Number,
        tag!("Number")
    )
);
// Matches the keyword "String" and yields `TypeInfo::String`.
named!(string_ts<TypeInfo>,
    value!(
        TypeInfo::String,
        tag!("String")
    )
);
// Matches the keyword "Bool" and yields `TypeInfo::Bool`.
named!(bool_ts<TypeInfo>,
    value!(
        TypeInfo::Bool,
        tag!("Bool")
    )
);
// Matches a bracketed element type, e.g. `[Number]`, producing a boxed
// `TypeInfo::Array` of the contained signature (arrays nest recursively).
named!(array_ts<TypeInfo>,
    do_parse!(
        contained_type: delimited!(
            char!('['),
            type_signature, // TODO find a way to support custom types directly in the type_signature parser and datatype.
            char!(']')
        ) >>
        (TypeInfo::Array(Box::new( contained_type ) ))
    )
);
// Fallback: any bare identifier is treated as a user-defined struct type name.
named!(custom_ts<TypeInfo>,
    do_parse!(
        id: identifier >>
        (TypeInfo::StructType{ identifier: extract_string_from_identifier(id) })
    )
);
/// Pull the raw string out of a `ValueIdentifier` AST node.
///
/// Panics if the `identifier` parser produced any other node kind, since
/// that would be a bug in the parser itself.
fn extract_string_from_identifier(identifier: Ast) -> String {
    if let Ast::ValueIdentifier(value) = identifier {
        value
    } else {
        panic!("Parser for identifier returned something other than a ValueIdentifier.")
    }
}
/// From an AST extract the type info.
/// Can panic.
fn get_type_from_ast(ast: Ast) -> TypeInfo {
match ast {
Ast::Type(info) => info,
_ => panic!("Tried to get type from non-type")
}
} |
use crate::utils::read_lines;
/// Advent of Code 2020 day 3: count trees hit while tobogganing down a grid.
pub(crate) fn main() {
    let filename = "B:\\Dev\\Rust\\projects\\aoc2020\\input\\3.txt";
    println!("filename is {}", filename);
    // Part one: a single slope.
    check_slope(filename, 3, 1);
    // Part two: multiply the tree counts over all five prescribed slopes.
    let slopes: [(i64, i64); 5] = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)];
    let product: i64 = slopes
        .iter()
        .map(|&(right, down)| check_slope(filename, right, down))
        .product();
    println!("For the second tests, the product is {}", product);
}
fn check_slope(puzzle_path : &str, horizontal_step:i64, vertical_step:i64) -> i64{
return if let Ok(lines) = read_lines(puzzle_path) {
// Consumes the iterator, returns an (Optional) String
let mut c_trees = 0;
let mut cur_lines_skip = vertical_step; // Don't skip first line
let mut cur_horizontal_pos = 0;
for line in lines {
if let Ok(c) = line {
if cur_lines_skip < vertical_step - 1
{
cur_lines_skip += 1;
continue
} else {
cur_lines_skip = 0;
}
if c.chars().nth(cur_horizontal_pos % c.len() as usize).unwrap() == '#'
{
c_trees += 1;
}
cur_horizontal_pos += horizontal_step as usize;
} else {
//println!("Line reading failed :(");
}
}
println!("For the travel ({},{}), {} trees were hit.",horizontal_step,vertical_step, c_trees);
c_trees
} else {
println!("Error when reading the file {}", puzzle_path);
0
}
} |
/*
* Rustパターン(記法)。
* CreatedAt: 2019-07-07
*/
fn main() {
    let numbers = (2, 4, 8, 16, 32);
    // Destructure the tuple with an irrefutable `let` pattern instead of a
    // single-arm `match`; `_` discards the positions we do not need.
    let (first, _, third, _, fifth) = numbers;
    println!("Some numbers: {}, {}, {}", first, third, fifth)
}
|
use cuckoofilter::{CuckooFilter, ExportedCuckooFilter};
use parking_lot::Mutex;
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::fmt;
use std::fmt::Debug;
use std::sync::Arc;
use subspace_core_primitives::PieceIndex;
use subspace_networking::libp2p::PeerId;
use subspace_networking::CuckooFilterDTO;
/// Tracks, per connected peer, a cuckoo filter describing which pieces that
/// peer reports holding (see `peers_contain_piece`).
#[derive(Clone, Default)]
pub struct ArchivalStorageInfo {
    // Shared behind Arc: cloning the struct shares the same map, so all
    // clones observe filter updates.
    peers: Arc<Mutex<HashMap<PeerId, CuckooFilter<DefaultHasher>>>>,
}
impl Debug for ArchivalStorageInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Take the lock once, report only the peer count (filters are opaque).
        let peer_count = self.peers.lock().len();
        f.debug_struct("ArchivalStorageInfo")
            .field("peers (len)", &peer_count)
            .finish()
    }
}
impl ArchivalStorageInfo {
    /// Rebuild a cuckoo filter from its wire representation and store it as
    /// the current filter for `peer_id`, replacing any previous one.
    pub fn update_cuckoo_filter(&self, peer_id: PeerId, cuckoo_filter_dto: Arc<CuckooFilterDTO>) {
        let exported = ExportedCuckooFilter {
            values: cuckoo_filter_dto.values.clone(),
            length: cuckoo_filter_dto.length as usize,
        };
        self.peers
            .lock()
            .insert(peer_id, CuckooFilter::from(exported));
    }
    /// Drop the stored filter for `peer_id`; returns whether one was present.
    pub fn remove_peer_filter(&self, peer_id: &PeerId) -> bool {
        self.peers.lock().remove(peer_id).is_some()
    }
    /// All peers whose filter reports containing `piece_index`.
    pub fn peers_contain_piece(&self, piece_index: &PieceIndex) -> Vec<PeerId> {
        self.peers
            .lock()
            .iter()
            .filter(|(_, filter)| filter.contains(piece_index))
            .map(|(peer_id, _)| *peer_id)
            .collect()
    }
}
|
pub mod user;
pub mod profile; |
// svd2rust-style generated reader/writer aliases for the MACQTxFCR register.
// Do not hand-edit: regenerate from the SVD instead.
#[doc = "Register `MACQTxFCR` reader"]
pub type R = crate::R<MACQTX_FCR_SPEC>;
#[doc = "Register `MACQTxFCR` writer"]
pub type W = crate::W<MACQTX_FCR_SPEC>;
#[doc = "Field `FCB_BPA` reader - Flow Control Busy or Backpressure Activate"]
pub type FCB_BPA_R = crate::BitReader;
#[doc = "Field `FCB_BPA` writer - Flow Control Busy or Backpressure Activate"]
pub type FCB_BPA_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TFE` reader - Transmit Flow Control Enable"]
pub type TFE_R = crate::BitReader;
#[doc = "Field `TFE` writer - Transmit Flow Control Enable"]
pub type TFE_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PLT` reader - Pause Low Threshold"]
pub type PLT_R = crate::FieldReader;
#[doc = "Field `PLT` writer - Pause Low Threshold"]
pub type PLT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `DZPQ` reader - Disable Zero-Quanta Pause"]
pub type DZPQ_R = crate::BitReader;
#[doc = "Field `DZPQ` writer - Disable Zero-Quanta Pause"]
pub type DZPQ_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `PT` reader - Pause Time"]
pub type PT_R = crate::FieldReader<u16>;
#[doc = "Field `PT` writer - Pause Time"]
pub type PT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 16, O, u16>;
impl R {
    // Read-side accessors: each extracts its field by shifting/masking the
    // 32-bit register snapshot held in `self.bits` (generated code).
    #[doc = "Bit 0 - Flow Control Busy or Backpressure Activate"]
    #[inline(always)]
    pub fn fcb_bpa(&self) -> FCB_BPA_R {
        FCB_BPA_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Transmit Flow Control Enable"]
    #[inline(always)]
    pub fn tfe(&self) -> TFE_R {
        TFE_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bits 4:6 - Pause Low Threshold"]
    #[inline(always)]
    pub fn plt(&self) -> PLT_R {
        PLT_R::new(((self.bits >> 4) & 7) as u8)
    }
    #[doc = "Bit 7 - Disable Zero-Quanta Pause"]
    #[inline(always)]
    pub fn dzpq(&self) -> DZPQ_R {
        DZPQ_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bits 16:31 - Pause Time"]
    #[inline(always)]
    pub fn pt(&self) -> PT_R {
        PT_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
impl W {
    // Write-side accessors: each returns a field-writer proxy parameterized
    // by the field's bit offset (generated code).
    #[doc = "Bit 0 - Flow Control Busy or Backpressure Activate"]
    #[inline(always)]
    #[must_use]
    pub fn fcb_bpa(&mut self) -> FCB_BPA_W<MACQTX_FCR_SPEC, 0> {
        FCB_BPA_W::new(self)
    }
    #[doc = "Bit 1 - Transmit Flow Control Enable"]
    #[inline(always)]
    #[must_use]
    pub fn tfe(&mut self) -> TFE_W<MACQTX_FCR_SPEC, 1> {
        TFE_W::new(self)
    }
    #[doc = "Bits 4:6 - Pause Low Threshold"]
    #[inline(always)]
    #[must_use]
    pub fn plt(&mut self) -> PLT_W<MACQTX_FCR_SPEC, 4> {
        PLT_W::new(self)
    }
    #[doc = "Bit 7 - Disable Zero-Quanta Pause"]
    #[inline(always)]
    #[must_use]
    pub fn dzpq(&mut self) -> DZPQ_W<MACQTX_FCR_SPEC, 7> {
        DZPQ_W::new(self)
    }
    #[doc = "Bits 16:31 - Pause Time"]
    #[inline(always)]
    #[must_use]
    pub fn pt(&mut self) -> PT_W<MACQTX_FCR_SPEC, 16> {
        PT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe because the caller bypasses the field writers and is responsible
    // for supplying a bit pattern valid for every field of this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "Tx Queue flow control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`macqtx_fcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`macqtx_fcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct MACQTX_FCR_SPEC;
// 32-bit register, readable and writable, resets to 0 (generated marker impls).
impl crate::RegisterSpec for MACQTX_FCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`macqtx_fcr::R`](R) reader structure"]
impl crate::Readable for MACQTX_FCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`macqtx_fcr::W`](W) writer structure"]
impl crate::Writable for MACQTX_FCR_SPEC {
    // No fields use write-0-to-clear / write-1-to-clear semantics.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MACQTxFCR to value 0"]
impl crate::Resettable for MACQTX_FCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use super::*;
#[pymethods]
impl EnsmallenGraph {
    #[text_signature = "($self, other, verbose)"]
    /// Return graph remapped towards nodes of the given graph.
    ///
    /// Parameters
    /// -----------------------------
    /// other: EnsmallenGraph,
    ///     The graph to remap towards.
    /// verbose: bool = True,
    ///     Whether to show a loading bar. By default True.
    ///
    /// Returns
    /// -----------------------------
    /// New remapped graph.
    pub fn remap(&self, other: &EnsmallenGraph, verbose: Option<bool>) -> PyResult<EnsmallenGraph> {
        Ok(EnsmallenGraph {
            // `unwrap_or(true)` is the idiomatic form of the original
            // `or(Some(true)).unwrap()`: a missing `verbose` defaults to true
            // without constructing an intermediate Option or calling unwrap.
            graph: pyex!(self.graph.remap(&other.graph, verbose.unwrap_or(true)))?,
        })
    }
}
|
fn main() {
    // Bind both operands, then emit them with positional format arguments
    // (same "12 47" output, no trailing newline).
    let number = 12;
    let other = 47;
    print!("{0} {1}", number, other);
}
|
use std::error::Error as StdError;
use hyper::StatusCode;
use std::fmt;
/// The Errors that may occur when processing a `Request`.
pub struct Error {
    // Boxed so `Error` stays one pointer wide; a large inline error type
    // would bloat every `Result<T, Error>` it travels through.
    inner: Box<Inner>,
}
/// A type-erased error that can be sent across threads.
pub(crate) type BoxError = Box<dyn StdError + Send + Sync>;
struct Inner {
    // Which category of error this is (see `Kind`).
    kind: Kind,
    // Human-readable text; starts empty and grows via `with_prefix`.
    description: String,
    // Underlying cause, if any (exposed through `StdError::source`).
    source: Option<BoxError>,
}
impl Error {
    /// Construct an `Error` of the given `kind`, optionally wrapping an
    /// underlying `source`. The description starts empty and is extended
    /// later via `with_prefix`.
    pub(crate) fn new<E>(kind: Kind, source: Option<E>) -> Error
    where
        E: Into<BoxError>,
    {
        Error {
            inner: Box::new(Inner {
                kind,
                source: source.map(Into::into),
                // `String::new()` does not allocate, unlike `"".to_string()`.
                description: String::new(),
            }),
        }
    }
    /// Returns the status code, if the error was generated from a response.
    pub fn status(&self) -> Option<StatusCode> {
        // Match by reference: `StatusCode` is `Copy`, so there is no need to
        // clone the whole `Kind` (which may own `String`s) just to read it.
        match &self.inner.kind {
            Kind::Status(code) | Kind::ErrorV1(code, _) | Kind::ErrorV2(code, _, _) => Some(*code),
            Kind::Text(_) => None,
        }
    }
    /// Prepend `prefix` to the human-readable description and return `self`
    /// for chaining.
    pub(crate) fn with_prefix<E: std::fmt::Display>(mut self, prefix: E) -> Error {
        self.inner.description = format!("{}{}", prefix, self.inner.description);
        self
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.inner.description)?;
match self.inner.kind.clone() {
Kind::Text(ref text) => {
write!(f, "{}", text)?;
}
Kind::Status(ref code) => {
let prefix = if code.is_client_error() {
"HTTP status client error"
} else {
"HTTP status server error"
};
write!(f, "{} ({})", prefix, code)?;
}
Kind::ErrorV1(code, error) => {
let prefix = if code.is_client_error() {
"HTTP status client error"
} else {
"HTTP status server error"
};
write!(f, "{} ({}) - {}", prefix, code, error)?;
}
Kind::ErrorV2(code, error, error_code) => {
let prefix = if code.is_client_error() {
"HTTP status client error"
} else {
"HTTP status server error"
};
write!(f, "{} ({}) - {} ({})", prefix, code, error, error_code)?;
}
};
Ok(())
}
}
impl fmt::Debug for Error {
    /// Debug output shows the kind and, when present, the source error.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let mut dbg = f.debug_struct("reqwest::Error");
        dbg.field("kind", &self.inner.kind);
        match self.inner.source {
            Some(ref src) => dbg.field("source", src).finish(),
            None => dbg.finish(),
        }
    }
}
impl StdError for Error {
    /// Expose the wrapped cause, unboxed, to the standard error chain.
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        match self.inner.source {
            Some(ref boxed) => Some(&**boxed),
            None => None,
        }
    }
}
/// Internal categories of `Error`; constructed via the helper functions below.
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum Kind {
    // Free-form message with no HTTP status attached.
    Text(String),
    // An HTTP status with no further payload.
    Status(StatusCode),
    // HTTP status plus an error message.
    ErrorV1(StatusCode, String),
    // HTTP status plus an error message and an error code string.
    ErrorV2(StatusCode, String, String),
}
/// Build a `Kind::Text` error with no underlying cause.
pub(crate) fn text_error(message: String) -> Error {
    Error::new(Kind::Text(message), None::<Error>)
}
/// Build a `Kind::Text` error wrapping an underlying cause `e`.
pub(crate) fn text_error_with_inner<E: Into<BoxError>>(message: String, e: E) -> Error {
    Error::new(Kind::Text(message), Some(e))
}
/// Build a bare `Kind::Status` error from an HTTP status code.
pub(crate) fn status_code(status: StatusCode) -> Error {
    Error::new(Kind::Status(status), None::<Error>)
}
/// Build a `Kind::ErrorV1` error (status + message).
pub(crate) fn v1_error(status: StatusCode, error: String) -> Error {
    Error::new(Kind::ErrorV1(status, error), None::<Error>)
}
/// Build a `Kind::ErrorV2` error (status + message + error code).
pub(crate) fn v2_error(status: StatusCode, error: String, error_code: String) -> Error {
    Error::new(Kind::ErrorV2(status, error, error_code), None::<Error>)
}
|
mod expr;
mod init;
mod stmt;
use std::collections::{HashSet, VecDeque};
use std::convert::TryInto;
use counter::Counter;
use crate::data::{error::Warning, hir::*, lex::Keyword, *};
use crate::intern::InternedStr;
use crate::parse::{Lexer, Parser};
use crate::RecursionGuard;
/// Scope for struct/union/enum tags (namespace 2 of C's four namespaces).
pub(crate) type TagScope = Scope<InternedStr, TagEntry>;
/// What a declared tag name refers to.
#[derive(Clone, Debug)]
pub(crate) enum TagEntry {
    Struct(StructRef),
    Union(StructRef),
    // list of (name, value)s
    Enum(Vec<(InternedStr, i64)>),
}
/// The driver for `PureAnalyzer`.
///
/// This implements `Iterator` and ensures that declarations and errors are returned in the correct order.
/// Use this if you want to compile an entire C program,
/// or if it is important to show errors in the correct order relative to declarations.
pub struct Analyzer<T: Lexer> {
    /// The parser that supplies AST declarations to be analyzed.
    declarations: Parser<T>,
    /// The underlying AST-to-HIR analyzer doing the real work.
    pub inner: PureAnalyzer,
    /// Whether to print each declaration as it is seen
    pub debug: bool,
}
/// A `PureAnalyzer` turns AST types into HIR types.
///
/// In particular, it performs type checking and semantic analysis.
/// Use this if you need to analyze a specific AST data type without parsing a whole program.
// The struct is used mostly for holding scopes and error handler.
pub struct PureAnalyzer {
    // in case a `Declaration` has multiple declarators:
    // analyzed declarations queued up for the driver to drain in order
    pending: VecDeque<Locatable<Declaration>>,
    /// objects that are in scope
    /// C actually has 4 different scopes:
    /// 1. ordinary identifiers
    /// 2. tags
    /// 3. label names
    /// 4. members
    ///
    /// This holds the scope for ordinary identifiers: variables and typedefs
    scope: Scope<InternedStr, Symbol>,
    /// the compound types that have been declared (struct/union/enum)
    /// scope 2. from above
    tag_scope: TagScope,
    /// Stores all variables that have been initialized so far
    initialized: HashSet<Symbol>,
    /// Internal API which makes it easier to return errors lazily
    error_handler: ErrorHandler,
    /// Internal API which prevents segfaults due to stack overflow
    recursion_guard: RecursionGuard,
    /// Hack to make compound assignment work
    ///
    /// For `a += b`, `a` must only be evaluated once.
    /// The way `assignment_expr` handles this is by desugaring to
    /// `tmp = &a; *tmp = *tmp + b;`
    /// However, the backend still has to see the declaration.
    /// There's no way to return a statement from an expression,
    /// so instead we store it in a side channel.
    ///
    /// TODO: this should be a field on `FunctionAnalyzer`, not `Analyzer`
    decl_side_channel: Vec<Locatable<Declaration>>,
}
impl<T: Lexer> Iterator for Analyzer<T> {
    type Item = CompileResult<Locatable<Declaration>>;
    // Drain order matters: errors first, then already-analyzed declarations,
    // and only then is the parser advanced for more input.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Instead of returning `SemanticResult`, the analyzer puts all errors into `error_handler`.
            // This simplifies the logic in `next` greatly.
            // NOTE: this returns errors for a declaration before the declaration itself
            if let Some(err) = self.inner.error_handler.pop_front() {
                return Some(Err(err));
            // If we saw `int i, j, k;`, we treated those as different declarations
            // `j, k` will be stored into `pending`
            } else if let Some(decl) = self.inner.pending.pop_front() {
                if self.debug {
                    println!("hir: {}", decl.data);
                }
                return Some(Ok(decl));
            }
            // Now do the real work.
            // `?` ends the iterator when the parser is exhausted.
            let next = match self.declarations.next()? {
                Err(err) => return Some(Err(err)),
                Ok(decl) => decl,
            };
            let decls = self.inner.parse_external_declaration(next);
            // TODO: if an error occurs, should we still add the declaration to `pending`?
            self.inner.pending.extend(decls);
        }
    }
}
impl<I: Lexer> Analyzer<I> {
pub fn new(parser: Parser<I>, debug: bool) -> Self {
Self {
declarations: parser,
debug,
inner: PureAnalyzer::new(),
}
}
}
impl Default for PureAnalyzer {
    /// Equivalent to `PureAnalyzer::new()`.
    fn default() -> Self {
        Self::new()
    }
}
impl PureAnalyzer {
pub fn new() -> Self {
Self {
error_handler: ErrorHandler::new(),
scope: Scope::new(),
tag_scope: Scope::new(),
pending: VecDeque::new(),
initialized: HashSet::new(),
recursion_guard: RecursionGuard::default(),
decl_side_channel: Vec::new(),
}
}
/// Return all warnings seen so far.
///
/// These warnings are consumed and will not be returned if you call
/// `warnings()` again.
pub fn warnings(&mut self) -> VecDeque<CompileWarning> {
std::mem::take(&mut self.error_handler.warnings)
}
// I type these a lot
#[inline(always)]
fn err(&mut self, e: SemanticError, l: Location) {
self.error_handler.error(e, l);
}
#[inline(always)]
fn warn(&mut self, w: Warning, l: Location) {
self.error_handler.warn(w, l);
}
fn recursion_check(&mut self) -> RecursionGuard {
self.recursion_guard
.recursion_check(&mut self.error_handler)
}
/// 6.9 External Definitions
///
/// Either a function or a list of declarations.
fn parse_external_declaration(
&mut self,
next: Locatable<ast::ExternalDeclaration>,
) -> Vec<Locatable<Declaration>> {
use ast::ExternalDeclaration;
match next.data {
ExternalDeclaration::Function(func) => {
let id = func.id;
let (meta_ref, body) = FunctionAnalyzer::analyze(func, self, next.location);
self.scope.insert(id, meta_ref);
let decl = Declaration {
symbol: meta_ref,
init: Some(Initializer::FunctionBody(body)),
};
vec![Locatable::new(decl, next.location)]
}
ExternalDeclaration::Declaration(declaration) => {
self.parse_declaration(declaration, next.location)
}
}
}
/// A list of declarations: `int i, j, k;`
fn parse_declaration(
&mut self,
declaration: ast::Declaration,
location: Location,
) -> Vec<Locatable<Declaration>> {
let original = self.parse_specifiers(declaration.specifiers, location);
if original.storage_class == Some(StorageClass::Auto) && self.scope.is_global() {
self.err(SemanticError::AutoAtGlobalScope, location);
}
// TODO: this is such a hack: https://github.com/jyn514/rcc/issues/371
let sc = original.storage_class.unwrap_or(StorageClass::Auto);
let mut decls = Vec::new();
for d in declaration.declarators {
let mut ctype =
self.parse_declarator(original.ctype.clone(), d.data.declarator.decl, d.location);
if !ctype.is_function() && original.qualifiers.func != FunctionQualifiers::default() {
self.err(
SemanticError::FuncQualifiersNotAllowed(original.qualifiers.func),
d.location,
);
}
let id = d.data.declarator.id;
let id = match id {
Some(i) => i,
// int i, ();
None => {
self.err("declarations cannot be abstract".into(), d.location);
"<error>".into()
}
};
// NOTE: the parser handles typedefs on its own
if ctype == Type::Void && sc != StorageClass::Typedef {
// TODO: catch this error for types besides void?
self.err(SemanticError::VoidType, location);
ctype = Type::Error;
}
let init = if let Some(init) = d.data.init {
Some(self.parse_initializer(init, &ctype, d.location))
} else {
None
};
let symbol = Variable {
ctype,
id,
qualifiers: original.qualifiers,
storage_class: sc,
};
let symbol = self.declare(symbol, init.is_some(), d.location);
if init.is_some() {
self.initialized.insert(symbol);
}
decls.push(Locatable::new(Declaration { symbol, init }, d.location));
}
// int;
if decls.is_empty() && !original.declared_compound_type {
self.warn(Warning::EmptyDeclaration, location);
}
decls
}
#[cfg(test)]
#[inline(always)]
// used only for testing, so that I can keep `parse_typename` private most of the time
pub(crate) fn parse_typename_test(&mut self, ctype: ast::TypeName, location: Location) -> Type {
self.parse_typename(ctype, location)
}
/// Perform checks for parsing a single type name.
///
/// Type names are used most often in casts: `(int)i`
/// This allows `int` or `int *` or `int (*)()`, but not `int i, j;` or `int i`
///
/// 6.7.7 Type names
fn parse_typename(&mut self, ctype: ast::TypeName, location: Location) -> Type {
let parsed = self.parse_type(ctype.specifiers, ctype.declarator.decl, location);
// TODO: should these be syntax errors instead?
// extern int
if let Some(sc) = parsed.storage_class {
self.err(SemanticError::IllegalStorageClass(sc), location);
}
// const int
if parsed.qualifiers != Qualifiers::default() {
self.warn(Warning::IgnoredQualifier(parsed.qualifiers), location);
}
// int i
if let Some(id) = ctype.declarator.id {
self.err(SemanticError::IdInTypeName(id), location);
}
parsed.ctype
}
/// Parse a single type, given the specifiers and declarator.
fn parse_type(
&mut self,
specifiers: Vec<ast::DeclarationSpecifier>,
declarator: ast::DeclaratorType,
location: Location,
) -> ParsedType {
let mut specs = self.parse_specifiers(specifiers, location);
specs.ctype = self.parse_declarator(specs.ctype, declarator, location);
if !specs.ctype.is_function() && specs.qualifiers.func != FunctionQualifiers::default() {
self.err(
SemanticError::FuncQualifiersNotAllowed(specs.qualifiers.func),
location,
);
}
specs
}
/// The specifiers for a declaration: `const extern long int`
///
/// Note that specifiers are also used for declaring structs, such as
/// ```c
/// struct s { int i; };
/// ```
/// Normally, we warn when a declaration is empty,
/// but if we declared a struct, union, or enum, then no warning is emitted.
/// This is kept track of by `declared_compound_type`.
fn parse_specifiers(
&mut self,
specifiers: Vec<ast::DeclarationSpecifier>,
location: Location,
) -> ParsedType {
use ast::{DeclarationSpecifier::*, UnitSpecifier::*};
// need to parse specifiers now
// it's not enough to collect into a `Set` since `long long` has a different meaning than `long`
// instead, we see how many times each specifier is present
// however, for some specifiers this doesn't really make sense:
// if we see `struct s { int i; }` twice in a row,
// it's more likely that the user forgot a semicolon in between than tried to make some weird double struct type.
// so: count the specifiers that are keywords and store the rest somewhere out of the way
// 6.7.2 Type specifiers
let (counter, compounds) = count_specifiers(specifiers, &mut self.error_handler, location);
// Now that we've separated this into unit specifiers and compound specifiers,
// see if we can pick up the proper types and qualifiers.
let signed = match (counter.get(&Signed), counter.get(&Unsigned)) {
// `int i` or `signed i`
(None, None) | (Some(_), None) => true,
// `unsigned i`
(None, Some(_)) => false,
// `unsigned signed i`
(Some(_), Some(_)) => {
self.err(SemanticError::ConflictingSigned, location);
true
}
};
// `long` is special because of `long long` and `long double`
let mut ctype = None;
if let Some(&long_count) = counter.get(&Long) {
match long_count {
0 => panic!("constraint violation, should only set count if > 0"),
1 => {
// NOTE: this is handled later by the big `for type in [...]` loop
// see notes there
if counter.get(&Double).is_none() {
ctype = Some(Type::Long(signed));
}
}
// TODO: implement `long long` as a separate type
2 => ctype = Some(Type::Long(signed)),
_ => {
self.err(SemanticError::TooLong(long_count), location);
ctype = Some(Type::Long(signed));
}
}
}
// 6.7.3 Type qualifiers
let qualifiers = Qualifiers {
c_const: counter.get(&Const).is_some(),
volatile: counter.get(&Volatile).is_some(),
func: FunctionQualifiers {
inline: counter.get(&Inline).is_some(),
no_return: counter.get(&NoReturn).is_some(),
},
};
// 6.7.1 Storage-class specifiers
let mut storage_class = None;
for (spec, sc) in &[
(Auto, StorageClass::Auto),
(Register, StorageClass::Register),
(Static, StorageClass::Static),
(Extern, StorageClass::Extern),
(UnitSpecifier::Typedef, StorageClass::Typedef),
] {
if counter.get(spec).is_some() {
if let Some(existing) = storage_class {
self.err(
SemanticError::ConflictingStorageClass(existing, *sc),
location,
);
}
storage_class = Some(*sc);
}
}
// back to type specifiers
// TODO: maybe use `iter!` macro instead of `vec!` to avoid an allocation?
// https://play.rust-lang.org/?gist=0535aa4f749a14cb1b28d658446f3c13
for (spec, new_ctype) in vec![
(Bool, Type::Bool),
(Char, Type::Char(signed)),
(Short, Type::Short(signed)),
// already handled `long` when we handled `long long`
(Float, Type::Float),
// NOTE: if we saw `long double` before, we'll set `ctype` to `double` now
// TODO: make `long double` different from `double`
(Double, Type::Double),
(Void, Type::Void),
(VaList, Type::VaList),
] {
if counter.get(&spec).is_some() {
match (spec, ctype) {
// `short int` and `long int` are valid, see 6.7.2
// `long` is handled earlier, so we don't have to worry
// about it here.
(_, None) | (Short, Some(Type::Int(_))) => {}
(_, Some(existing)) => {
self.err(
SemanticError::ConflictingType(existing, new_ctype.clone()),
location,
);
}
}
ctype = Some(new_ctype);
}
}
if counter.get(&Int).is_some() {
match ctype {
None => ctype = Some(Type::Int(signed)),
// `long int` is valid
Some(Type::Short(_)) | Some(Type::Long(_)) => {}
Some(existing) => {
self.err(
SemanticError::ConflictingType(existing, Type::Int(signed)),
location,
);
ctype = Some(Type::Int(signed));
}
}
}
let mut declared_compound_type = false;
for compound in compounds {
let parsed = match compound {
Unit(_) => unreachable!("already caught"),
DeclarationSpecifier::Typedef(name) => {
let meta = self
.scope
.get(&name)
.expect("scope of parser and analyzer should match")
.get();
assert_eq!(meta.storage_class, StorageClass::Typedef);
meta.ctype.clone()
}
Struct(s) => self.struct_specifier(s, true, &mut declared_compound_type, location),
Union(s) => self.struct_specifier(s, false, &mut declared_compound_type, location),
Enum { name, members } => {
self.enum_specifier(name, members, &mut declared_compound_type, location)
}
};
// TODO: this should report the name of the typedef, not the type itself
if let Some(existing) = &ctype {
self.err(
SemanticError::ConflictingType(existing.clone(), parsed.clone()),
location,
);
}
ctype = Some(parsed);
}
// Check to see if we had a conflicting `signed` specifier
// Note we use `counter` instead of the `signed` bool
// because we've already set the default and forgotten whether it was originally present.
if counter.get(&Signed).is_some() || counter.get(&Unsigned).is_some() {
match &ctype {
// unsigned int
Some(Type::Char(_)) | Some(Type::Short(_)) | Some(Type::Int(_))
| Some(Type::Long(_)) => {}
// unsigned float
Some(other) => {
let err = SemanticError::CannotBeSigned(other.clone());
self.err(err, location);
}
// unsigned i
None => ctype = Some(Type::Int(signed)),
}
}
// `i;` or `const i;`, etc.
let ctype = ctype.unwrap_or_else(|| {
self.warn(Warning::ImplicitInt, location);
Type::Int(true)
});
ParsedType {
qualifiers,
storage_class,
ctype,
declared_compound_type,
}
}
// 6.7.2.1 Structure and union specifiers
fn struct_specifier(
&mut self,
struct_spec: ast::StructSpecifier,
is_struct: bool,
declared_struct: &mut bool,
location: Location,
) -> Type {
let ast_members = match struct_spec.members {
// struct { int i; }
Some(members) => members,
// struct s
None => {
let name = if let Some(name) = struct_spec.name {
name
} else {
// struct;
let err = format!(
"bare '{}' as type specifier is not allowed",
if is_struct { "struct" } else { "union " }
);
self.error_handler.error(SemanticError::from(err), location);
return Type::Error;
};
let keyword = if is_struct {
Keyword::Struct
} else {
Keyword::Union
};
return match (is_struct, self.tag_scope.get(&name)) {
// `struct s *p;`
(_, None) => self.forward_declaration(keyword, name, location),
// `struct s; struct s;` or `struct s { int i; }; struct s`
(true, Some(TagEntry::Struct(s))) => Type::Struct(StructType::Named(name, *s)),
// `union s; union s;` or `union s { int i; }; union s`
(false, Some(TagEntry::Union(s))) => Type::Union(StructType::Named(name, *s)),
(_, Some(_)) => {
// `union s; struct s;`
if self.tag_scope.get_immediate(&name).is_some() {
let kind = if is_struct { "struct" } else { "union " };
// TODO: say what the previous declaration was
let err = SemanticError::from(format!("use of '{}' with type tag '{}' that does not match previous struct declaration", name, kind));
self.error_handler.push_back(Locatable::new(err, location));
Type::Error
} else {
// `union s; { struct s; }`
self.forward_declaration(keyword, name, location)
}
}
};
}
};
let members: Vec<_> = ast_members
.into_iter()
.map(|m| self.struct_declarator_list(m, location).into_iter())
.flatten()
.collect();
if members.is_empty() {
self.err(SemanticError::from("cannot have empty struct"), location);
return Type::Error;
}
let constructor = if is_struct { Type::Struct } else { Type::Union };
if let Some(id) = struct_spec.name {
let struct_ref = if let Some(TagEntry::Struct(struct_ref))
| Some(TagEntry::Union(struct_ref)) =
self.tag_scope.get_immediate(&id)
{
let struct_ref = *struct_ref;
// struct s { int i; }; struct s { int i; };
if !struct_ref.get().is_empty() {
self.err(
SemanticError::from(format!(
"redefinition of {} '{}'",
if is_struct { "struct" } else { "union" },
id
)),
location,
);
}
struct_ref
} else {
StructRef::new()
};
struct_ref.update(members);
let entry = if is_struct {
TagEntry::Struct
} else {
TagEntry::Union
}(struct_ref);
self.tag_scope.insert(id, entry);
*declared_struct = true;
constructor(StructType::Named(id, struct_ref))
} else {
// struct { int i; }
constructor(StructType::Anonymous(std::rc::Rc::new(members)))
}
}
/*
struct_declarator_list: struct_declarator (',' struct_declarator)* ;
struct_declarator
    : declarator
    | ':' constant_expr // bitfield, not supported
    | declarator ':' constant_expr
    ;
*/
/// Parse the member declarators of a single struct/union member declaration,
/// returning one `Variable` per declared member.
///
/// Recovers from (but reports) the following semantic errors:
/// - function qualifiers (`inline`/`_Noreturn`) on the member type
/// - `void` members
/// - zero-sized or over-wide bitfields (bitfields are otherwise ignored with a warning)
/// - members whose struct/union type has not yet been completed
/// - a storage class on a member (`struct s { extern int i; }`)
fn struct_declarator_list(
    &mut self,
    members: ast::StructDeclarationList,
    location: Location,
) -> Vec<Variable> {
    let parsed_type = self.parse_specifiers(members.specifiers, location);
    if parsed_type.qualifiers.has_func_qualifiers() {
        self.err(
            SemanticError::FuncQualifiersNotAllowed(parsed_type.qualifiers.func),
            location,
        );
    }
    let mut parsed_members = Vec::new();
    // A member of a structure or union may have any complete object type other than a variably modified type.
    for ast::StructDeclarator { decl, bitfield } in members.declarators {
        let decl = match decl {
            // 12 A bit-field declaration with no declarator, but only a colon and a width, indicates an unnamed bit-field.
            // TODO: this should give an error if `bitfield` is None.
            None => continue,
            Some(d) => d,
        };
        let ctype = match self.parse_declarator(parsed_type.ctype.clone(), decl.decl, location)
        {
            Type::Void => {
                // TODO: catch this error for types besides void?
                self.err(SemanticError::VoidType, location);
                Type::Error
            }
            other => other,
        };
        let mut symbol = Variable {
            storage_class: StorageClass::Auto,
            qualifiers: parsed_type.qualifiers,
            ctype,
            id: decl.id.expect("struct members should have an id"),
        };
        // struct s { int i: 5 };
        if let Some(bitfield) = bitfield {
            let bit_size = match Self::const_uint(self.expr(bitfield)) {
                Ok(e) => e,
                Err(err) => {
                    self.error_handler.push_back(err);
                    1
                }
            };
            let type_size = symbol.ctype.sizeof().unwrap_or(0);
            if bit_size == 0 {
                let err = SemanticError::from(format!(
                    "C does not have zero-sized types. hint: omit the declarator {}",
                    symbol.id
                ));
                self.err(err, location);
            // struct s { int i: 65 }
            } else if bit_size > type_size * u64::from(crate::arch::CHAR_BIT) {
                let err = SemanticError::from(format!(
                    "cannot have bitfield {} with size {} larger than containing type {}",
                    symbol.id, bit_size, symbol.ctype
                ));
                self.err(err, location);
            }
            self.error_handler.warn(
                "bitfields are not implemented and will be ignored",
                location,
            );
        }
        match symbol.ctype {
            Type::Struct(StructType::Named(_, inner_members))
            | Type::Union(StructType::Named(_, inner_members))
                if inner_members.get().is_empty() =>
            {
                self.err(
                    SemanticError::from(format!(
                        "cannot use type '{}' before it has been defined",
                        symbol.ctype
                    )),
                    location,
                );
                // add this as a member anyway because
                // later code depends on structs being non-empty
                symbol.ctype = Type::Error;
            }
            _ => {}
        }
        parsed_members.push(symbol);
    }
    // struct s { extern int i; };
    if let Some(class) = parsed_type.storage_class {
        // All declarators may have been skipped above (e.g. only unnamed
        // bitfields), so `parsed_members` can legitimately be empty here;
        // don't panic in that case, just omit the member name.
        if let Some(member) = parsed_members.last() {
            self.err(
                SemanticError::from(format!(
                    "cannot specify storage class '{}' for struct member '{}'",
                    class, member.id,
                )),
                location,
            );
        } else {
            self.err(
                SemanticError::from(format!(
                    "cannot specify storage class '{}' for struct member",
                    class,
                )),
                location,
            );
        }
    }
    parsed_members
}
// 6.7.2.2 Enumeration specifiers
/// Parse an `enum` specifier, registering the tag (if named) and each
/// enumeration constant in scope, and returning the resulting `Type::Enum`.
///
/// `saw_enum` is reset to `false` when an existing definition is reused
/// (`enum e { A }; enum e my_e;`), so the caller knows no new type was declared.
fn enum_specifier(
    &mut self,
    enum_name: Option<InternedStr>,
    ast_members: Option<Vec<(InternedStr, Option<ast::Expr>)>>,
    saw_enum: &mut bool,
    location: Location,
) -> Type {
    *saw_enum = true;
    let ast_members = match ast_members {
        Some(members) => members,
        None => {
            // enum e
            let name = if let Some(name) = enum_name {
                name
            } else {
                // enum;
                let err = SemanticError::from("bare 'enum' as type specifier is not allowed");
                self.error_handler.error(err, location);
                return Type::Error;
            };
            match self.tag_scope.get(&name) {
                // enum e { A }; enum e my_e;
                Some(TagEntry::Enum(members)) => {
                    *saw_enum = false;
                    return Type::Enum(Some(name), members.clone());
                }
                // struct e; enum e my_e;
                Some(_) => {
                    // TODO: say what the previous type was
                    let err = SemanticError::from(format!("use of '{}' with type tag 'enum' that does not match previous struct declaration", name));
                    self.error_handler.push_back(Locatable::new(err, location));
                    return Type::Error;
                }
                // `enum e;` (invalid)
                None => return self.forward_declaration(Keyword::Enum, name, location),
            }
        }
    };
    // Each constant gets the previous constant's value + 1 unless it has an
    // explicit `= expr` initializer.
    let mut discriminant = 0;
    let mut members = vec![];
    for (name, maybe_value) in ast_members {
        // enum E { A = 5 };
        if let Some(value) = maybe_value {
            discriminant = Self::const_sint(self.expr(value)).unwrap_or_else(|err| {
                self.error_handler.push_back(err);
                std::i64::MIN
            });
        }
        members.push((name, discriminant));
        // TODO: this is such a hack
        // Temporarily declare the constant so later initializers can refer to
        // it; these are all removed and re-inserted with the full enum type below.
        let tmp_symbol = Variable {
            id: name,
            qualifiers: Qualifiers {
                c_const: true,
                ..Default::default()
            },
            storage_class: StorageClass::Register,
            ctype: Type::Enum(None, vec![(name, discriminant)]),
        };
        self.declare(tmp_symbol, false, location);
        discriminant = discriminant.checked_add(1).unwrap_or_else(|| {
            self.error_handler
                .push_back(location.error(SemanticError::EnumOverflow));
            0
        });
    }
    for (name, _) in &members {
        self.scope._remove(name);
    }
    // enum e {}
    if members.is_empty() {
        self.err(SemanticError::from("enums cannot be empty"), location)
    }
    if let Some(id) = enum_name {
        // enum e { A }; enum e { A };
        if self
            .tag_scope
            .insert(id, TagEntry::Enum(members.clone()))
            .is_some()
        {
            self.err(format!("redefinition of enum '{}'", id).into(), location);
        }
    }
    let ctype = Type::Enum(enum_name, members);
    match &ctype {
        Type::Enum(_, members) => {
            for &(id, _) in members {
                self.scope.insert(
                    id,
                    Variable {
                        id,
                        storage_class: StorageClass::Register,
                        qualifiers: Qualifiers::NONE,
                        ctype: ctype.clone(),
                    }
                    .insert(),
                );
            }
        }
        _ => unreachable!(),
    }
    ctype
}
/// Used for forward declaration of structs and unions.
///
/// Does not correspond to any grammar type.
/// e.g. `struct s;`
///
/// See also 6.7.2.3 Tags:
/// > A declaration of the form `struct-or-union identifier ;`
/// > specifies a structure or union type and declares the identifier as a tag of that type.
/// > If a type specifier of the form `struct-or-union identifier`
/// > occurs other than as part of one of the above forms, and no other declaration of the identifier as a tag is visible,
/// > then it declares an incomplete structure or union type, and declares the identifier as the tag of that type.
fn forward_declaration(
    &mut self,
    kind: Keyword,
    ident: InternedStr,
    location: Location,
) -> Type {
    // Enums cannot be forward-declared; see section 6.7.2.3 of the C11 standard.
    if kind == Keyword::Enum {
        self.err(
            SemanticError::from(format!(
                "cannot have forward reference to enum type '{}'",
                ident
            )),
            location,
        );
        return Type::Enum(Some(ident), vec![]);
    }
    // Register an empty (incomplete) struct/union under this tag; a later
    // definition will fill in its members through the shared `StructRef`.
    let struct_ref = StructRef::new();
    match kind {
        Keyword::Struct => {
            self.tag_scope.insert(ident, TagEntry::Struct(struct_ref));
            Type::Struct(StructType::Named(ident, struct_ref))
        }
        _ => {
            self.tag_scope.insert(ident, TagEntry::Union(struct_ref));
            Type::Union(StructType::Named(ident, struct_ref))
        }
    }
}
/// Parse the declarator for a variable, given a starting type.
/// e.g. for `int *p`, takes `start: Type::Int(true)` and returns `Type::Pointer(Type::Int(true))`
///
/// The parser generated a linked list `DeclaratorType`,
/// which we now transform into the recursive `Type`.
///
/// 6.7.6 Declarators
fn parse_declarator(
    &mut self,
    current: Type,
    decl: ast::DeclaratorType,
    location: Location,
) -> Type {
    use crate::data::ast::DeclaratorType::*;
    use crate::data::types::{ArrayType, FunctionType};
    // guard against unbounded recursion on pathologically nested declarators
    let _guard = self.recursion_check();
    match decl {
        // base case: no more declarator links, `current` is the final type
        End => current,
        Pointer { to, qualifiers } => {
            use UnitSpecifier::*;
            // recurse first: the pointee type is everything to the right of the `*`
            let inner = self.parse_declarator(current, *to, location);
            // we reuse `count_specifiers` even though we really only want the qualifiers
            let (counter, compounds) =
                count_specifiers(qualifiers, &mut self.error_handler, location);
            // *const volatile
            // TODO: this shouldn't allow `inline` or `_Noreturn`
            let qualifiers = Qualifiers {
                c_const: counter.get(&Const).is_some(),
                volatile: counter.get(&Volatile).is_some(),
                func: FunctionQualifiers {
                    inline: counter.get(&Inline).is_some(),
                    no_return: counter.get(&NoReturn).is_some(),
                },
            };
            // anything in the counter that isn't a qualifier is an error
            for &q in counter.keys() {
                if !q.is_qualifier() {
                    // *extern
                    self.err(SemanticError::NotAQualifier(q.into()), location);
                }
            }
            for spec in compounds {
                // *struct s {}
                self.err(SemanticError::NotAQualifier(spec), location);
            }
            Type::Pointer(Box::new(inner), qualifiers)
        }
        Array { of, size } => {
            // int a[5]
            let size = if let Some(expr) = size {
                // the array length must be a compile-time unsigned constant;
                // fall back to 1 on error so analysis can continue
                let size = Self::const_uint(self.expr(*expr)).unwrap_or_else(|err| {
                    self.error_handler.push_back(err);
                    1
                });
                ArrayType::Fixed(size)
            } else {
                // int a[]
                ArrayType::Unbounded
            };
            let of = self.parse_declarator(current, *of, location);
            // int a[]()
            if let Type::Function(_) = &of {
                self.err(SemanticError::ArrayStoringFunction(of.clone()), location);
            }
            Type::Array(Box::new(of), size)
        }
        Function(func) => {
            // TODO: give a warning for `const int f();` somewhere
            let return_type = self.parse_declarator(current, *func.return_type, location);
            // functions may not return arrays or functions (6.7.6.3)
            match &return_type {
                // int a()[]
                Type::Array(_, _) => self.err(
                    SemanticError::IllegalReturnType(return_type.clone()),
                    location,
                ),
                // int a()()
                Type::Function(_) => self.err(
                    SemanticError::IllegalReturnType(return_type.clone()),
                    location,
                ),
                _ => {}
            }
            // `names` detects duplicate parameter names within this prototype
            let mut names = HashSet::new();
            let mut params = Vec::new();
            for param in func.params {
                // TODO: this location should be that of the param, not of the function
                let mut param_type =
                    self.parse_type(param.specifiers, param.declarator.decl, location);
                // `int f(int a[])` -> `int f(int *a)`
                if let Type::Array(to, _) = param_type.ctype {
                    param_type.ctype = Type::Pointer(to, Qualifiers::default());
                }
                // C11 Standard 6.7.6.3 paragraph 8
                // "A declaration of a parameter as 'function returning type' shall be
                // adjusted to 'pointer to function returning type', as in 6.3.2.1."
                // `int f(int g())` -> `int f(int (*g)())`
                if param_type.ctype.is_function() {
                    param_type.ctype =
                        Type::Pointer(Box::new(param_type.ctype), Qualifiers::default());
                }
                // int a(extern int i)
                if let Some(sc) = param_type.storage_class {
                    self.err(SemanticError::ParameterStorageClass(sc), location);
                }
                let id = if let Some(name) = param.declarator.id {
                    // int f(int a, int a)
                    if names.contains(&name) {
                        self.err(SemanticError::DuplicateParameter(name), location)
                    }
                    names.insert(name);
                    name
                } else {
                    // int f(int) — unnamed parameters get the empty interned string
                    InternedStr::default()
                };
                let meta = Variable {
                    ctype: param_type.ctype,
                    id,
                    qualifiers: param_type.qualifiers,
                    storage_class: StorageClass::Auto,
                };
                params.push(meta);
            }
            // int f(void);
            let is_void = match params.as_slice() {
                [Variable {
                    ctype: Type::Void, ..
                }] => true,
                _ => false,
            };
            // int f(void, int) or int f(int, void) or ...
            if !is_void
                && params.iter().any(|param| match param.ctype {
                    Type::Void => true,
                    _ => false,
                })
            {
                self.err(SemanticError::InvalidVoidParameter, location);
            // int f(void, ...)
            } else if func.varargs && is_void {
                self.err(SemanticError::VoidVarargs, location);
            // int f(...)
            } else if func.varargs && params.is_empty() {
                self.err(SemanticError::VarargsWithoutParam, location);
            }
            Type::Function(FunctionType {
                params: params.into_iter().map(|m| m.insert()).collect(),
                return_type: Box::new(return_type),
                varargs: func.varargs,
            })
        }
    }
}
// used for arrays like `int a[BUF_SIZE - 1];` and enums like `enum { A = 1 }`
/// Constant-fold `expr` and require the result to be a literal.
///
/// # Errors
/// Returns `SemanticError::NotConstant` (wrapping the folded-but-still-runtime
/// expression) when the expression cannot be evaluated at compile time.
fn const_literal(expr: Expr) -> CompileResult<Literal> {
    let location = expr.location;
    // `map_err` instead of `or_else(|e| Err(...))` — same behavior, idiomatic form
    expr.const_fold()?.into_literal().map_err(|runtime_expr| {
        Locatable::new(SemanticError::NotConstant(runtime_expr).into(), location)
    })
}
/// Return an unsigned integer that can be evaluated at compile time, or an error otherwise.
fn const_uint(expr: Expr) -> CompileResult<crate::arch::SIZE_T> {
    use Literal::*;
    let location = expr.location;
    let literal = Self::const_literal(expr)?;
    match literal {
        // already unsigned, nothing to check
        UnsignedInt(value) => Ok(value),
        // signed values are fine as long as they are non-negative
        Int(value) if value >= 0 => Ok(value as u64),
        Int(_) => Err(Locatable::new(
            SemanticError::NegativeLength.into(),
            location,
        )),
        // character constants promote to their code point
        Char(value) => Ok(value.into()),
        // strings and floats are not integral
        Str(_) | Float(_) => Err(Locatable::new(
            SemanticError::NonIntegralLength.into(),
            location,
        )),
    }
}
/// Return a signed integer that can be evaluated at compile time, or an error otherwise.
fn const_sint(expr: Expr) -> CompileResult<i64> {
    use Literal::*;
    let location = expr.location;
    let literal = Self::const_literal(expr)?;
    match literal {
        // an unsigned constant may be too large to represent as i64
        UnsignedInt(value) => value.try_into().map_err(|_| {
            Locatable::new(
                SemanticError::ConstOverflow { is_positive: true }.into(),
                location,
            )
        }),
        Int(value) => Ok(value),
        // character constants promote to their code point
        Char(value) => Ok(value.into()),
        // strings and floats are not integral
        Str(_) | Float(_) => Err(Locatable::new(
            SemanticError::NonIntegralLength.into(),
            location,
        )),
    }
}
/// Given some variable that we've already parsed (`decl`), perform various checks and add it to the current scope.
///
/// In particular, this checks that
/// - for any function `main()`, it has a signature compatible with that required by the C standard
/// - either this variable has not yet been seen in this scope
/// - or it is a global variable that is compatible with the previous declaration (see below)
///
/// This returns an opaque index to the `Metadata`.
fn declare(&mut self, mut decl: Variable, init: bool, location: Location) -> Symbol {
    if decl.id == "main".into() {
        if let Type::Function(ftype) = &decl.ctype {
            // int main(int)
            if !ftype.is_main_func_signature() {
                self.err(SemanticError::IllegalMainSignature, location);
            }
        }
    }
    // e.g. extern int i = 1;
    // this is a silly thing to do, but valid: https://stackoverflow.com/a/57900212/7669110
    if decl.storage_class == StorageClass::Extern && !decl.ctype.is_function() && init {
        self.warn(Warning::ExtraneousExtern, location);
        decl.storage_class = StorageClass::Auto;
    }
    let id = decl.id;
    let symbol = decl.insert();
    // `insert` returns the previous symbol under this name, if any; a prior
    // declaration means we must check the two for compatibility
    if let Some(existing_ref) = self.scope.insert(id, symbol) {
        let existing = existing_ref.get();
        let meta = symbol.get();
        // 6.2.2p4
        // > For an identifier declared with the storage-class specifier extern in a scope in which a prior declaration of that identifier is visible,
        // > if the prior declaration specifies internal or external linkage,
        // > the linkage of the identifier at the later declaration is the same as the linkage specified at the prior declaration.
        // > If no prior declaration is visible, or if the prior declaration specifies no linkage, then the identifier has external linkage.
        //
        // i.e. `static int f(); int f();` is the same as `static int f(); static int f();`
        // special case redefining the same type
        // NOTE: redeclarations are only ever compatible at global scope; any
        // block-scope redeclaration falls through to the error branch
        if self.scope.is_global()
            // int i; int i;
            && (existing == meta
                // `static int i; extern int i;` or `int i; extern int i;`
                || ((existing.storage_class == StorageClass::Static
                    || existing.storage_class == StorageClass::Auto)
                    && meta.storage_class == StorageClass::Extern)
                // 6.2.2
                // > For an identifier declared with the storage-class specifier extern ...
                // > If no prior declaration is visible ... then the identifier has external linkage.
                // and also
                // > 3 If the declaration of a file scope identifier for an object contains the storage- class specifier static, the identifier has internal linkage.
                // so since
                // > If, within a translation unit, the same identifier appears with both internal and external linkage, the behavior is undefined.
                // extern int i; int i;
                || (existing.storage_class == StorageClass::Extern && meta.storage_class != StorageClass::Static))
        {
            // int i = 1; int i = 2;
            if init && self.initialized.contains(&existing_ref) {
                self.err(SemanticError::Redefinition(id), location);
            }
        } else {
            // extern int i; static int i;
            let err = SemanticError::IncompatibleRedeclaration(id, existing_ref, symbol);
            self.err(err, location);
        }
    }
    symbol
}
}
impl types::FunctionType {
// check if this is a valid signature for 'main'
fn is_main_func_signature(&self) -> bool {
// main must return 'int' and must not be variadic
if *self.return_type != Type::Int(true) || self.varargs {
return false;
}
// allow 'main()''
if self.params.is_empty() {
return true;
}
// so the borrow-checker doesn't complain
let meta: Vec<_> = self.params.iter().map(|param| param.get()).collect();
let types: Vec<_> = meta.iter().map(|param| ¶m.ctype).collect();
match types.as_slice() {
// allow 'main(void)'
[Type::Void] => true,
// TODO: allow 'int main(int argc, char *argv[], char *environ[])'
[Type::Int(true), Type::Pointer(t, _)] | [Type::Int(true), Type::Array(t, _)] => {
match &**t {
Type::Pointer(inner, _) => inner.is_char(),
_ => false,
}
}
_ => false,
}
}
}
impl Type {
    /// Whether this type is plain (signed) `char`.
    /// Used when checking `main`'s `argv` parameter type.
    #[inline]
    fn is_char(&self) -> bool {
        // `matches!` replaces the manual `match { ... => true, _ => false }`
        matches!(self, Type::Char(true))
    }
}
/// Analyze a single function
///
/// This is separate from `Analyzer` so that `metadata` does not have to be an `Option`.
struct FunctionAnalyzer<'a> {
    /// the function we are currently compiling.
    /// used for checking return types
    metadata: FunctionData,
    /// We need this for the scopes, as well as for parsing expressions
    analyzer: &'a mut PureAnalyzer,
}
#[derive(Debug)]
/// used to keep track of function metadata
/// while doing semantic analysis
struct FunctionData {
    /// the name of the function
    id: InternedStr,
    /// where the function was declared
    location: Location,
    /// the return type of the function
    /// (used to type-check `return` statements in the body)
    return_type: Type,
}
impl FunctionAnalyzer<'_> {
    /// Performs semantic analysis on the function and adds it to `METADATA_STORE`.
    /// Returns the analyzed statements.
    fn analyze(
        func: ast::FunctionDefinition,
        analyzer: &mut PureAnalyzer,
        location: Location,
    ) -> (Symbol, Vec<Stmt>) {
        let parsed_func = analyzer.parse_type(func.specifiers, func.declarator.into(), location);
        // rcc ignores `inline` and `_Noreturn`
        if parsed_func.qualifiers != Qualifiers::default() {
            analyzer.error_handler.warn(
                Warning::FunctionQualifiersIgnored(parsed_func.qualifiers),
                location,
            );
        }
        // functions may only be `extern` or `static` (default: extern)
        let sc = match parsed_func.storage_class {
            None => StorageClass::Extern,
            Some(sc @ StorageClass::Extern) | Some(sc @ StorageClass::Static) => sc,
            // auto int f();
            Some(other) => {
                analyzer.err(SemanticError::InvalidFuncStorageClass(other), location);
                StorageClass::Extern
            }
        };
        let metadata = Variable {
            ctype: parsed_func.ctype.clone(),
            id: func.id,
            qualifiers: parsed_func.qualifiers,
            storage_class: sc,
        };
        // declare the function itself (checks `main`'s signature, redeclarations, etc.)
        let symbol = analyzer.declare(metadata, true, location);
        let func_type = match parsed_func.ctype {
            Type::Function(ftype) => ftype,
            _ => unreachable!(),
        };
        // used for figuring out what casts `return 1;` should make
        let tmp_metadata = FunctionData {
            location,
            id: func.id,
            return_type: *func_type.return_type,
        };
        // function definitions may only appear at file scope
        assert!(analyzer.scope.is_global());
        assert!(analyzer.tag_scope.is_global());
        let mut func_analyzer = FunctionAnalyzer {
            metadata: tmp_metadata,
            analyzer,
        };
        // parameters live in the function's own scope
        func_analyzer.enter_scope();
        for (i, param) in func_type.params.into_iter().enumerate() {
            let meta = param.get();
            if meta.id == InternedStr::default() && meta.ctype != Type::Void {
                // int f(int) {}
                func_analyzer.err(
                    SemanticError::MissingParamName(i, meta.ctype.clone()),
                    location,
                );
            }
            // TODO: I think this should go through `declare` instead,
            // but that requires having a mutable `Metadata`
            func_analyzer.analyzer.scope.insert(meta.id, param);
        }
        let stmts = func
            .body
            .into_iter()
            .map(|s| func_analyzer.parse_stmt(s))
            .collect();
        // TODO: this location should be the end of the function, not the start
        func_analyzer.leave_scope(location);
        // leaving the scope must restore us to file scope
        assert!(analyzer.tag_scope.is_global());
        assert!(analyzer.scope.is_global());
        (symbol, stmts)
    }
}
impl FunctionAnalyzer<'_> {
    /// Forward an error to the underlying analyzer's error handler.
    fn err(&mut self, err: SemanticError, location: Location) {
        self.analyzer.err(err, location);
    }
    /// Open a new variable scope and tag scope (entering the function body).
    fn enter_scope(&mut self) {
        self.analyzer.scope.enter();
        self.analyzer.tag_scope.enter();
    }
    /// Close the current scopes, first checking that no variable in the
    /// scope was declared with a struct/union type that is still incomplete.
    fn leave_scope(&mut self, location: Location) {
        for object in self.analyzer.scope.get_all_immediate().values() {
            let object = object.get();
            match &object.ctype {
                Type::Struct(StructType::Named(name, members))
                | Type::Union(StructType::Named(name, members)) => {
                    if members.get().is_empty()
                        // `extern struct s my_s;` and `typedef struct s S;` are fine
                        && object.storage_class != StorageClass::Extern
                        && object.storage_class != StorageClass::Typedef
                    {
                        // struct s my_s;
                        self.analyzer.error_handler.error(
                            SemanticError::ForwardDeclarationIncomplete(*name, object.id),
                            location,
                        );
                    }
                }
                _ => {}
            }
        }
        self.analyzer.scope.exit();
        self.analyzer.tag_scope.exit();
    }
}
/// The result of parsing a declaration's specifier list:
/// storage class, qualifiers, and the base type.
struct ParsedType {
    // needs to be option because the default varies greatly depending on the context
    storage_class: Option<StorageClass>,
    qualifiers: Qualifiers,
    ctype: Type,
    // TODO: this is fishy
    // whether the specifiers themselves declared a struct/union/enum
    declared_compound_type: bool,
}
use ast::{DeclarationSpecifier, UnitSpecifier};
/// Split a specifier list into a count of each unit specifier (`int`,
/// `const`, ...) plus the compound specifiers (struct/union/enum/typedef),
/// reporting duplicated unit specifiers along the way.
///
/// `long` is exempt from the duplicate check because `long long` is valid.
fn count_specifiers(
    specifiers: Vec<DeclarationSpecifier>,
    error_handler: &mut ErrorHandler,
    location: Location,
) -> (Counter<UnitSpecifier, usize>, Vec<DeclarationSpecifier>) {
    use DeclarationSpecifier::*;
    use UnitSpecifier::*;
    let mut counter = Counter::<_, usize>::new();
    let mut compounds = Vec::new();
    for spec in specifiers {
        if let Unit(unit) = spec {
            counter.update(std::iter::once(unit));
        } else {
            compounds.push(spec);
        }
    }
    for (&spec, &count) in counter.iter() {
        // a single occurrence is always fine, and `long long` is legal
        if count <= 1 || spec == Long {
            continue;
        }
        if spec.is_type() {
            // e.g. `int int i;` — a hard error
            let err = SemanticError::InvalidSpecifier {
                existing: spec.into(),
                new: spec.into(),
            };
            error_handler.error(err, location);
        } else {
            // e.g. `const const i;` — only a warning
            error_handler.warn(Warning::DuplicateSpecifier(spec, count), location);
        }
    }
    (counter, compounds)
}
impl UnitSpecifier {
    /// Whether this specifier is a (type or function) qualifier rather than
    /// part of the base type.
    fn is_qualifier(self) -> bool {
        use UnitSpecifier::*;
        // `matches!` replaces the manual `match { ... => true, _ => false }`
        matches!(self, Const | Volatile | Restrict | Inline | NoReturn)
    }
    /// Returns whether this is a self-contained type, not just whether this modifies a type.
    /// For example, `int` and `long` are self-contained types, but `unsigned` and `_Complex` are not.
    /// This is despite the fact that `unsigned i;` is valid and means `unsigned int i;`
    fn is_type(self) -> bool {
        use UnitSpecifier::*;
        matches!(self, Bool | Char | Int | Long | Float | Double | VaList)
    }
}
#[cfg(test)]
pub(crate) mod test {
use super::{Error, *};
use crate::data::types::{ArrayType, FunctionType, Type::*};
use crate::lex::PreProcessor;
use crate::parse::test::*;
/// Parse `input` with `parse_func`, then run `analyze_func` over the AST,
/// returning the first queued error (if any) instead of the analysis result.
pub(crate) fn analyze<'c, 'input: 'c, P, A, R, S, E>(
    input: &'input str,
    parse_func: P,
    analyze_func: A,
) -> CompileResult<R>
where
    P: Fn(&mut Parser<PreProcessor<'c>>) -> Result<S, E>,
    A: Fn(&mut PureAnalyzer, S) -> R,
    CompileError: From<E>,
{
    let mut p = parser(input);
    let ast = parse_func(&mut p)?;
    let mut a = PureAnalyzer::new();
    let e = analyze_func(&mut a, ast);
    // any queued semantic error takes precedence over the returned value
    if let Some(err) = a.error_handler.pop_front() {
        return Err(err);
    }
    Ok(e)
}
/// Parse `s` and return its first declaration (or error), if any.
fn maybe_decl(s: &str) -> Option<CompileResult<Declaration>> {
    decls(s).into_iter().next()
}
/// Parse `s` and return its first declaration, panicking if there was none.
pub(crate) fn decl(s: &str) -> CompileResult<Declaration> {
    maybe_decl(s).unwrap_or_else(|| panic!("expected a declaration or error: '{}'", s))
}
/// Parse and analyze every declaration in `s`, stripping the locations.
pub(crate) fn decls(s: &str) -> Vec<CompileResult<Declaration>> {
    Analyzer::new(parser(s), false)
        .map(|o| o.map(|l| l.data))
        .collect()
}
/// Assert that analyzing `input` produces exactly `errs` errors,
/// `warnings` warnings, and `decls` successful declarations.
pub(crate) fn assert_errs_decls(input: &str, errs: usize, warnings: usize, decls: usize) {
    let mut a = Analyzer::new(parser(input), false);
    let (mut a_errs, mut a_decls) = (0, 0);
    for res in &mut a {
        if res.is_err() {
            a_errs += 1;
        } else {
            a_decls += 1;
        }
    }
    let a_warns = a.inner.error_handler.warnings.len();
    if (a_errs, a_warns, a_decls) != (errs, warnings, decls) {
        println!(
            "({} errs, {} warnings, {} decls) != ({}, {}, {}) when parsing {}",
            a_errs, a_warns, a_decls, errs, warnings, decls, input
        );
        println!("note: warnings:");
        for warning in a.inner.error_handler.warnings {
            println!("- {}", warning.data);
        }
        // Previously this helper only *printed* the mismatch and returned,
        // so a wrong count never failed the test. An `assert_*` helper
        // must actually fail.
        panic!("mismatched error/warning/declaration counts for '{}'", input);
    };
}
/// Parse and semantically analyze a single expression.
pub(crate) fn analyze_expr(s: &str) -> CompileResult<Expr> {
    analyze(s, Parser::expr, PureAnalyzer::expr)
}
/// Assert that parsing `left` pretty-prints exactly as `right`.
pub(crate) fn assert_decl_display(left: &str, right: &str) {
    assert_eq!(decl(left).unwrap().to_string(), right);
}
/// Assert that a declaration round-trips through the pretty-printer unchanged.
fn assert_extern_decl_display(s: &str) {
    // `&format!("{}", s)` was a redundant allocation producing exactly `s`
    assert_decl_display(s, s);
}
/// Assert that two source strings produce identically-displayed first declarations.
pub(super) fn assert_same(left: &str, right: &str) {
    assert_eq!(
        decl(left).unwrap().to_string(),
        decl(right).unwrap().to_string()
    );
}
/// Assert that `s` pretty-prints back to itself.
pub(crate) fn assert_no_change(s: &str) {
    assert_decl_display(s, s);
}
/// Compare a parsed declaration's type against an expected `Type`,
/// ignoring symbol-table identity for function parameters (which use
/// fresh `MetadataRef`s on every parse and so can never be `==`).
fn match_type(lexed: CompileResult<Declaration>, given_type: Type) -> bool {
    // structural type equality, recursing through pointers/arrays/functions
    fn type_helper(ctype: &Type, given_type: &Type) -> bool {
        match (ctype, given_type) {
            // because the parameters use `MetadataRef`,
            // it's impossible to have the same ref twice, even in unit tess
            (Type::Function(actual), Type::Function(expected)) => {
                // TODO: this only handles one level of function nesting
                actual
                    .params
                    .iter()
                    .zip(&expected.params)
                    .all(|(left, right)| metadata_helper(&left.get(), &right.get()))
                    && {
                        println!("all params match");
                        true
                    }
                    && dbg!(type_helper(&actual.return_type, &expected.return_type))
                    && dbg!(actual.varargs == expected.varargs)
            }
            (Type::Pointer(a, lq), Type::Pointer(b, rq)) => type_helper(&*a, &*b) && lq == rq,
            (Type::Array(a, la), Type::Array(b, ra)) => type_helper(&*a, &*b) && la == ra,
            (a, b) => a == b,
        }
    }
    // parameter-by-parameter comparison (type + storage class + qualifiers + name)
    fn metadata_helper(left: &Variable, right: &Variable) -> bool {
        dbg!(type_helper(dbg!(&left.ctype), dbg!(&right.ctype)))
            && left.storage_class == right.storage_class
            && left.qualifiers == right.qualifiers
            && left.id == right.id
    }
    lexed.map_or(false, |decl| {
        type_helper(&decl.symbol.get().ctype, &given_type)
    })
}
#[test]
// a declarator with no identifier (`int *;`) is a parse error, not a semantic one
fn no_name_should_be_syntax_error() {
    match decl("int *;").unwrap_err().data {
        Error::Syntax(_) => {}
        _ => panic!("expected syntax error"),
    }
}
#[test]
// storage classes are recorded on the symbol; `auto` is illegal at file scope
fn storage_class() {
    assert_extern_decl_display("int i;");
    assert_eq!(
        decl("extern int i;").unwrap().symbol.get().storage_class,
        StorageClass::Extern
    );
    assert_eq!(
        decl("static int i;").unwrap().symbol.get().storage_class,
        StorageClass::Static
    );
    match decl("auto int i;").unwrap_err().data {
        Error::Semantic(SemanticError::AutoAtGlobalScope) => {}
        _ => panic!("wrong error"),
    }
}
#[test]
// basic function declarations, including function-parameter decay
fn function() {
    assert_extern_decl_display("int f();");
    assert_extern_decl_display("int f(int i);");
    assert_extern_decl_display("int f(int i, int j);");
    // functions decay to pointers when used as parameters
    assert_same("int f(int g());", "int f(int (*g)());");
    assert_same("int f(int g(), ...);", "int f(int (*g)(), ...);");
}
#[test]
// every combination of type specifiers maps to the expected internal `Type`
fn test_decl_specifiers() {
    assert!(match_type(decl("char i;"), Type::Char(true)));
    assert!(match_type(decl("unsigned char i;"), Type::Char(false)));
    assert!(match_type(decl("signed short i;"), Type::Short(true)));
    assert!(match_type(decl("unsigned short i;"), Type::Short(false)));
    assert!(match_type(decl("long i;"), Type::Long(true)));
    assert!(match_type(decl("long long i;"), Type::Long(true)));
    assert!(match_type(decl("long unsigned i;"), Type::Long(false)));
    assert!(match_type(decl("int i;"), Type::Int(true)));
    assert!(match_type(decl("signed i;"), Type::Int(true)));
    assert!(match_type(decl("unsigned i;"), Type::Int(false)));
    assert!(match_type(decl("float f;"), Type::Float));
    assert!(match_type(decl("double d;"), Type::Double));
    // `long double` is treated the same as `double`
    assert!(match_type(decl("long double d;"), Type::Double));
    assert!(match_type(
        decl("void f();"),
        Type::Function(FunctionType {
            return_type: Box::new(Type::Void),
            params: vec![],
            varargs: false
        })
    ));
    assert!(match_type(decl("const volatile int f;"), Type::Int(true)));
    assert!(match_type(decl("long double d;"), Type::Double));
    assert!(match_type(decl("short int i;"), Type::Short(true)));
    assert!(match_type(decl("long int i;"), Type::Long(true)));
    assert!(match_type(decl("long long int i;"), Type::Long(true)));
}
#[test]
// conflicting specifier combinations are errors; duplicates only warn
fn test_bad_decl_specs() {
    assert!(maybe_decl("int;").is_none());
    for s in &[
        "char char i;",
        "char int i",
        "_Bool int i",
        "float int i",
        "char long i;",
        "long char i;",
        "float char i;",
        "float double i;",
        "double double i;",
        "double unsigned i;",
        "short double i;",
        "int void i;",
        "void int i;",
    ] {
        assert!(decl(s).is_err(), "'{}' should be an error", s);
    }
    // default to int if we don't have a type
    // don't panic if we see duplicate specifiers
    assert!(match_type(decl("unsigned unsigned i;"), Type::Int(false)));
    assert!(match_type(decl("extern extern i;"), Type::Int(true)));
    assert!(match_type(decl("const const i;"), Type::Int(true)));
    assert!(match_type(decl("const volatile i;"), Type::Int(true)));
}
#[test]
// bounded, unbounded, and nested array declarators
fn test_arrays() {
    assert!(match_type(
        decl("int a[];"),
        Array(Box::new(Int(true)), ArrayType::Unbounded)
    ));
    assert!(match_type(
        decl("unsigned a[];"),
        Array(Box::new(Int(false)), ArrayType::Unbounded)
    ));
    assert!(match_type(
        decl("_Bool a[][][];"),
        Array(
            Box::new(Array(
                Box::new(Array(Box::new(Bool), ArrayType::Unbounded)),
                ArrayType::Unbounded
            )),
            ArrayType::Unbounded
        )
    ));
    assert_extern_decl_display("int a[1];");
    // constant expressions (including casts) are folded for array lengths
    assert_same("int a[(int)1];", "int a[1];");
}
#[test]
// pointer declarators with qualifiers round-trip through the pretty-printer
fn test_pointers() {
    for &pointer in &[
        "void *a;",
        "float *const a;",
        "double *volatile *const a;",
        "double *volatile *const a;",
        "_Bool *const volatile a;",
    ] {
        assert_extern_decl_display(pointer);
    }
}
#[test]
// mixed pointer/array declarators nest in the correct order
fn test_pointers_and_arrays() {
    // cdecl: declare foo as array 10 of pointer to pointer to char
    assert!(match_type(
        decl("char **foo[10];"),
        Array(
            Box::new(Pointer(
                Box::new(Pointer(Box::new(Char(true)), Qualifiers::default(),)),
                Qualifiers::default(),
            )),
            ArrayType::Fixed(10),
        )
    ));
    // cdecl: declare foo as pointer to pointer to array 10 of int
    assert!(match_type(
        decl("int (**foo)[10];"),
        Pointer(
            Box::new(Pointer(
                Box::new(Array(Box::new(Int(true)), ArrayType::Fixed(10),)),
                Qualifiers::default(),
            )),
            Qualifiers::default(),
        )
    ));
}
#[test]
// function declarators: return types, function pointers, parameters, varargs
fn test_functions() {
    assert!(match_type(
        decl("void *f();"),
        Function(FunctionType {
            return_type: Box::new(Pointer(Box::new(Type::Void), Qualifiers::default())),
            params: vec![],
            varargs: false,
        })
    ));
    // cdecl: declare i as pointer to function returning int;
    assert!(match_type(
        decl("int (*i)();"),
        Pointer(
            Box::new(Function(FunctionType {
                return_type: Box::new(Int(true)),
                params: vec![],
                varargs: false,
            })),
            Qualifiers::default()
        )
    ));
    // cdecl: declare i as pointer to function (int, char, float) returning int
    assert_no_change("extern int (*i)(int, char, float);");
    // cdecl: declare i as pointer to function (pointer to function returning int) returning int
    assert!(match_type(
        decl("int (*i)(int (*f)());"),
        Pointer(
            Box::new(Function(FunctionType {
                return_type: Box::new(Int(true)),
                params: vec![Variable {
                    id: InternedStr::get_or_intern("f"),
                    ctype: Pointer(
                        Box::new(Function(FunctionType {
                            return_type: Box::new(Int(true)),
                            params: vec![],
                            varargs: false
                        })),
                        Qualifiers::default()
                    ),
                    qualifiers: Default::default(),
                    storage_class: Default::default(),
                }
                .insert()],
                varargs: false,
            })),
            Qualifiers::default()
        )
    ));
    assert!(match_type(
        decl("int f(int, ...);"),
        Function(FunctionType {
            return_type: Box::new(Int(true)),
            params: vec![Variable {
                id: Default::default(),
                ctype: Int(true),
                qualifiers: Default::default(),
                storage_class: Default::default()
            }
            .insert()],
            varargs: true,
        })
    ));
}
#[test]
// `int a[static 5]` is valid only as a function parameter, where it decays to a pointer
fn test_functions_array_parameter_static() {
    assert!(match_type(
        decl("void f(int a[static 5]);"),
        Function(FunctionType {
            return_type: Box::new(Void),
            params: vec![Variable {
                id: InternedStr::get_or_intern("a"),
                ctype: Pointer(Box::new(Int(true)), Qualifiers::default()),
                qualifiers: Default::default(),
                storage_class: Default::default(),
            }
            .insert()],
            varargs: false
        })
    ));
    assert!(decl("int b[static 10];").is_err());
}
#[test]
// `inline` is only legal on function declarations
fn test_inline_keyword() {
    // Correct usage
    assert!(match_type(
        decl("inline void f(void);"),
        Function(FunctionType {
            return_type: Box::new(Void),
            params: vec![Variable {
                id: InternedStr::default(),
                ctype: Type::Void,
                qualifiers: Qualifiers::default(),
                storage_class: StorageClass::default(),
            }
            .insert()],
            varargs: false,
        })
    ));
    // `inline` is not allowed in the following cases
    assert!(decl("inline int a;").is_err()); // Normal declarations
    assert!(decl("void f(inline int a);").is_err()); // Parameter lists
    assert!(decl("struct F { inline int a; } f;").is_err()); // Struct members
    assert!(
        // Type names
        decl("int main() { char a = (inline char)(4); }").is_err()
    );
    assert!(decl("typedef a inline int;").is_err());
}
#[test]
// deeply nested declarators exercising pointer/array/function combinations
fn test_complex() {
    // cdecl: declare bar as const pointer to array 10 of pointer to function (int) returning volatile pointer to char
    assert!(match_type(
        decl("char * volatile (*(* const bar)[])(int );"),
        Pointer(
            Box::new(Array(
                Box::new(Pointer(
                    Box::new(Function(FunctionType {
                        return_type: Box::new(Pointer(
                            Box::new(Char(true)),
                            Qualifiers {
                                volatile: true,
                                ..Qualifiers::default()
                            }
                        )),
                        params: vec![Variable {
                            ctype: Int(true),
                            storage_class: Default::default(),
                            id: Default::default(),
                            qualifiers: Qualifiers::NONE,
                        }
                        .insert()],
                        varargs: false,
                    })),
                    Qualifiers::default()
                )),
                ArrayType::Unbounded,
            )),
            Qualifiers {
                c_const: true,
                ..Qualifiers::default()
            }
        )
    ));
    // cdecl: declare foo as pointer to function (void) returning pointer to array 3 of int
    assert!(match_type(
        decl("int (*(*foo)(void))[];"),
        Pointer(
            Box::new(Function(FunctionType {
                return_type: Box::new(Pointer(
                    Box::new(Array(Box::new(Int(true)), ArrayType::Unbounded)),
                    Qualifiers::default()
                )),
                params: vec![Variable {
                    ctype: Void,
                    storage_class: Default::default(),
                    id: Default::default(),
                    qualifiers: Default::default(),
                }
                .insert()],
                varargs: false,
            })),
            Qualifiers::default()
        )
    ));
    // cdecl: declare bar as volatile pointer to array 64 of const int
    assert!(match_type(
        decl("const int (* volatile bar)[];"),
        Pointer(
            Box::new(Array(Box::new(Int(true)), ArrayType::Unbounded)),
            Qualifiers {
                volatile: true,
                ..Qualifiers::default()
            }
        )
    ));
    // cdecl: declare x as function returning pointer to array 5 of pointer to function returning char
    assert!(match_type(
        decl("char (*(*x())[])();"),
        Function(FunctionType {
            return_type: Box::new(Pointer(
                Box::new(Array(
                    Box::new(Pointer(
                        Box::new(Function(FunctionType {
                            return_type: Box::new(Char(true)),
                            params: vec![],
                            varargs: false,
                        })),
                        Qualifiers::default()
                    )),
                    ArrayType::Unbounded
                )),
                Qualifiers::default()
            )),
            params: vec![],
            varargs: false,
        })
    ));
}
#[test]
// multiple declarators in one declaration are equivalent to separate declarations
fn test_multiple() {
    assert_same("int i, j, k;", "int i; int j; int k;");
    assert_same(
        "char *p, c, **pp, f();",
        // fixed a typo: the right-hand side previously declared `**p`,
        // which redeclares `p` instead of matching `**pp` on the left
        "char *p; char c; char **pp; char f();",
    );
}
#[test]
// declarations with no type specifier default to `int`
fn test_no_specifiers() {
    assert_same("i, j, k;", "int i, j, k;");
    assert_same("*p, c, **pp, f();", "int *p, c, **pp, f();");
}
#[test]
// syntactically or semantically invalid declarations are rejected
fn test_decl_errors() {
    // no semicolon
    assert!(decl("int").is_err());
    assert!(decl("int i").is_err());
    // type error: cannot have array of functions or function returning array
    assert!(decl("int f()[];").is_err());
    assert!(decl("int f[]();").is_err());
    assert!(decl("int f()();").is_err());
    assert!(decl("int (*f)[;").is_err());
    // duplicate parameter name
    assert!(decl("int f(int a, int a);").is_err());
}
#[test]
fn default_type_specifier_warns() {
    // Each declaration relying on implicit-int should parse with no errors,
    // one warning, and one resulting declaration.
    for src in &[
        "i;",
        "f();",
        "a[1];",
        "(*fp)();",
        "(i);",
        "((*f)());",
        "(a[1]);",
        "(((((((((i)))))))));",
    ] {
        assert_errs_decls(src, 0, 1, 1);
    }
}
#[test]
fn extern_redeclaration_of_static_fn_does_not_error() {
    // A `static` function may later be redeclared with (implicit or
    // explicit) `extern` linkage without error.
    assert_same(
        "static int f(); int f();",
        "static int f(); extern int f();",
    );
    // However the opposite should still error
    assert_errs_decls(
        "extern int f();
        static int f();",
        1,
        0,
        2,
    );
}
#[test]
fn enum_declaration() {
    // Malformed enums: missing tag/body, empty body, missing semicolon.
    assert!(decl("enum;").is_err());
    assert!(decl("enum e;").is_err());
    assert!(decl("enum e {};").is_err());
    assert!(decl("enum e { A }").is_err());
    // An anonymous enum with no declarator produces no declaration.
    assert!(maybe_decl("enum { A };").is_none());
    // First enumerator defaults to 0.
    assert!(match_type(
        decl("enum { A } E;"),
        Type::Enum(None, vec![("A".into(), 0)])
    ));
    // An enumerator without an initializer is previous + 1.
    assert!(match_type(
        decl("enum e { A = 1, B } E;"),
        Type::Enum(Some("e".into()), vec![("A".into(), 1), ("B".into(), 2)])
    ));
    // Mixed explicit (possibly negative) and implicit values.
    assert!(match_type(
        decl("enum { A = -5, B, C = 2, D } E;"),
        Type::Enum(
            None,
            vec![
                ("A".into(), -5),
                ("B".into(), -4),
                ("C".into(), 2),
                ("D".into(), 3)
            ]
        )
    ));
}
#[test]
fn typedef_signed() {
    // `unsigned` alone normalizes to `unsigned int`, both in the typedef
    // itself and in later uses of the typedef'd name.
    let mut ds = decls("typedef unsigned uint; uint i;").into_iter();
    assert_eq!(
        ds.next().unwrap().unwrap().to_string(),
        "typedef unsigned int uint;"
    );
    assert_decl_display("unsigned int i;", &ds.next().unwrap().unwrap().to_string());
}
#[test]
fn bitfields() {
    // Named bit-fields — including several in one declaration — parse fine.
    for ok in &["struct { int a:5; } b;", "struct { int a:5, b:6; } c;"] {
        assert!(decl(ok).is_ok());
    }
    // A lone unnamed bit-field declaration and a storage class on a
    // bit-field member are both rejected.
    for bad in &["struct { int:5; } a;", "struct { extern int a:5; } d;"] {
        assert!(decl(bad).is_err());
    }
}
#[test]
fn lol() {
    // Stress test: a function taking a function pointer, plus a cast of a
    // function name through a fully spelled-out function-pointer type.
    // All three definitions must parse without error.
    let lol = "
int *jynelson(int(*fp)(int)) {
    return 0;
}
int f(int i) {
    return 0;
}
int main() {
    return *((int*(*)(int(*)(int)))jynelson)(&f);
}
";
    assert!(parse_all(lol).iter().all(Result::is_ok));
}
#[test]
fn redefinition_is_err() {
    // Defining the same object twice: 1 error, 0 warnings, 2 declarations.
    assert_errs_decls("int i = 1, i = 2;", 1, 0, 2);
}
#[test]
fn void() {
    // `(void)` and `(int)` prototypes round-trip unchanged.
    assert_no_change("extern int f(void);");
    assert_no_change("extern int f(int);");
    // `void` may only appear as the sole, unnamed parameter.
    assert!(decl("int f(int, void);").is_err());
    assert!(decl("int f(void, int);").is_err());
    assert!(decl("int f(void, void);").is_err());
    // A definition (unlike a declaration) may not omit the parameter name.
    assert!(decl("int f(int) { return 1; }").is_err());
    assert_decl_display(
        "int f(void) { return 1; }",
        "extern int f(void) {\n    return (int)(1);\n}\n",
    );
}
}
|
// svd2rust-generated reader/writer handles for this 32-bit register.
#[doc = "Reader of register RCC_SDMMC12CKSELR"]
pub type R = crate::R<u32, super::RCC_SDMMC12CKSELR>;
#[doc = "Writer for register RCC_SDMMC12CKSELR"]
pub type W = crate::W<u32, super::RCC_SDMMC12CKSELR>;
#[doc = "Register RCC_SDMMC12CKSELR `reset()`'s with value 0x03"]
impl crate::ResetValue for super::RCC_SDMMC12CKSELR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // 0x03 => SDMMC12SRC = 0b011, i.e. hsi_ker_ck after hardware reset.
        0x03
    }
}
// Enumerated values of the 3-bit SDMMC12SRC kernel-clock-select field;
// discriminants equal the raw register values.
#[doc = "SDMMC12SRC\n\nValue on reset: 3"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum SDMMC12SRC_A {
    #[doc = "0: hclk6 clock selected as kernel\r\n peripheral clock (default after\r\n reset)"]
    B_0X0 = 0,
    #[doc = "1: pll3_r_ck clock selected as kernel\r\n peripheral clock"]
    B_0X1 = 1,
    #[doc = "2: pll4_p_ck clock selected as kernel\r\n peripheral clock"]
    B_0X2 = 2,
    #[doc = "3: hsi_ker_ck clock selected as kernel\r\n peripheral clock"]
    B_0X3 = 3,
}
impl From<SDMMC12SRC_A> for u8 {
    #[inline(always)]
    fn from(variant: SDMMC12SRC_A) -> Self {
        // The enum is #[repr(u8)] with hardware-value discriminants,
        // so a plain cast is the conversion.
        variant as _
    }
}
#[doc = "Reader of field `SDMMC12SRC`"]
pub type SDMMC12SRC_R = crate::R<u8, SDMMC12SRC_A>;
impl SDMMC12SRC_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, SDMMC12SRC_A> {
        use crate::Variant::*;
        // The field is 3 bits wide, so raw values 4..=7 have no enum
        // variant and are reported as reserved (`Res`).
        match self.bits {
            0 => Val(SDMMC12SRC_A::B_0X0),
            1 => Val(SDMMC12SRC_A::B_0X1),
            2 => Val(SDMMC12SRC_A::B_0X2),
            3 => Val(SDMMC12SRC_A::B_0X3),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `B_0X0`"]
    #[inline(always)]
    pub fn is_b_0x0(&self) -> bool {
        *self == SDMMC12SRC_A::B_0X0
    }
    #[doc = "Checks if the value of the field is `B_0X1`"]
    #[inline(always)]
    pub fn is_b_0x1(&self) -> bool {
        *self == SDMMC12SRC_A::B_0X1
    }
    #[doc = "Checks if the value of the field is `B_0X2`"]
    #[inline(always)]
    pub fn is_b_0x2(&self) -> bool {
        *self == SDMMC12SRC_A::B_0X2
    }
    #[doc = "Checks if the value of the field is `B_0X3`"]
    #[inline(always)]
    pub fn is_b_0x3(&self) -> bool {
        *self == SDMMC12SRC_A::B_0X3
    }
}
#[doc = "Write proxy for field `SDMMC12SRC`"]
pub struct SDMMC12SRC_W<'a> {
    // Borrow of the register writer this proxy mutates.
    w: &'a mut W,
}
impl<'a> SDMMC12SRC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SDMMC12SRC_A) -> &'a mut W {
        // Safe in practice: every enum discriminant fits the 3-bit field.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "hclk6 clock selected as kernel peripheral clock (default after reset)"]
    #[inline(always)]
    pub fn b_0x0(self) -> &'a mut W {
        self.variant(SDMMC12SRC_A::B_0X0)
    }
    #[doc = "pll3_r_ck clock selected as kernel peripheral clock"]
    #[inline(always)]
    pub fn b_0x1(self) -> &'a mut W {
        self.variant(SDMMC12SRC_A::B_0X1)
    }
    #[doc = "pll4_p_ck clock selected as kernel peripheral clock"]
    #[inline(always)]
    pub fn b_0x2(self) -> &'a mut W {
        self.variant(SDMMC12SRC_A::B_0X2)
    }
    #[doc = "hsi_ker_ck clock selected as kernel peripheral clock"]
    #[inline(always)]
    pub fn b_0x3(self) -> &'a mut W {
        self.variant(SDMMC12SRC_A::B_0X3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 0:2 then splice in the (masked) new value.
        self.w.bits = (self.w.bits & !0x07) | ((value as u32) & 0x07);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:2 - SDMMC12SRC"]
    #[inline(always)]
    pub fn sdmmc12src(&self) -> SDMMC12SRC_R {
        // Field occupies the low 3 bits of the register.
        SDMMC12SRC_R::new((self.bits & 0x07) as u8)
    }
}
impl W {
    #[doc = "Bits 0:2 - SDMMC12SRC"]
    #[inline(always)]
    pub fn sdmmc12src(&mut self) -> SDMMC12SRC_W {
        SDMMC12SRC_W { w: self }
    }
}
|
use crate::{parser::Parser, syntax::SyntaxKind};
use drop_bomb::DropBomb;
#[derive(Debug)]
pub struct Marker {
    // Position handed back to the parser on complete/abandon — presumably an
    // index into the parser's event buffer; TODO confirm against Parser.
    pos: u32,
    // Panics on drop unless `complete` or `abandon` defused it, catching
    // markers that are silently leaked.
    bomb: DropBomb,
}
impl Marker {
    // Creates an armed marker; the caller MUST later call `complete` or
    // `abandon`, otherwise the DropBomb panics on drop.
    pub(super) fn new(pos: u32) -> Marker {
        Marker {
            pos,
            bomb: DropBomb::new("Marker must be either completed or abandoned"),
        }
    }
    // Finishes the node started at `pos` with the given kind and returns a
    // handle that can later be wrapped via `precede`.
    pub fn complete(mut self, p: &mut Parser, kind: SyntaxKind) -> CompletedMarker {
        self.bomb.defuse();
        p.complete(self.pos, kind);
        CompletedMarker::new(self.pos, kind)
    }
    // Discards the marker without producing a node.
    pub fn abandon(mut self, p: &mut Parser) {
        self.bomb.defuse();
        p.abandon(self.pos);
    }
}
// Handle to an already-completed node: (event position, node kind).
#[derive(Debug)]
pub struct CompletedMarker(u32, SyntaxKind);
impl CompletedMarker {
    fn new(pos: u32, kind: SyntaxKind) -> Self {
        CompletedMarker(pos, kind)
    }
    // Opens a new marker that will wrap this completed node as a child.
    pub fn precede(self, p: &mut Parser) -> Marker {
        Marker::new(p.precede(self.0))
    }
    // Kind of the completed node.
    pub fn kind(&self) -> SyntaxKind {
        self.1
    }
}
|
use crate::core::assets::protocol::{AssetLoadResult, AssetProtocol};
use std::str::from_utf8;
use svg::{
node::element::tag::{Type, SVG},
parser::Event,
};
// An SVG document with its width/height resolved to absolute pixel values.
pub struct SvgImageAsset {
    // UTF-8 SVG source after the 100%-dimension rewrite in `on_load`.
    bytes: Vec<u8>,
    width: usize,
    height: usize,
}
impl SvgImageAsset {
    // Raw SVG source bytes.
    pub fn bytes(&self) -> &[u8] {
        &self.bytes
    }
    // Width in pixels, derived from the root viewBox.
    pub fn width(&self) -> usize {
        self.width
    }
    // Height in pixels, derived from the root viewBox.
    pub fn height(&self) -> usize {
        self.height
    }
}
// Stateless loader registered for the "svg" asset protocol.
pub struct SvgImageAssetProtocol;
impl AssetProtocol for SvgImageAssetProtocol {
    fn name(&self) -> &str {
        "svg"
    }
    /// Parses SVG bytes, derives pixel dimensions from the root `viewBox`,
    /// and rewrites `width="100%"` / `height="100%"` to those values.
    ///
    /// Panics (like the original loader) on non-UTF-8 input, a missing
    /// `viewBox`, or non-integer viewBox numbers.
    fn on_load(&mut self, data: Vec<u8>) -> AssetLoadResult {
        let content = from_utf8(&data).unwrap();
        let mut width = 0;
        let mut height = 0;
        for event in svg::read(&content).unwrap() {
            if let Event::Tag(SVG, Type::Start, attributes) = event {
                // Per the SVG spec, viewBox is "min-x min-y width height"
                // (numbers may be separated by whitespace and/or commas).
                // The 3rd and 4th numbers ARE the dimensions — subtracting
                // min-x/min-y (as before) is only correct when they are 0.
                let view_box = attributes.get("viewBox").unwrap();
                let mut iter = view_box
                    .split(|c: char| c.is_whitespace() || c == ',')
                    .filter(|s| !s.is_empty());
                let _min_x = iter.next().unwrap().parse::<isize>().unwrap();
                let _min_y = iter.next().unwrap().parse::<isize>().unwrap();
                width = iter.next().unwrap().parse::<isize>().unwrap() as usize;
                height = iter.next().unwrap().parse::<isize>().unwrap() as usize;
                break;
            }
        }
        let content = content.replace("width=\"100%\"", &format!("width=\"{}\"", width));
        let content = content.replace("height=\"100%\"", &format!("height=\"{}\"", height));
        AssetLoadResult::Data(Box::new(SvgImageAsset {
            bytes: content.into_bytes(),
            width,
            height,
        }))
    }
}
|
use hyper::{Response, Request, Client, Body};
use std::result::Result;
type HttpClient = Client<hyper::client::HttpConnector>;
use futures::stream::{TryStreamExt};
use async_std::fs::File;
use async_std::io::prelude::*;
use chrono::Utc;
// Persists an incoming HTTP request (start line, headers, body) to a file
// under ./requests/, then rebuilds and returns an equivalent request.
// OCSP requests are passed through untouched (their bodies are binary).
// NOTE(review): the body is fully buffered in memory and `.unwrap()`ed —
// a body read error will panic; confirm that is acceptable for this proxy.
pub async fn store_request(req: Request<Body>) -> Request<Body> {
    let ct = match req.headers().contains_key(hyper::header::CONTENT_TYPE) {
        true => format!("{:?}", req.headers()[hyper::header::CONTENT_TYPE]),
        false => "".to_string()
    };
    if ct.contains("application/ocsp-request") {
        return req;
    }
    // Consume the request; it is reassembled from `parts` + new body below.
    let (parts, body) = req.into_parts();
    let first_line = format!("{} {} {:?}\r\n", parts.method, parts.uri, parts.version);
    let mut headers_lines = String::new();
    for (key, val) in &parts.headers {
        headers_lines += &format!("{}: {}\r\n", key.as_str(), String::from_utf8_lossy((*val).as_bytes()));
    }
    // Collect the streamed body into a single Vec<u8>.
    let entire_body = body
        .try_fold(Vec::new(), |mut data, chunk| async move {
            data.extend_from_slice(&chunk);
            Ok(data)
        })
        .await.unwrap();
    // Non-UTF-8 bodies are replaced by a placeholder — this also replaces
    // the body that gets forwarded, not just the stored copy.
    let body_string = match String::from_utf8(entire_body) {
        Ok(v) => v,
        _ => "Body contains not UTF-8 symbols.".to_string()
    };
    let now = Utc::now();
    let host = match parts.uri.host() {
        Some(h) => h,
        None => "localhost"
    };
    // File name layout: uuid|||method|||host|||timestamp-millis.
    let file_name = format!("./requests/{}|||{}|||{}|||{}", uuid::Uuid::new_v4(), parts.method, host, now.timestamp_millis());
    let stored_req = format!("{}{}\r\n{}", first_line, headers_lines, body_string);
    let body = Body::from(body_string);
    let req = Request::from_parts(parts, body);
    // Failure to persist is logged but never blocks forwarding the request.
    let mut file = match File::create(file_name).await {
        Ok(f) => f,
        Err(e) => {
            println!("Не удалось сохранить запрос =c");
            println!("Информация об ошибке: {}", e);
            return req
        },
    };
    if let Err(e) = file.write_all(stored_req.as_bytes()).await {
        println!("Не удалось сохранить запрос =c");
        println!("Информация об ошибке: {}", e);
    }
    req
}
/// Forwards `req` through `client`, persisting a copy of it on disk first
/// (via [`store_request`]).
pub async fn http_request(client: HttpClient, req: Request<Body>) -> Result<Response<Body>, hyper::Error> {
    let stored = store_request(req).await;
    client.request(stored).await
}
|
//! Tests auto-converted from "sass-spec/spec/libsass-todo-issues"
#[allow(unused)]
use super::rsass;
// From "sass-spec/spec/libsass-todo-issues/issue_1026.hrx"
#[test]
#[ignore] // wrong result
fn issue_1026() {
assert_eq!(
rsass(
"div {\
\n a {\
\n /**\
\n * a\
\n * multiline\
\n * comment\
\n */\
\n top: 10px;\
\n }\
\n}\
\n"
)
.unwrap(),
"div a {\
\n /**\
\n * a\
\n * multiline\
\n * comment\
\n */\
\n top: 10px;\
\n}\
\n"
);
}
// From "sass-spec/spec/libsass-todo-issues/issue_1096.hrx"
#[test]
#[ignore] // wrong result
fn issue_1096() {
    // CRLF handling inside @import url(...): an escaped newline joins the
    // parts, an unescaped one becomes "\a " or a space depending on quoting.
    assert_eq!(
        rsass(
            "// line-endings in this file must be CRLF\r\
            \n@import url(\"foo\\\r\
            \nbar\");\r\
            \n@import url(\"foo\r\
            \nbar\");\r\
            \n@import url(foo\r\
            \nbar);\r\
            \n"
        )
        .unwrap(),
        "@import url(\"foobar\");\
        \n@import url(\"foo\\a bar\");\
        \n@import url(foo bar);\
        \n"
    );
}
mod issue_1694;
mod issue_1732;
// From "sass-spec/spec/libsass-todo-issues/issue_1763.hrx"
#[test]
#[ignore] // wrong result
fn issue_1763() {
    // Mixed plain-CSS and Sass imports in one @import list: the .css entries
    // stay as @import rules, while the .scss file is inlined.
    assert_eq!(
        rsass(
            "@import \"first.css\", \"second.css\" (max-width: 400px);\
            \n@import \"first.scss\", \"second.scss\" (max-width: 400px);\
            \n"
        )
        .unwrap(),
        "@import \"first.css\";\
        \n@import \"second.css\" (max-width: 400px);\
        \n@import \"second.scss\" (max-width: 400px);\
        \nfoo {\
        \n  bar: baz;\
        \n}\
        \n"
    );
}
mod issue_1798;
mod issue_1801;
// Ignoring "issue_2016", tests with expected error not implemented yet.
mod issue_2023;
// From "sass-spec/spec/libsass-todo-issues/issue_2051.hrx"
// Ignoring "issue_2051", error tests are not supported yet.
// From "sass-spec/spec/libsass-todo-issues/issue_2096.hrx"
#[test]
fn issue_2096() {
    // A plain-CSS @import inside a mixin is emitted where the mixin is
    // included, not swallowed.
    assert_eq!(
        rsass(
            "@mixin foo() {\
            \n  @import \"https://foo\";\
            \n}\
            \n@include foo;\
            \n"
        )
        .unwrap(),
        "@import \"https://foo\";\
        \n"
    );
}
// From "sass-spec/spec/libsass-todo-issues/issue_221260.hrx"
// Ignoring "issue_221260", error tests are not supported yet.
// Ignoring "issue_221262.hrx", not expected to work yet.
// Ignoring "issue_221264", tests with expected error not implemented yet.
// Ignoring "issue_221267", tests with expected error not implemented yet.
// Ignoring "issue_221286", tests with expected error not implemented yet.
// Ignoring "issue_221292.hrx", not expected to work yet.
mod issue_2235;
mod issue_2295;
// From "sass-spec/spec/libsass-todo-issues/issue_238764.hrx"
// Ignoring "issue_238764", error tests are not supported yet.
// Ignoring "issue_245442", tests with expected error not implemented yet.
// Ignoring "issue_245446", tests with expected error not implemented yet.
// From "sass-spec/spec/libsass-todo-issues/issue_2818.hrx"
#[test]
#[ignore] // unexpected error
fn issue_2818() {
    // Spreading a map as keyword arguments into get-function('scale-color').
    assert_eq!(
        rsass(
            "$map: (\"lightness\": 10%, \"saturation\": 10%);\
            \n$base: call(get-function(\'scale-color\'), #dedede, $map...);\
            \ntest { color: $base; }\
            \n"
        )
        .unwrap(),
        "test {\
        \n  color: #e4dede;\
        \n}\
        \n"
    );
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use crypto::{BatchMerkleProof, ElementHasher, Hasher};
use math::{log2, FieldElement};
use utils::{
collections::Vec, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable,
SliceReader,
};
// QUERIES
// ================================================================================================
/// Decommitments to evaluations of a set of functions at multiple points.
///
/// Given a set of functions evaluated over a domain *D*, a commitment is assumed to be a Merkle
/// tree where a leaf at position *i* contains evaluations of all functions at *x<sub>i</sub>*.
/// Thus, a query (i.e. a single decommitment) for position *i* includes evaluations of all
/// functions at *x<sub>i</sub>*, accompanied by a Merkle authentication path from the leaf *i* to
/// the tree root.
///
/// This struct can contain one or more queries. In cases when more than one query is stored,
/// Merkle authentication paths are compressed to remove redundant nodes.
///
/// Internally, all Merkle paths and query values are stored as a sequence of bytes. Thus, to
/// retrieve query values and the corresponding Merkle authentication paths,
/// [parse()](Queries::parse) function should be used.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Queries {
    // Serialized internal nodes of the batch Merkle proof (leaves are
    // reconstructed from hashed query values in `parse`).
    paths: Vec<u8>,
    // Serialized field elements: num_queries * values_per_query values.
    values: Vec<u8>,
}
impl Queries {
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /// Returns queries constructed from evaluations of a set of functions at some number of points
    /// in a domain and their corresponding Merkle authentication paths.
    ///
    /// For each evaluation point, the same number of values must be provided, and a hash of
    /// these values must be equal to a leaf node in the corresponding Merkle authentication path.
    ///
    /// # Panics
    /// Panics if:
    /// * No queries were provided (`query_values` is an empty vector).
    /// * Any of the queries does not contain any evaluations.
    /// * Not all queries contain the same number of evaluations.
    pub fn new<H: Hasher, E: FieldElement>(
        merkle_proof: BatchMerkleProof<H>,
        query_values: Vec<Vec<E>>,
    ) -> Self {
        assert!(!query_values.is_empty(), "query values cannot be empty");
        let elements_per_query = query_values[0].len();
        assert_ne!(
            elements_per_query, 0,
            "a query must contain at least one evaluation"
        );
        // TODO: add debug check that values actually hash into the leaf nodes of the batch proof
        // concatenate all elements together into a single vector of bytes
        let num_queries = query_values.len();
        let mut values = Vec::with_capacity(num_queries * elements_per_query * E::ELEMENT_BYTES);
        for elements in query_values.iter() {
            assert_eq!(
                elements.len(),
                elements_per_query,
                "all queries must contain the same number of evaluations"
            );
            values.write(elements);
        }
        // serialize internal nodes of the batch Merkle proof; we care about internal nodes only
        // because leaf nodes can be reconstructed from hashes of query values
        let paths = merkle_proof.serialize_nodes();
        Queries { paths, values }
    }
    // PARSER
    // --------------------------------------------------------------------------------------------
    /// Convert internally stored bytes into a set of query values and the corresponding Merkle
    /// authentication paths.
    ///
    /// # Panics
    /// Panics if:
    /// * `domain_size` is not a power of two.
    /// * `num_queries` is zero.
    /// * `values_per_query` is zero.
    pub fn parse<H, E>(
        self,
        domain_size: usize,
        num_queries: usize,
        values_per_query: usize,
    ) -> Result<(BatchMerkleProof<H>, Vec<Vec<E>>), DeserializationError>
    where
        E: FieldElement,
        H: ElementHasher<BaseField = E::BaseField>,
    {
        assert!(
            domain_size.is_power_of_two(),
            "domain size must be a power of two"
        );
        assert!(num_queries > 0, "there must be at least one query");
        assert!(
            values_per_query > 0,
            "a query must contain at least one value"
        );
        // make sure we have enough bytes to read the expected number of queries
        let num_query_bytes = E::ELEMENT_BYTES * values_per_query;
        let expected_bytes = num_queries * num_query_bytes;
        if self.values.len() != expected_bytes {
            return Err(DeserializationError::InvalidValue(format!(
                "expected {} query value bytes, but was {}",
                expected_bytes,
                self.values.len()
            )));
        }
        let mut hashed_queries = vec![H::Digest::default(); num_queries];
        let mut query_values = Vec::with_capacity(num_queries);
        // read bytes corresponding to each query, convert them into field elements,
        // and also hash them to build leaf nodes of the batch Merkle proof
        let mut reader = SliceReader::new(&self.values);
        for query_hash in hashed_queries.iter_mut() {
            let elements = E::read_batch_from(&mut reader, values_per_query)?;
            *query_hash = H::hash_elements(&elements);
            query_values.push(elements);
        }
        // build batch Merkle proof
        let mut reader = SliceReader::new(&self.paths);
        let tree_depth = log2(domain_size) as u8;
        let merkle_proof = BatchMerkleProof::deserialize(&mut reader, hashed_queries, tree_depth)?;
        // trailing bytes after the proof indicate a malformed serialization
        if reader.has_more_bytes() {
            return Err(DeserializationError::UnconsumedBytes);
        }
        Ok((merkle_proof, query_values))
    }
}
impl Serializable for Queries {
    /// Serializes `self` and writes the resulting bytes into the `target`.
    // Layout: [values_len: u32][values][paths_len: u32][paths].
    // NOTE(review): lengths are cast to u32 — payloads over u32::MAX bytes
    // would silently truncate; confirm that sizes are bounded upstream.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        // write value bytes
        target.write_u32(self.values.len() as u32);
        target.write_u8_slice(&self.values);
        // write path bytes
        target.write_u32(self.paths.len() as u32);
        target.write_u8_slice(&self.paths);
    }
}
impl Deserializable for Queries {
    /// Reads a query struct from the specified `source` and returns the result
    ///
    /// # Errors
    /// Returns an error if a valid query struct could not be read from the specified source.
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        // read values (length-prefixed, mirroring `write_into`)
        let num_value_bytes = source.read_u32()?;
        let values = source.read_u8_vec(num_value_bytes as usize)?;
        // read paths
        let num_paths_bytes = source.read_u32()?;
        let paths = source.read_u8_vec(num_paths_bytes as usize)?;
        Ok(Queries { paths, values })
    }
}
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// UsageLogsByIndexHour : Number of indexed logs for each hour and index for a given organization.
// Generated model: every field is optional because the API may omit any of
// them; omitted fields are also skipped during serialization.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UsageLogsByIndexHour {
    /// The total number of indexed logs for the queried hour.
    #[serde(rename = "event_count", skip_serializing_if = "Option::is_none")]
    pub event_count: Option<i64>,
    /// The hour for the usage.
    #[serde(rename = "hour", skip_serializing_if = "Option::is_none")]
    pub hour: Option<String>,
    /// The index ID for this usage.
    #[serde(rename = "index_id", skip_serializing_if = "Option::is_none")]
    pub index_id: Option<String>,
    /// The user specified name for this index ID.
    #[serde(rename = "index_name", skip_serializing_if = "Option::is_none")]
    pub index_name: Option<String>,
    /// The retention period (in days) for this index ID.
    #[serde(rename = "retention", skip_serializing_if = "Option::is_none")]
    pub retention: Option<i64>,
}
impl UsageLogsByIndexHour {
    /// Number of indexed logs for each hour and index for a given organization.
    /// Returns an instance with every field unset.
    pub fn new() -> UsageLogsByIndexHour {
        UsageLogsByIndexHour {
            event_count: None,
            hour: None,
            index_id: None,
            index_name: None,
            retention: None,
        }
    }
}
/// `Default` mirrors `new()` (all fields `None`), as Clippy's
/// `new_without_default` lint recommends for argument-less constructors.
impl Default for UsageLogsByIndexHour {
    fn default() -> Self {
        Self::new()
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
|
use std::collections::HashSet;
use parser::{Ast, Block};
/// Collects the element names targeted by CSS selectors in `ast` that carry
/// no class constraint (i.e. bare tag selectors like `div` or `p`).
pub fn visitor(ast: &Ast) -> HashSet<String> {
    let mut res = HashSet::new();
    for block in ast.blocks.iter() {
        if let &Block::Css(_, ref rules) = block {
            for rule in rules.iter() {
                for sel in rule.selectors.iter() {
                    // Idiomatic replacements: `is_empty()` for `len() == 0`
                    // and `if let` for `is_some()` + `unwrap()`.
                    if sel.classes.is_empty() {
                        if let Some(elem) = sel.element.as_ref() {
                            res.insert(elem.clone());
                        }
                    }
                }
            }
        }
    }
    res
}
|
use std::time::Instant;
// Aggregated geolocation result for one input host/IP.
#[derive(Debug)]
pub struct GeoIpResponse {
    pub ip: GeoIpDataResponse,
    pub city: GeoIpCityResponse,
    pub asn: GeoIpAsnResponse,
}
// Resolved address plus its reverse-DNS name ("No PTR" when absent).
#[derive(Debug)]
pub struct GeoIpDataResponse {
    pub ip: String,
    pub ptr: String,
}
// City-level GeoIP data; fields fall back to "No City"/"No State"/"No Country"
// when the database has no English name for the record.
#[derive(Debug)]
pub struct GeoIpCityResponse {
    pub name: String,
    pub state: String,
    pub country: String,
    pub country_iso_code: String,
}
// Autonomous-system data; `number` is formatted as "AS<n>".
#[derive(Debug)]
pub struct GeoIpAsnResponse {
    pub number: String,
    pub name: String,
}
pub fn ip_to_geoip(ips: Vec<&str>, reader_asn: &maxminddb::Reader<Vec<u8>>, reader_city: &maxminddb::Reader<Vec<u8>>) -> Result<Vec<GeoIpResponse>, std::io::Error> {
let mut array_geoip: Vec<GeoIpResponse> = vec![];
for ip_addr in ips.iter() {
let now = Instant::now();
log::info!("Geolocating IP {}", ip_addr);
let ip_result = dns_lookup::lookup_host(ip_addr);
if let Err(e) = ip_result {
log::info!("Cannot resolve IP for domain: {}, error: {}", ip_addr, e);
return Err(e);
}
let ip = *ip_result.unwrap().first().unwrap();
if ip.to_string().ne(ip_addr) {
log::info!("Resolved DNS {} to IP {}", ip_addr, ip.to_string())
}
let ptr_dns = match dns_lookup::lookup_addr(&ip) {
Ok(ptr) => ptr,
Err(e) => {
log::error!("Couldn't resolve PTR of IP {}. Error: {}", &ip, e);
ip.to_string()
}
};
let ptr = if ptr_dns.eq(&ip.to_string()) { "No PTR".to_string() } else { ptr_dns };
let asn_option: Result<maxminddb::geoip2::Asn, maxminddb::MaxMindDBError> = reader_asn.lookup(ip);
let city_option: Result<maxminddb::geoip2::City, maxminddb::MaxMindDBError> = reader_city.lookup(ip);
let mut city_name: String = "No City".to_string();
let mut state_name: String = "No State".to_string();
let mut country_name: String = "No Country".to_string();
let mut country_iso_code: String = "No Country".to_string();
let mut asn_number: String = "No ASN".to_string();
let mut asn_name: String = "No ASN".to_string();
match city_option {
Ok(city) => {
if let Some(i) = city.city {
city_name = i.names.as_ref().unwrap().get("en").unwrap().to_string()
} else {
log::error!("No City found for IP: {}", ip);
}
if let Some(i) = &city.subdivisions {
state_name = i.first().unwrap().names.as_ref().unwrap().get("en").unwrap().to_string()
} else {
log::error!("No State found for IP: {}", ip);
}
if let Some(i) = &city.country {
country_name = i.names.as_ref().unwrap().get("en").unwrap().to_string()
} else {
log::error!("No Country found for IP: {}", ip);
}
if let Some(i) = &city.country {
country_iso_code = i.iso_code.unwrap().to_owned()
} else {
log::error!("No Country ISO code found for IP: {}", ip);
}
}
Err(err) => log::error!("An error happened while searching City for IP: {}, {}", ip, err),
}
match asn_option {
Ok(asn) => {
asn_number = format!("AS{}", asn.autonomous_system_number.unwrap_or(0).to_string());
asn_name = asn.autonomous_system_organization.unwrap_or("No ASN name").to_string();
}
Err(err) => log::error!("An error happened while searching ASN for IP: {}, {}", ip, err),
}
let response = GeoIpResponse {
ip: GeoIpDataResponse {
ip: (ip.to_string()).parse().unwrap(),
ptr: ptr.to_string(),
},
city: GeoIpCityResponse {
name: city_name,
state: state_name,
country: country_name,
country_iso_code,
},
asn: GeoIpAsnResponse {
number: asn_number,
name: asn_name,
},
};
array_geoip.push(response);
log::info!("Done geolocalization of IP: {}. Elapsed time: {:?}", ip, now.elapsed());
}
return Ok(array_geoip);
} |
use cards::{Card, TarockCard, Tarock1, Tarock21, TarockSkis, SuitCard,
Clubs, Spades, Hearts, Diamonds, King, CardSuit, CARD_TAROCK_PAGAT};
use player::Player;
use std::collections::HashSet;
use contracts::Contract;
// All five bonus kinds, in a fixed order convenient for iteration in tests.
pub static BONUS_TYPES: [BonusType, ..5] = [
    Trula,
    Kings,
    KingUltimo,
    PagatUltimo,
    Valat,
];
// Type of point bonus.
#[deriving(Clone, Show, Eq, PartialEq, Hash)]
pub enum BonusType {
    Trula,
    Kings,
    KingUltimo,
    PagatUltimo,
    Valat
}
impl BonusType {
    // Base (unannounced) point value of the bonus.
    pub fn value(&self) -> int {
        match *self {
            Trula => 10,
            Kings => 10,
            KingUltimo => 10,
            PagatUltimo => 25,
            Valat => 250,
        }
    }
}
// Bonuses are additional ways to earn points.
#[deriving(Clone, Show)]
pub enum Bonus {
    Unannounced(BonusType),
    Announced(BonusType),
}
impl Bonus {
    // Value of bonus.
    // An announced bonus is worth 2 times more than an unannounced one.
    pub fn value(&self) -> int {
        match *self {
            Unannounced(bt) => bt.value(),
            Announced(bt) => 2 * bt.value(),
        }
    }
    // Returns true if bonus is announced.
    pub fn is_announced(&self) -> bool {
        match *self {
            Unannounced(_) => false,
            Announced(_) => true,
        }
    }
}
// Checks if cards contain a trula (pagat + mond + skis together).
pub fn has_trula(cards: &[Card]) -> bool {
    let mut pagat = false;
    let mut mond = false;
    let mut skis = false;
    for card in cards.iter() {
        match *card {
            TarockCard(Tarock1) => pagat = true,
            TarockCard(Tarock21) => mond = true,
            TarockCard(TarockSkis) => skis = true,
            _ => {}
        }
        // Early exit once all three trump cards have been seen.
        if pagat && mond && skis {
            return true
        }
    }
    false
}
// Returns true if bonuses are allowed to be announced for the contract.
// Only "normal" contracts admit bonus announcements.
pub fn bonuses_allowed(contract: &Contract) -> bool {
    contract.is_normal()
}
// Checks if cards contain all four kings.
pub fn has_kings(cards: &[Card]) -> bool {
    let mut clubs = false;
    let mut spades = false;
    let mut hearts = false;
    let mut diamonds = false;
    for card in cards.iter() {
        match *card {
            SuitCard(King, Clubs) => clubs = true,
            SuitCard(King, Spades) => spades = true,
            SuitCard(King, Hearts) => hearts = true,
            SuitCard(King, Diamonds) => diamonds = true,
            _ => {}
        }
        // Early exit once every suit's king has been seen.
        if clubs && spades && hearts && diamonds {
            return true
        }
    }
    false
}
// Returns a set of valid bonuses for the player.
// Trula, Kings and Valat may always be announced; KingUltimo requires the
// called king in hand, PagatUltimo requires the pagat.
pub fn valid_bonuses(player: &Player, king: Option<CardSuit>) -> HashSet<BonusType> {
    let mut bonuses = HashSet::new();
    // Always valid bonuses.
    bonuses.insert(Trula);
    bonuses.insert(Kings);
    bonuses.insert(Valat);
    if has_king(player, king) {
        bonuses.insert(KingUltimo);
    }
    if has_pagat(player) {
        bonuses.insert(PagatUltimo);
    }
    return bonuses
}
// Returns true if the player owns the king of specified suit.
// If no king is given it always returns false.
fn has_king(player: &Player, king: Option<CardSuit>) -> bool {
    king.map(|suit| player.hand().has_card(&SuitCard(King, suit))).unwrap_or(false)
}
// Returns true if the player owns the pagat card.
fn has_pagat(player: &Player) -> bool {
    player.hand().has_card(&CARD_TAROCK_PAGAT)
}
#[cfg(test)]
mod test {
    use super::{BONUS_TYPES, Unannounced, Announced, has_trula, has_kings,
                valid_bonuses, Trula, Kings, Valat, KingUltimo, PagatUltimo};
    use cards::*;
    use player::Player;
    #[test]
    fn announced_bonuses_are_worth_two_times_more() {
        for bonus_type in BONUS_TYPES.iter() {
            assert_eq!(2 * Unannounced(*bonus_type).value(), Announced(*bonus_type).value());
        }
    }
    #[test]
    fn succeeds_if_cards_contain_trula() {
        // Pagat and skis alone are not enough; adding the mond completes it.
        let mut cards = vec!(CARD_CLUBS_KING, CARD_TAROCK_10, CARD_TAROCK_PAGAT,
                             CARD_HEARTS_KING, CARD_DIAMONDS_KING, CARD_CLUBS_EIGHT,
                             CARD_TAROCK_SKIS);
        assert!(!has_trula(cards.as_slice()));
        cards.push(CARD_TAROCK_MOND);
        assert!(has_trula(cards.as_slice()));
    }
    #[test]
    fn succeeds_if_cards_contain_all_four_kings() {
        // Three kings present; the diamonds king completes the set.
        let mut cards = vec!(CARD_CLUBS_KING, CARD_TAROCK_10, CARD_SPADES_KING,
                             CARD_HEARTS_KING, CARD_DIAMONDS_QUEEN, CARD_CLUBS_EIGHT,
                             CARD_TAROCK_SKIS);
        assert!(!has_kings(cards.as_slice()));
        cards.push(CARD_DIAMONDS_KING);
        assert!(has_kings(cards.as_slice()));
    }
    #[test]
    fn king_ultimo_valid_if_the_player_has_the_called_king() {
        let mut cards = vec!(CARD_CLUBS_KING, CARD_TAROCK_10, CARD_CLUBS_SEVEN,
                             CARD_HEARTS_NINE, CARD_DIAMONDS_NINE, CARD_CLUBS_EIGHT);
        let hand = Hand::new(cards.as_slice());
        let player = Player::new(0, hand);
        assert_eq!(valid_bonuses(&player, Some(Hearts)), set![Trula, Kings, Valat]);
        cards.push(CARD_HEARTS_KING);
        let hand = Hand::new(cards.as_slice());
        let player = Player::new(0, hand);
        assert_eq!(valid_bonuses(&player, Some(Hearts)), set![Trula, Kings, Valat, KingUltimo]);
    }
    #[test]
    fn pagat_ultimo_valid_only_if_the_player_has_the_pagat_card() {
        let mut cards = vec!(CARD_CLUBS_KING, CARD_TAROCK_10, CARD_CLUBS_SEVEN,
                             CARD_HEARTS_NINE, CARD_DIAMONDS_NINE, CARD_CLUBS_EIGHT);
        let hand = Hand::new(cards.as_slice());
        let player = Player::new(0, hand);
        assert_eq!(valid_bonuses(&player, Some(Hearts)), set![Trula, Kings, Valat]);
        cards.push(CARD_TAROCK_PAGAT);
        let hand = Hand::new(cards.as_slice());
        let player = Player::new(0, hand);
        assert_eq!(valid_bonuses(&player, Some(Hearts)), set![Trula, Kings, Valat, PagatUltimo]);
    }
}
|
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
use std::error::Error;
use crate::bot::bot::init_bot;
use crate::db::client::DbClient;
use crate::task::task::init_task;
mod bot;
mod db;
mod reddit;
mod task;
mod telegram;
embed_migrations!();
// Applies pending DB migrations, starts the background task, then runs the
// bot until it exits.
// NOTE(review): run_migrations uses a blocking diesel connection inside an
// async fn — acceptable at startup, but confirm init_task does not block.
pub async fn start(tg_token: String, database_url: String) -> Result<(), Box<dyn Error>> {
    run_migrations(&database_url);
    init_task(&tg_token, &database_url);
    init_bot(&tg_token, &database_url).await;
    Ok(())
}
// Runs the embedded diesel migrations against a fresh connection;
// panics if any migration fails (the app cannot run unmigrated).
fn run_migrations(database_url: &str) {
    let db_client = DbClient::new(database_url);
    embedded_migrations::run(&db_client.conn).expect("Failed to run migrations");
}
|
use super::operations::Response;
use crate::{common::tt, data::DatabasePool};
use api_models::guild::Guild;
use serenity::model::{
channel::GuildChannel, id::UserId, interactions::ApplicationCommandInteractionData,
};
// Slash-command handler: stores the chosen voice model for the guild and
// returns a localized confirmation message.
pub async fn set_voice(
    ctx: &serenity::client::Context,
    data: &ApplicationCommandInteractionData,
    _user_id: UserId,
    text_channel: &GuildChannel,
) -> anyhow::Result<Option<Response>> {
    let pool = {
        let data = ctx.data.as_ref().read().await;
        // Panics if DatabasePool was never inserted into the type map.
        data.get::<DatabasePool>().unwrap().clone()
    };
    let opt = {
        // Panics if the command was registered without a required option.
        let op = data.options.first().unwrap();
        // NOTE(review): `to_string()` on the JSON value quotes strings, and
        // `replace` strips ALL double quotes, including interior ones —
        // presumably option values never contain quotes; confirm.
        op.value.clone().unwrap().to_string().replace("\"", "")
    };
    let guild_id = text_channel.guild_id.0 as i64;
    let locale = Guild::get(&pool, guild_id).await?.locale;
    // Confirmation is built before the write; it is only sent if the
    // update below succeeds (errors propagate via `?`).
    let resp = Response {
        message: format!("{} **{}**", tt(&locale, "UpdateVoiceModel"), &opt),
        embeds: vec![],
        ephemeral: false,
    };
    Guild::set_voice_model(&pool, guild_id, opt).await?;
    Ok(Some(resp))
}
|
use std::{collections::HashMap, path::PathBuf};
use bevy::{prelude::*, reflect::TypeUuid, render::{pipeline::{PipelineDescriptor, RenderPipeline}, render_graph::{AssetRenderResourcesNode, RenderGraph, RenderResourcesNode}, renderer::RenderResources, shader::{ShaderStage, ShaderStages}}};
use crate::utils::reflection::Reflectable;
// A single compiled shader stage: its display name, the fully expanded
// (include-resolved) GLSL source, and the engine handle.
#[derive(Debug)]
pub struct ShaderConfig {
    name: String,
    pub source: String,
    pub shader: Handle<Shader>
}
// Optional vertex/fragment/compute stages for one named shader; a stage is
// None when the corresponding source file does not exist.
#[derive(new, Debug)]
pub struct ShaderConfigBundle {
    #[new(value = "None")]
    pub vertex: Option<ShaderConfig>,
    #[new(value = "None")]
    pub fragment: Option<ShaderConfig>,
    #[new(value = "None")]
    pub compute: Option<ShaderConfig>,
}
/// Recursively registers every file under `path` with the glsl-include
/// context, keyed by its path relative to `root_path` (with forward slashes
/// so `#include` directives are platform-independent).
///
/// Panics on unreadable directories/files or non-UTF-8 paths, matching the
/// original behavior.
fn load_lib_folder(include: &mut glsl_include::Context, path: PathBuf, root_path: &PathBuf) {
    for entry in std::fs::read_dir(path).unwrap() {
        let child_path = entry.unwrap().path();
        if child_path.is_file() {
            // `is_file()` already implies existence, so the previous extra
            // `exists()` check was redundant.
            let filename = child_path
                .strip_prefix(root_path)
                .unwrap()
                .to_str()
                .unwrap()
                .replace("\\", "/");
            let content = std::fs::read_to_string(child_path).unwrap();
            // Register directly on the borrowed context — no need to rebind
            // the reference or clone the name/source first.
            include.include(filename, content);
        } else if child_path.is_dir() {
            load_lib_folder(include, child_path, root_path);
        }
    }
}
/// Loads the vertex/fragment/compute stages of the shader called `name`
/// from `assets/shaders/<name>/<name>.{vert,frag,comp}`, expanding
/// `#include`s against the shared `assets/shaders/lib` folder, and registers
/// each stage that exists with the shader asset store.
pub fn load_shader(
    name: &str,
    mut shaders: ResMut<Assets<Shader>>,
    asset_server: ResMut<AssetServer>,
) -> ShaderConfigBundle {
    asset_server.watch_for_changes().unwrap();
    let cwd = std::env::current_dir().unwrap();
    let base_path = cwd.join("assets/shaders");
    let lib_folder = base_path.join("lib");
    // Each stage gets its own include context, both seeded from `lib/`.
    let mut vertex = glsl_include::Context::new();
    let mut fragment = glsl_include::Context::new();
    let vertex_include = vertex.include("", "");
    let fragment_include = fragment.include("", "");
    // (fixed `veretex` typo; `&lib_folder` needs no clone — it is borrowed)
    load_lib_folder(vertex_include, lib_folder.clone(), &lib_folder);
    load_lib_folder(fragment_include, lib_folder.clone(), &lib_folder);
    let mut shader_bundle = ShaderConfigBundle::new();
    let vertex_path = base_path.join(format!("{0}/{0}.vert", name));
    if vertex_path.exists() {
        let vertex_source = std::fs::read_to_string(vertex_path).unwrap();
        let vertex_source = vertex_include.expand(vertex_source).unwrap();
        shader_bundle.vertex = Some(ShaderConfig {
            name: format!("{}.vert", name),
            source: vertex_source.clone(),
            shader: shaders.add(Shader::from_glsl(ShaderStage::Vertex, &vertex_source))
        });
    }
    let fragment_path = base_path.join(format!("{0}/{0}.frag", name));
    if fragment_path.exists() {
        let fragment_source = std::fs::read_to_string(fragment_path).unwrap();
        let fragment_source = fragment_include.expand(fragment_source).unwrap();
        shader_bundle.fragment = Some(ShaderConfig {
            name: format!("{}.frag", name),
            source: fragment_source.clone(),
            shader: shaders.add(Shader::from_glsl(ShaderStage::Fragment, &fragment_source))
        });
    }
    let compute_path = base_path.join(format!("{0}/{0}.comp", name));
    if compute_path.exists() {
        let compute_source = std::fs::read_to_string(compute_path).unwrap();
        // NOTE(review): compute sources reuse the *fragment* include context.
        // Both contexts are seeded identically from `lib/`, so the result is
        // the same, but a dedicated context would be clearer.
        let compute_source = fragment_include.expand(compute_source).unwrap();
        shader_bundle.compute = Some(ShaderConfig {
            name: format!("{}.comp", name),
            source: compute_source.clone(),
            shader: shaders.add(Shader::from_glsl(ShaderStage::Compute, &compute_source))
        });
    }
    shader_bundle
}
pub fn setup_material<T: TypeUuid + RenderResources + Reflectable>(
asset_server: ResMut<AssetServer>,
mut shader_cache: ResMut<super::ShaderCache>,
mut pipelines: ResMut<Assets<PipelineDescriptor>>,
mut render_graph: ResMut<RenderGraph>,
shaders: ResMut<Assets<Shader>>,
) -> Option<RenderPipeline> {
let name = T::struct_name();
if shader_cache.cache.contains_key(name) {
return Some(shader_cache.cache[name].clone());
}
let shader_bundle = load_shader(&name, shaders, asset_server);
if shader_bundle.vertex.is_some() {
let pipeline_handle = pipelines.add(PipelineDescriptor::default_config(ShaderStages {
vertex: shader_bundle.vertex.unwrap().shader,
fragment: shader_bundle.fragment.map(|x| x.shader)
}));
render_graph.add_system_node(
name,
RenderResourcesNode::<T>::new(true),
);
render_graph
.add_node_edge(name, bevy::render::render_graph::base::node::MAIN_PASS)
.unwrap();
let render_pipe = RenderPipeline::new(
pipeline_handle,
);
shader_cache.cache.insert(name.to_string(), render_pipe.clone());
return Some(render_pipe);
}
return None;
} |
// This file is part of dpdk. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT. No part of dpdk, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of dpdk. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/dpdk/master/COPYRIGHT.
/// An extended shared receive queue that wraps an unextended queue and keeps
/// its owning domain and completion queue alive via borrows.
#[allow(dead_code)]
pub struct ExtendedSharedReceiveQueue<'a, C: CompletionQueue>
where C: 'a
{
    /// The plain SRQ all trait behaviour is delegated to.
    pub(crate) unextendedSharedReceiveQueue: UnextendedSharedReceiveQueue<'a>,
    pub(crate) extendedReliableConnectionDomain: &'a ExtendedReliableConnectionDomain<'a>,
    pub(crate) completionQueue: &'a C,
}
/// Every trait method simply delegates to the wrapped
/// `unextendedSharedReceiveQueue`; the extended type adds no behaviour here.
impl<'a, C: CompletionQueue> SharedReceiveQueue for ExtendedSharedReceiveQueue<'a, C>
{
    #[doc(hidden)]
    #[inline(always)]
    fn pointer(&self) -> *mut ibv_srq
    {
        self.unextendedSharedReceiveQueue.pointer()
    }
    #[doc(hidden)]
    #[inline(always)]
    fn settings(&mut self) -> &mut SharedReceiveQueueSettings
    {
        self.unextendedSharedReceiveQueue.settings()
    }
    #[doc(hidden)]
    #[inline(always)]
    fn context(&self) -> &Context
    {
        self.unextendedSharedReceiveQueue.context()
    }
    #[doc(hidden)]
    #[inline(always)]
    fn isValidForProtectionDomain<'b>(&self, protectionDomain: &ProtectionDomain<'b>) -> bool
    {
        self.unextendedSharedReceiveQueue.isValidForProtectionDomain(protectionDomain)
    }
}
|
extern crate tables;
use tables::*;
#[test]
fn join_one() {
    // Joining a single map must visit every key together with its value.
    let mut map = SparseMap::new();
    for n in 0..1_000 {
        map.insert(n, n);
    }
    for (key, (value,)) in (&map,).join() {
        assert_eq!(key, *value);
    }
}
#[test]
fn sparse_mut() {
    // A mutable join lets us derive flags from the joined values in place.
    let mut values = SparseMap::new();
    let mut flags = SparseMap::new();
    for n in 0..1_000 {
        values.insert(n, n);
        flags.insert(n, false);
    }
    for (_, (value, flag)) in (&values, &mut flags).join() {
        *flag = value % 3 == 0;
    }
    // Verify through an immutable join that the writes stuck.
    for (_, (value, flag)) in (&values, &flags).join() {
        assert_eq!(*flag, value % 3 == 0)
    }
}
#[test]
fn vecmap_mut() {
    // Same scenario as sparse_mut, exercised against VecMap storage.
    let mut values = VecMap::new();
    let mut flags = VecMap::new();
    for n in 0..1_000 {
        values.insert(n, n);
        flags.insert(n, false);
    }
    for (_, (value, flag)) in (&values, &mut flags).join() {
        *flag = value % 3 == 0;
    }
    for (_, (value, flag)) in (&values, &flags).join() {
        assert_eq!(*flag, value % 3 == 0)
    }
}
#[test]
fn primes_search() {
    // Trial division against previously found primes, stopping once a
    // divisor exceeds half the candidate (no smaller factor can remain).
    let mut primes = BitSet::new();
    for candidate in 2..10_000 {
        let mut is_prime = true;
        for (p, ()) in &primes {
            if p * 2 > candidate {
                break;
            }
            if candidate % p == 0 {
                is_prime = false;
                break;
            }
        }
        if is_prime {
            primes.add(candidate);
        }
    }
    // There are exactly 1229 primes below 10_000.
    assert_eq!(primes.iter().count(), 1229);
}
|
pub mod event;
use self::event::EventStopable;
use std::collections::HashMap;
/// Anything that can react to a dispatched event.
///
/// `PartialEq` is required so a dispatcher can locate (and remove) a
/// specific listener inside its listener lists.
pub trait ListenerCallable: PartialEq {
    /// Invoked with the event's name and the (mutable) event itself.
    fn call(&self, event_name: &str, event: &mut EventStopable);
}
/// A listener backed by a plain function pointer.
pub struct EventListener {
    // Function invoked on dispatch; also the identity used for equality.
    callback: fn(event_name: &str, event: &mut EventStopable),
}
impl EventListener {
    /// Wraps a plain function pointer in an `EventListener`.
    pub fn new (callback: fn(event_name: &str, event: &mut EventStopable)) -> EventListener {
        EventListener { callback }
    }
}
impl ListenerCallable for EventListener {
    /// Forwards the event straight to the wrapped function pointer.
    fn call (&self, event_name: &str, event: &mut EventStopable) {
        (self.callback)(event_name, event);
    }
}
impl PartialEq for EventListener {
    /// Two listeners are equal when they wrap the same function pointer.
    fn eq(&self, other: &EventListener) -> bool {
        (self.callback as *const()) == (other.callback as *const())
    }
    // The hand-written `ne` was removed: the trait's default (`!self.eq(..)`)
    // is exactly what it computed.
}
/// Something that can deliver an event of type `S` to interested listeners.
pub trait Dispatchable<S> where S: EventStopable {
    /// Delivers `event` to every listener registered for `event_name`.
    fn dispatch (&self, event_name: &str, event: &mut S);
}
/// Maps event names to the listeners registered for them; both names and
/// listeners are borrowed for the dispatcher's lifetime `'a`.
pub struct EventDispatcher<'a, L> where L: 'a + ListenerCallable {
    listeners: HashMap<&'a str, Vec<&'a L>>,
}
impl<'a, L: 'a + ListenerCallable> EventDispatcher<'a, L> {
    /// Creates a dispatcher with no registered listeners.
    pub fn new() -> EventDispatcher<'a, L> {
        EventDispatcher{listeners: HashMap::new()}
    }
    /// Registers `listener` for `event_name`. A listener may be registered
    /// multiple times; each registration is called once per dispatch.
    pub fn add_listener(&mut self, event_name: &'a str, listener: &'a L) {
        // Entry API: one lookup instead of contains_key + insert + get_mut.
        self.listeners
            .entry(event_name)
            .or_insert_with(Vec::new)
            .push(listener);
    }
    /// Removes the first registration of `listener` for `event_name`, if any
    /// (mirroring the duplicate registrations `add_listener` allows).
    pub fn remove_listener(&mut self, event_name: &'a str, listener: &'a mut L) {
        if let Some(listeners) = self.listeners.get_mut(event_name) {
            if let Some(index) = listeners.iter().position(|x| *x == listener) {
                listeners.remove(index);
            }
        }
    }
}
impl<'a, S: 'a + EventStopable> Dispatchable<S> for EventDispatcher<'a, EventListener> {
    /// Calls every listener registered for `event_name`, in registration
    /// order, stopping early once a listener stops propagation.
    fn dispatch(&self, event_name: &str, event: &mut S) {
        if let Some(listeners) = self.listeners.get(event_name) {
            for listener in listeners {
                listener.call(event_name, event);
                // BUG FIX: the original condition was inverted
                // (`if !event.is_propagation_stopped() { break; }`), which
                // silenced every listener after the first one unless
                // propagation had been stopped. Stop only when asked to.
                if event.is_propagation_stopped() {
                    break;
                }
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use super::event::*;
    // Test callback: logs the event name and stops further propagation.
    fn print_event_info(event_name: &str, event: &mut EventStopable) {
        println!("callback from event: {}", event_name);
        event.stop_propagation();
    }
    #[test]
    fn test_dispatcher() {
        let event_name = "test_a";
        let mut event = Event::new();
        let callback_one: fn(event_name: &str, event: &mut EventStopable) = print_event_info;
        let mut listener_one = EventListener::new(callback_one);
        let mut dispatcher = EventDispatcher::new();
        // With no listeners registered, dispatch must not touch the event.
        dispatcher.dispatch(event_name, &mut event);
        assert_eq!(false, event.is_propagation_stopped());
        dispatcher.dispatch(event_name, &mut event);
        assert_eq!(false, event.is_propagation_stopped());
        // Once registered, the callback runs and stops propagation.
        dispatcher.add_listener(event_name, &mut listener_one);
        dispatcher.dispatch(event_name, &mut event);
        assert_eq!(true, event.is_propagation_stopped());
    }
}
|
use poem::middlewares::StripPrefix;
use poem::route::{self, Route};
use poem::EndpointExt;
/// Minimal handler: always responds with the static string "hello".
async fn hello() -> &'static str {
    "hello"
}
#[tokio::main]
async fn main() {
    // Inner router serves /hello; the outer router mounts it under /api/*
    // and strips the "/api" prefix before forwarding, so the full path to
    // the handler is /api/hello.
    let route = Route::new().at("/hello", route::get(hello));
    let api = Route::new().at("/api/*", route.with(StripPrefix::new("/api")));
    poem::Server::new(api)
        .serve(&"127.0.0.1:3000".parse().unwrap())
        .await
        .unwrap();
}
|
// Copyright 2019 Guillaume Becquin
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use rust_tokenizers;
use rust_tokenizers::preprocessing::vocab::base_vocab::Vocab;
use rust_tokenizers::preprocessing::tokenizer::base_tokenizer::{TruncationStrategy, Tokenizer};
use std::env;
use rust_tokenizers::preprocessing::adapters::Example;
use std::sync::Arc;
use rust_tokenizers::BertTokenizer;
fn main() {
    // Vocabulary location comes from the environment; fail fast if unset.
    let vocab_path = env::var("bert_vocab").expect("`bert_vocab` environment variable not set");
    let vocab = Arc::new(rust_tokenizers::BertVocab::from_file(vocab_path.as_str()));
    let _test_sentence = Example::new_from_string("This is a sample sentence to be tokenized");
    // NOTE(review): the boolean presumably toggles lowercasing — confirm
    // against BertTokenizer::from_existing_vocab's documentation.
    let bert_tokenizer: BertTokenizer = BertTokenizer::from_existing_vocab(vocab.clone(), true);
    // Encode a single sentence (no pair), truncating to at most 128 tokens.
    println!("{:?}", bert_tokenizer.encode(_test_sentence.sentence_1.as_str(),
    None,
    128,
    &TruncationStrategy::LongestFirst,
    0));
}
|
extern crate nix;
use nix::sched::*;
extern crate pentry;
/// Best-effort debug print of the current process entry; lookup failures
/// are silently ignored.
fn print_process_info() {
    match pentry::current() {
        Ok(entry) => println!("{:?}", entry),
        Err(_) => {}
    }
}
/// Entry point of the cloned child: print its process info, exit status 0.
fn child() -> isize {
    print_process_info();
    0
}
fn main() {
    print_process_info();
    // 1 MiB stack for the cloned child, allocated in main's own frame.
    const STACK_SIZE: usize = 1024 * 1024;
    let ref mut stack: [u8; STACK_SIZE] = [0; STACK_SIZE];
    let cb = Box::new(|| child());
    // Empty flag set: the child shares every namespace with the parent.
    let clone_flags = CloneFlags::empty();
    match clone(cb, stack, clone_flags, None) {
        // The child's pid is not used further; named to document intent.
        Ok(_child_pid) => {}
        // FIX: `panic!(err)` with a non-string payload is deprecated and a
        // hard error in the 2021 edition — format the error instead.
        Err(err) => panic!("clone failed: {}", err),
    }
}
|
// Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
#![allow(dead_code)]
use wasmlib::*;
// Contract identity: name and its hashed name.
pub const SC_NAME: &str = "fairauction";
pub const SC_HNAME: ScHname = ScHname(0x1b5c43b1);
// Request parameter keys.
pub const PARAM_COLOR: &str = "color";
pub const PARAM_DESCRIPTION: &str = "description";
pub const PARAM_DURATION: &str = "duration";
pub const PARAM_MINIMUM_BID: &str = "minimumBid";
pub const PARAM_OWNER_MARGIN: &str = "ownerMargin";
// State variable keys.
pub const VAR_AUCTIONS: &str = "auctions";
pub const VAR_BIDDER_LIST: &str = "bidderList";
pub const VAR_BIDDERS: &str = "bidders";
pub const VAR_COLOR: &str = "color";
pub const VAR_CREATOR: &str = "creator";
pub const VAR_DEPOSIT: &str = "deposit";
pub const VAR_DESCRIPTION: &str = "description";
pub const VAR_DURATION: &str = "duration";
pub const VAR_HIGHEST_BID: &str = "highestBid";
pub const VAR_HIGHEST_BIDDER: &str = "highestBidder";
pub const VAR_INFO: &str = "info";
pub const VAR_MINIMUM_BID: &str = "minimumBid";
pub const VAR_NUM_TOKENS: &str = "numTokens";
pub const VAR_OWNER_MARGIN: &str = "ownerMargin";
pub const VAR_WHEN_STARTED: &str = "whenStarted";
// Entry point names.
pub const FUNC_FINALIZE_AUCTION: &str = "finalizeAuction";
pub const FUNC_PLACE_BID: &str = "placeBid";
pub const FUNC_SET_OWNER_MARGIN: &str = "setOwnerMargin";
pub const FUNC_START_AUCTION: &str = "startAuction";
pub const VIEW_GET_INFO: &str = "getInfo";
// Hashed entry point names matching the strings above.
pub const HFUNC_FINALIZE_AUCTION: ScHname = ScHname(0x8d534ddc);
pub const HFUNC_PLACE_BID: ScHname = ScHname(0x9bd72fa9);
pub const HFUNC_SET_OWNER_MARGIN: ScHname = ScHname(0x1774461a);
pub const HFUNC_START_AUCTION: ScHname = ScHname(0xd5b7bacb);
pub const HVIEW_GET_INFO: ScHname = ScHname(0xcfedba5f);
|
#[macro_use]
extern crate nom;
#[macro_use]
extern crate nom_locate;
mod ast;
mod error;
mod format;
mod interpreter;
mod parser;
use crate::interpreter::Interpreter;
use nom::simple_errors::Context;
use nom::types::CompleteStr;
pub use crate::error::Error;
use std::io::{BufRead, Write};
/// Parses `input` and, on success, interprets it with the given I/O streams.
///
/// Parse failures and unconsumed trailing input are reported on `stderr`
/// and yield `Ok(())`; only unexpected parser errors become `Err`.
pub fn execute<R: BufRead, W: Write, V: Write>(
    input: &str,
    stdin: &mut R,
    stdout: &mut W,
    stderr: &mut V,
) -> Result<(), Error> {
    let res = parser::program(parser::Span::new(CompleteStr(input)));
    match res {
        Ok((remaining, ast)) => {
            if !remaining.fragment.is_empty() {
                // The grammar only matched a prefix; report the leftover.
                write!(stderr, "{}", error::format_remaining(&remaining.fragment)?)?;
                Ok(())
            } else {
                let ast = ast?;
                let interpreter = Interpreter::new(&ast, input);
                interpreter.evaluate(stdin, stdout, stderr)?;
                Ok(())
            }
        }
        // A custom parser failure carries an error code we can pretty-print.
        Err(nom::Err::Failure(Context::Code(span, nom::ErrorKind::Custom(err_code)))) => {
            let err_output = parser::ErrorCode::from(err_code).to_string(&span, input);
            write!(stderr, "{}", err_output)?;
            Ok(())
        }
        Err(e) => Err(Error::Parser(format!("parser error: {}", e))),
    }
}
|
extern crate nd;
use nd::{Array, Range, View, Zeros};
fn main() {
    let shape = [3, 4, 5];
    let mut array: Array<f32, 3> = Array::zeros(&shape);
    // Fill with 0..60 so each element's value encodes its linear index.
    for i in 0..shape.iter().product() {
        array.data[i] = i as f32;
    }
    // A full-extent view: every axis from 0 to its length, step 1.
    let start = [0, 0, 0];
    // `[usize; 3]` is `Copy`; the original's `.clone()` was redundant.
    let stop = shape;
    let step = [1, 1, 1];
    let range = Range { start, stop, step };
    println!("{}", array.view(&range).unwrap());
}
|
// Copyright 2017 rust-ipfs-api Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//
//! Rust library for connecting to the IPFS HTTP API using tokio.
//!
//! ## Usage
//!
//! ```toml
//! [dependencies]
//! ipfs-api = "0.4.0-alpha"
//! ```
//!
//! ## Examples
//!
//! Write a file to IPFS:
//!
//! ```no_run
//! # extern crate ipfs_api;
//! # extern crate tokio_core;
//! #
//! use ipfs_api::IpfsClient;
//! use std::io::Cursor;
//! use tokio_core::reactor::Core;
//!
//! # fn main() {
//! let mut core = Core::new().unwrap();
//! let client = IpfsClient::default(&core.handle());
//! let data = Cursor::new("Hello World!");
//!
//! let req = client.add(data);
//! let res = core.run(req).unwrap();
//!
//! println!("{}", res.hash);
//! # }
//! ```
//!
//! Read a file from IPFS:
//!
//! ```no_run
//! # extern crate futures;
//! # extern crate ipfs_api;
//! # extern crate tokio_core;
//! #
//! use futures::stream::Stream;
//! use ipfs_api::IpfsClient;
//! use std::io::{self, Write};
//! use tokio_core::reactor::Core;
//!
//! # fn main() {
//! let mut core = Core::new().unwrap();
//! let client = IpfsClient::default(&core.handle());
//!
//! let req = client.get("/test/file.json").concat2();
//! let res = core.run(req).unwrap();
//! let out = io::stdout();
//! let mut out = out.lock();
//!
//! out.write_all(&res).unwrap();
//! # }
//! ```
//!
//! The project also includes a number of runnable examples which
//! double as integration tests.
//!
//! You can run any of the examples with cargo:
//!
//! ```sh
//! $ cargo run -p ipfs-api --example add_file
//! ```
extern crate bytes;
#[macro_use]
extern crate error_chain;
extern crate futures;
extern crate hyper;
extern crate hyper_multipart_rfc7578 as hyper_multipart;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde_urlencoded;
extern crate tokio_core;
extern crate tokio_io;
pub use client::IpfsClient;
pub use request::{KeyType, Logger, LoggingLevel};
mod request;
pub mod response;
mod client;
mod header;
mod read;
|
use panda::prelude::*;
use panda::regs::Reg;
use peg::{str::LineCol, error::ParseError};
/// A parsed monitor command.
pub(crate) enum Command {
    /// Apply the given taint label to a target.
    Taint(TaintTarget, u32),
    /// Report whether a target is tainted.
    CheckTaint(TaintTarget),
    /// Report a target's taint labels.
    GetTaint(TaintTarget),
    Help,
    MemInfo,
    ThreadInfo,
    ProcInfo,
    ProcList,
}
impl Command {
    /// Parses a single monitor command line using the peg grammar below.
    pub(crate) fn parse(cmd: &str) -> Result<Self, ParseError<LineCol>> {
        monitor_commands::command(cmd)
    }
}
/// What a taint operation applies to: a guest memory address or a register.
pub(crate) enum TaintTarget {
    Address(target_ptr_t),
    Register(Reg),
}
// Grammar for the monitor's command language. (Rust comments are stripped
// before the proc-macro sees the tokens, so they are safe inside `parser!`.)
peg::parser!{
    grammar monitor_commands() for str {
        // Alternatives are tried in order; the first match wins.
        pub(crate) rule command() -> Command
            = taint()
            / check_taint()
            / get_taint()
            / mem_info()
            / proc_info()
            / proc_list()
            / thread_info()
            / help()
        rule help() -> Command
            = "help" { Command::Help }
        rule mem_info() -> Command
            = "meminfo" { Command::MemInfo }
        rule proc_info() -> Command
            = "procinfo" { Command::ProcInfo }
        rule proc_list() -> Command
            = "proclist" { Command::ProcList }
        rule thread_info() -> Command
            = "threadinfo" { Command::ThreadInfo }
        rule taint() -> Command
            = "taint" _ target:taint_target() _ label:number() {
                Command::Taint(target, label as u32)
            }
        // Either a dereferenced address (`*0x...`) or a register name;
        // `quiet!` suppresses the low-level errors in favour of `expected!`.
        rule taint_target() -> TaintTarget
            = quiet!{
                "*" addr:number() { TaintTarget::Address(addr) }
                / reg:register() { TaintTarget::Register(reg) }
            }
            / expected!("an address (example: *0x55555555) or a register name")
        rule check_taint() -> Command
            = "check_taint" _ target:taint_target() { Command::CheckTaint(target) }
        rule get_taint() -> Command
            = "get_taint" _ target:taint_target() { Command::GetTaint(target) }
        // TODO: display available registers on error
        rule register() -> Reg
            = reg:$(['a'..='z' | 'A'..='Z'] ['a'..='z' | 'A'..='Z' | '0'..='9']*) {?
                reg.parse()
                    .map_err(|_| "invalid register name")
            }
        // Hex (0x-prefixed) or decimal unsigned 64-bit literal.
        rule number() -> u64
            = quiet!{
                "0x" hex:$(['0'..='9' | 'a'..='f' | 'A'..='F']+) {?
                    u64::from_str_radix(hex, 16)
                        .map_err(|_| "invalid hex number")
                }
                / decimal:$(['0'..='9']+) {?
                    decimal.parse()
                        .map_err(|_| "invalid decimal number")
                }
            }
            / expected!("a number")
        // Mandatory whitespace separator between tokens.
        rule _() = quiet!{ [' ' | '\n' | '\t']+ }
    }
}
|
#[allow(unused_imports)]
use std::{io, fmt};
/// Reads one line from stdin and parses its whitespace-separated tokens
/// into a tuple of the requested types, e.g. `read_parse!(isize, isize)`.
/// Panics on read or parse failure.
macro_rules! read_parse {
    ($($t:ty),*) => ({
        let mut a_str = String::new();
        io::stdin().read_line(&mut a_str).expect("read error");
        let mut a_iter = a_str.split_whitespace();
        (
            $(
                // One `next()` per requested type, in declaration order.
                a_iter.next().unwrap().parse::<$t>().expect("parse error"),
            )*
        )
    })
}
fn main() {
let (_num, max) = read_parse!(isize, isize);
let mut card: Vec<isize> = Vec::new();
let mut card_str = String::new();
io::stdin().read_line(&mut card_str).expect("card error");
let card_iter = card_str.split_whitespace();
for i in card_iter {
let a= i.trim().parse::<isize>().expect("e");
card.push(a);
}
let mut rst: isize =0;
for i in 0..card.len() {
for j in i+1..card.len() {
for k in j+1..card.len(){
if card[i]+card[j]+card[k]<= max && card[i]+card[j]+card[k]> rst{
rst = card[i]+card[j]+card[k];
}
}
}
}
println!("{}", rst);
} |
//! A module containing general settings that may be used by other grid implementations.
mod alignment;
mod border;
mod borders;
mod entity;
mod indent;
mod line;
mod position;
mod sides;
pub mod compact;
#[cfg(feature = "std")]
pub mod spanned;
pub use alignment::{AlignmentHorizontal, AlignmentVertical};
pub use border::Border;
pub use borders::Borders;
pub use entity::{Entity, EntityIterator};
pub use indent::Indent;
pub use line::Line;
pub use position::Position;
pub use sides::Sides;
|
// Generated from affine.rs.tera template. Edit the template, not the generated file.
use crate::{Mat2, Mat3, Mat3A, Vec2, Vec3A};
use core::ops::{Deref, DerefMut, Mul};
/// A 2D affine transform, which can represent translation, rotation, scaling and shear.
#[derive(Copy, Clone)]
#[repr(C)]
pub struct Affine2 {
    /// The linear part of the transform (scale, rotation, shear).
    pub matrix2: Mat2,
    /// The translation applied after the linear part.
    pub translation: Vec2,
}
impl Affine2 {
    /// The degenerate zero transform.
    ///
    /// This transforms any finite vector and point to zero.
    /// The zero transform is non-invertible.
    pub const ZERO: Self = Self {
        matrix2: Mat2::ZERO,
        translation: Vec2::ZERO,
    };
    /// The identity transform.
    ///
    /// Multiplying a vector with this returns the same vector.
    pub const IDENTITY: Self = Self {
        matrix2: Mat2::IDENTITY,
        translation: Vec2::ZERO,
    };
    /// All NAN:s.
    pub const NAN: Self = Self {
        matrix2: Mat2::NAN,
        translation: Vec2::NAN,
    };
    /// Creates an affine transform from three column vectors.
    #[inline(always)]
    pub const fn from_cols(x_axis: Vec2, y_axis: Vec2, z_axis: Vec2) -> Self {
        Self {
            matrix2: Mat2::from_cols(x_axis, y_axis),
            translation: z_axis,
        }
    }
    /// Creates an affine transform from a `[f32; 6]` array stored in column major order.
    #[inline]
    pub fn from_cols_array(m: &[f32; 6]) -> Self {
        Self {
            matrix2: Mat2::from_cols_slice(&m[0..4]),
            translation: Vec2::from_slice(&m[4..6]),
        }
    }
    /// Creates a `[f32; 6]` array storing data in column major order.
    #[inline]
    pub fn to_cols_array(&self) -> [f32; 6] {
        let x = &self.matrix2.x_axis;
        let y = &self.matrix2.y_axis;
        let z = &self.translation;
        [x.x, x.y, y.x, y.y, z.x, z.y]
    }
    /// Creates an affine transform from a `[[f32; 2]; 3]`
    /// 2D array stored in column major order.
    /// If your data is in row major order you will need to `transpose` the returned
    /// matrix.
    #[inline]
    pub fn from_cols_array_2d(m: &[[f32; 2]; 3]) -> Self {
        Self {
            matrix2: Mat2::from_cols(m[0].into(), m[1].into()),
            translation: m[2].into(),
        }
    }
    /// Creates a `[[f32; 2]; 3]` 2D array storing data in
    /// column major order.
    /// If you require data in row major order `transpose` the matrix first.
    #[inline]
    pub fn to_cols_array_2d(&self) -> [[f32; 2]; 3] {
        [
            self.matrix2.x_axis.into(),
            self.matrix2.y_axis.into(),
            self.translation.into(),
        ]
    }
    /// Creates an affine transform from the first 6 values in `slice`.
    ///
    /// # Panics
    ///
    /// Panics if `slice` is less than 6 elements long.
    #[inline]
    pub fn from_cols_slice(slice: &[f32]) -> Self {
        Self {
            matrix2: Mat2::from_cols_slice(&slice[0..4]),
            translation: Vec2::from_slice(&slice[4..6]),
        }
    }
    /// Writes the columns of `self` to the first 6 elements in `slice`.
    ///
    /// # Panics
    ///
    /// Panics if `slice` is less than 6 elements long.
    #[inline]
    pub fn write_cols_to_slice(self, slice: &mut [f32]) {
        self.matrix2.write_cols_to_slice(&mut slice[0..4]);
        self.translation.write_to_slice(&mut slice[4..6]);
    }
    /// Creates an affine transform that changes scale.
    /// Note that if any scale is zero the transform will be non-invertible.
    #[inline]
    pub fn from_scale(scale: Vec2) -> Self {
        Self {
            matrix2: Mat2::from_diagonal(scale),
            translation: Vec2::ZERO,
        }
    }
    /// Creates an affine transform from the given rotation `angle`.
    #[inline]
    pub fn from_angle(angle: f32) -> Self {
        Self {
            matrix2: Mat2::from_angle(angle),
            translation: Vec2::ZERO,
        }
    }
    /// Creates an affine transformation from the given 2D `translation`.
    #[inline]
    pub fn from_translation(translation: Vec2) -> Self {
        Self {
            matrix2: Mat2::IDENTITY,
            translation,
        }
    }
    /// Creates an affine transform from a 2x2 matrix (expressing scale, shear and rotation)
    #[inline]
    pub fn from_mat2(matrix2: Mat2) -> Self {
        Self {
            matrix2,
            translation: Vec2::ZERO,
        }
    }
    /// Creates an affine transform from a 2x2 matrix (expressing scale, shear and rotation) and a
    /// translation vector.
    ///
    /// Equivalent to
    /// `Affine2::from_translation(translation) * Affine2::from_mat2(mat2)`
    #[inline]
    pub fn from_mat2_translation(matrix2: Mat2, translation: Vec2) -> Self {
        Self {
            matrix2,
            translation,
        }
    }
    /// Creates an affine transform from the given 2D `scale`, rotation `angle` (in radians) and
    /// `translation`.
    ///
    /// Equivalent to `Affine2::from_translation(translation) *
    /// Affine2::from_angle(angle) * Affine2::from_scale(scale)`
    #[inline]
    pub fn from_scale_angle_translation(scale: Vec2, angle: f32, translation: Vec2) -> Self {
        let rotation = Mat2::from_angle(angle);
        Self {
            matrix2: Mat2::from_cols(rotation.x_axis * scale.x, rotation.y_axis * scale.y),
            translation,
        }
    }
    /// Creates an affine transform from the given 2D rotation `angle` (in radians) and
    /// `translation`.
    ///
    /// Equivalent to `Affine2::from_translation(translation) * Affine2::from_angle(angle)`
    #[inline]
    pub fn from_angle_translation(angle: f32, translation: Vec2) -> Self {
        Self {
            matrix2: Mat2::from_angle(angle),
            translation,
        }
    }
    /// The given `Mat3` must be an affine transform,
    /// i.e. the `z` element of each column is discarded (this is not checked).
    #[inline]
    pub fn from_mat3(m: Mat3) -> Self {
        use crate::swizzles::Vec3Swizzles;
        Self {
            matrix2: Mat2::from_cols(m.x_axis.xy(), m.y_axis.xy()),
            translation: m.z_axis.xy(),
        }
    }
    /// The given [`Mat3A`] must be an affine transform,
    /// i.e. the `z` element of each column is discarded (this is not checked).
    #[inline]
    pub fn from_mat3a(m: Mat3A) -> Self {
        use crate::swizzles::Vec3Swizzles;
        Self {
            matrix2: Mat2::from_cols(m.x_axis.xy(), m.y_axis.xy()),
            translation: m.z_axis.xy(),
        }
    }
    /// Transforms the given 2D point, applying shear, scale, rotation and translation.
    #[inline]
    pub fn transform_point2(&self, rhs: Vec2) -> Vec2 {
        self.matrix2 * rhs + self.translation
    }
    /// Transforms the given 2D vector, applying shear, scale and rotation (but NOT
    /// translation).
    ///
    /// To also apply translation, use [`Self::transform_point2()`] instead.
    #[inline]
    pub fn transform_vector2(&self, rhs: Vec2) -> Vec2 {
        self.matrix2 * rhs
    }
    /// Returns `true` if, and only if, all elements are finite.
    ///
    /// If any element is either `NaN`, positive or negative infinity, this will return
    /// `false`.
    #[inline]
    pub fn is_finite(&self) -> bool {
        self.matrix2.is_finite() && self.translation.is_finite()
    }
    /// Returns `true` if any elements are `NaN`.
    #[inline]
    pub fn is_nan(&self) -> bool {
        self.matrix2.is_nan() || self.translation.is_nan()
    }
    /// Returns true if the absolute difference of all elements between `self` and `rhs`
    /// is less than or equal to `max_abs_diff`.
    ///
    /// This can be used to compare if two transforms contain similar elements. It works
    /// best when comparing with a known value. The `max_abs_diff` that should be used
    /// depends on the values being compared against.
    ///
    /// For more see
    /// [comparing floating point numbers](https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/).
    #[inline]
    pub fn abs_diff_eq(&self, rhs: Self, max_abs_diff: f32) -> bool {
        self.matrix2.abs_diff_eq(rhs.matrix2, max_abs_diff)
            && self.translation.abs_diff_eq(rhs.translation, max_abs_diff)
    }
    /// Return the inverse of this transform.
    ///
    /// Note that if the transform is not invertible the result will be invalid.
    #[must_use]
    #[inline]
    pub fn inverse(&self) -> Self {
        let matrix2 = self.matrix2.inverse();
        // transform negative translation by the matrix inverse:
        let translation = -(matrix2 * self.translation);
        Self {
            matrix2,
            translation,
        }
    }
}
impl Default for Affine2 {
    #[inline(always)]
    fn default() -> Self {
        // A default transform leaves points and vectors unchanged.
        Self::IDENTITY
    }
}
impl Deref for Affine2 {
    type Target = crate::deref::Cols3<Vec2>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // SAFETY: Affine2 is #[repr(C)]; the cast assumes `Cols3<Vec2>` has
        // the same layout (three consecutive Vec2 columns) — upheld by the
        // crate's deref helper types.
        unsafe { &*(self as *const Self as *const Self::Target) }
    }
}
impl DerefMut for Affine2 {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same layout argument as the `Deref` impl above.
        unsafe { &mut *(self as *mut Self as *mut Self::Target) }
    }
}
impl PartialEq for Affine2 {
    #[inline]
    fn eq(&self, rhs: &Self) -> bool {
        // Exact component-wise float comparison; use `abs_diff_eq` for
        // tolerance-based comparison.
        self.matrix2.eq(&rhs.matrix2) && self.translation.eq(&rhs.translation)
    }
}
#[cfg(not(target_arch = "spirv"))]
impl core::fmt::Debug for Affine2 {
    // Field-by-field struct formatting for diagnostics.
    fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        fmt.debug_struct(stringify!(Affine2))
            .field("matrix2", &self.matrix2)
            .field("translation", &self.translation)
            .finish()
    }
}
#[cfg(not(target_arch = "spirv"))]
impl core::fmt::Display for Affine2 {
    // Compact column-list form: `[x_axis, y_axis, translation]`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(
            f,
            "[{}, {}, {}]",
            self.matrix2.x_axis, self.matrix2.y_axis, self.translation
        )
    }
}
impl<'a> core::iter::Product<&'a Self> for Affine2 {
    // Composes all transforms in iteration order, starting from identity.
    fn product<I>(iter: I) -> Self
    where
        I: Iterator<Item = &'a Self>,
    {
        iter.fold(Self::IDENTITY, |a, &b| a * b)
    }
}
impl Mul for Affine2 {
    type Output = Affine2;
    // Transform composition: the result applies `rhs` first, then `self`
    // (linear part multiplied, rhs translation carried through self).
    #[inline]
    fn mul(self, rhs: Affine2) -> Self::Output {
        Self {
            matrix2: self.matrix2 * rhs.matrix2,
            translation: self.matrix2 * rhs.translation + self.translation,
        }
    }
}
impl From<Affine2> for Mat3 {
    // Embeds the affine transform as a homogeneous 3x3 matrix: columns are
    // extended with 0 and the translation column with 1.
    #[inline]
    fn from(m: Affine2) -> Mat3 {
        Self::from_cols(
            m.matrix2.x_axis.extend(0.0),
            m.matrix2.y_axis.extend(0.0),
            m.translation.extend(1.0),
        )
    }
}
impl Mul<Mat3> for Affine2 {
    type Output = Mat3;
    // Promote the affine transform to a Mat3 and multiply.
    #[inline]
    fn mul(self, rhs: Mat3) -> Self::Output {
        Mat3::from(self) * rhs
    }
}
impl Mul<Affine2> for Mat3 {
    type Output = Mat3;
    // Promote the affine transform to a Mat3 and multiply.
    #[inline]
    fn mul(self, rhs: Affine2) -> Self::Output {
        self * Mat3::from(rhs)
    }
}
impl From<Affine2> for Mat3A {
    // Same homogeneous embedding as the Mat3 conversion, into SIMD columns.
    #[inline]
    fn from(m: Affine2) -> Mat3A {
        Self::from_cols(
            Vec3A::from((m.matrix2.x_axis, 0.0)),
            Vec3A::from((m.matrix2.y_axis, 0.0)),
            Vec3A::from((m.translation, 1.0)),
        )
    }
}
impl Mul<Mat3A> for Affine2 {
    type Output = Mat3A;
    // Promote the affine transform to a Mat3A and multiply.
    #[inline]
    fn mul(self, rhs: Mat3A) -> Self::Output {
        Mat3A::from(self) * rhs
    }
}
impl Mul<Affine2> for Mat3A {
    type Output = Mat3A;
    // Promote the affine transform to a Mat3A and multiply.
    #[inline]
    fn mul(self, rhs: Affine2) -> Self::Output {
        self * Mat3A::from(rhs)
    }
}
|
use std::sync::Arc;
use common::event::{EventPublisher, EventSubscriber};
use common::result::Result;
use crate::application::handler::{CollectionHandler, PublicationHandler};
use crate::domain::catalogue::{CatalogueRepository, CollectionService, PublicationService};
/// Dependency container for the catalogue context: the event publisher plus
/// the repository and services the handlers need.
pub struct Container<EPub> {
    event_pub: Arc<EPub>,
    catalogue_repo: Arc<dyn CatalogueRepository>,
    collection_serv: Arc<dyn CollectionService>,
    publication_serv: Arc<dyn PublicationService>,
}
impl<EPub> Container<EPub>
where
    EPub: EventPublisher,
{
    /// Builds the container from its already-constructed dependencies.
    pub fn new(
        event_pub: Arc<EPub>,
        catalogue_repo: Arc<dyn CatalogueRepository>,
        collection_serv: Arc<dyn CollectionService>,
        publication_serv: Arc<dyn PublicationService>,
    ) -> Self {
        Container {
            event_pub,
            catalogue_repo,
            collection_serv,
            publication_serv,
        }
    }
    /// Registers the publication and collection event handlers with
    /// `event_sub`; fails if either subscription fails.
    pub async fn subscribe<ES>(&self, event_sub: &ES) -> Result<()>
    where
        ES: EventSubscriber,
    {
        let handler =
            PublicationHandler::new(self.catalogue_repo.clone(), self.publication_serv.clone());
        event_sub.subscribe(Box::new(handler)).await?;
        let handler =
            CollectionHandler::new(self.catalogue_repo.clone(), self.collection_serv.clone());
        event_sub.subscribe(Box::new(handler)).await?;
        Ok(())
    }
    pub fn event_pub(&self) -> &EPub {
        &self.event_pub
    }
    pub fn catalogue_repo(&self) -> &dyn CatalogueRepository {
        self.catalogue_repo.as_ref()
    }
    // Added for consistency: every other stored dependency exposes a getter.
    pub fn collection_serv(&self) -> &dyn CollectionService {
        self.collection_serv.as_ref()
    }
    pub fn publication_serv(&self) -> &dyn PublicationService {
        self.publication_serv.as_ref()
    }
}
|
use std::path::PathBuf;
use structopt::StructOpt;
// NOTE: the `///` doc comments below are emitted by structopt as the CLI
// help text, so they are deliberately left untouched.
/// Rust sudoku solver
#[derive(StructOpt, Debug)]
#[structopt(name="Sudoku-rs")]
pub struct Opt {
    /// Updates per second
    #[structopt(short = "u", long = "ups", default_value = "120")]
    pub ups: u64,
    /// File containing the sudoku
    #[structopt(name = "FILE", parse(from_os_str))]
    pub file: PathBuf,
}
|
extern crate reqwest;
use reqwest::header;
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Header and body values deliberately contain quote characters.
    let mut headers = header::HeaderMap::new();
    headers.insert("A", "''a'".parse().unwrap());
    headers.insert("B", "\"".parse().unwrap());
    headers.insert(header::COOKIE, "x=1'; y=2\"".parse().unwrap());
    headers.insert("Content-Type", "application/x-www-form-urlencoded".parse().unwrap());
    // Redirects disabled so the raw response of the POST itself is returned.
    let client = reqwest::blocking::Client::builder()
        .redirect(reqwest::redirect::Policy::none())
        .build()
        .unwrap();
    let res = client.post("http://localhost:28139")
        .basic_auth("ol'", Some("asd\""))
        .headers(headers)
        .body("a=b&c=\"&d='")
        .send()?
        .text()?;
    println!("{}", res);
    Ok(())
}
|
#[doc = "Register `RCC_STGENCKSELR` reader"]
pub type R = crate::R<RCC_STGENCKSELR_SPEC>;
#[doc = "Register `RCC_STGENCKSELR` writer"]
pub type W = crate::W<RCC_STGENCKSELR_SPEC>;
#[doc = "Field `STGENSRC` reader - STGENSRC"]
pub type STGENSRC_R = crate::FieldReader;
#[doc = "Field `STGENSRC` writer - STGENSRC"]
// 2-bit field writer at offset `O` within the register.
pub type STGENSRC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
impl R {
#[doc = "Bits 0:1 - STGENSRC"]
#[inline(always)]
pub fn stgensrc(&self) -> STGENSRC_R {
STGENSRC_R::new((self.bits & 3) as u8)
}
}
impl W {
    #[doc = "Bits 0:1 - STGENSRC"]
    #[inline(always)]
    #[must_use]
    pub fn stgensrc(&mut self) -> STGENSRC_W<RCC_STGENCKSELR_SPEC, 0> {
        // Field writer anchored at bit offset 0 (2 bits wide, per the
        // `FieldWriter<_, 2, O>` alias above).
        STGENSRC_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must supply a value that is valid for this
        // register — no masking or validation is performed here.
        self.bits = bits;
        self
    }
}
#[doc = "This register is used to select the peripheral clock for the STGEN block. Note that this clock is used to provide a time reference for the application. Refer to Section: Clock enabling delays. If TZEN = , this register can only be modified in secure mode.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_stgenckselr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_stgenckselr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Zero-sized marker type describing the register to the generic Reg API.
pub struct RCC_STGENCKSELR_SPEC;
impl crate::RegisterSpec for RCC_STGENCKSELR_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_stgenckselr::R`](R) reader structure"]
impl crate::Readable for RCC_STGENCKSELR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_stgenckselr::W`](W) writer structure"]
impl crate::Writable for RCC_STGENCKSELR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_STGENCKSELR to value 0"]
impl crate::Resettable for RCC_STGENCKSELR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// Unified error type for this module.
///
/// Wraps free-form message errors plus the Vulkan (`ash`) and FFI string
/// errors that can arise during setup.
#[allow(dead_code)]
#[derive(Debug)]
pub enum UrnError {
    GenericDynamic(String),
    Generic(&'static str),
    AshError(ash::vk::Result),
    AshInstanceError(ash::InstanceError),
    NulError(std::ffi::NulError),
}

impl From<std::ffi::NulError> for UrnError {
    /// Interior-NUL errors from `CString` construction.
    fn from(err: std::ffi::NulError) -> Self {
        Self::NulError(err)
    }
}

impl From<ash::vk::Result> for UrnError {
    /// Raw Vulkan result codes.
    fn from(err: ash::vk::Result) -> Self {
        Self::AshError(err)
    }
}

impl From<ash::InstanceError> for UrnError {
    /// Instance-creation failures reported by `ash`.
    fn from(err: ash::InstanceError) -> Self {
        Self::AshInstanceError(err)
    }
}
|
/// This is a simple component.
///
/// Holds a 3-D integer coordinate. Fields are private; previously the
/// struct was effectively write-only outside this module (setters but no
/// getters), so read accessors are provided alongside the setters. Common
/// derives (`Clone`, `Copy`, `PartialEq`, `Eq`, `Default`) are added; all
/// existing call sites remain valid.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct XYZ {
    x: i32,
    y: i32,
    z: i32,
}

impl XYZ {
    /// Create new `xyz` component.
    pub fn new(x: i32, y: i32, z: i32) -> Self {
        Self { x, y, z }
    }
    /// Get the x value.
    pub fn x(&self) -> i32 {
        self.x
    }
    /// Get the y value.
    pub fn y(&self) -> i32 {
        self.y
    }
    /// Get the z value.
    pub fn z(&self) -> i32 {
        self.z
    }
    /// Set the x value.
    pub fn set_x(&mut self, x: i32) {
        self.x = x;
    }
    /// Set the y value.
    pub fn set_y(&mut self, y: i32) {
        self.y = y;
    }
    /// Set the z value.
    pub fn set_z(&mut self, z: i32) {
        self.z = z;
    }
}
use std::error::Error;
use std::fs;
use std::io::{self, Read};
pub mod config;
use config::Config;
pub mod matches;
use matches::Match;
extern crate termcolor;
use std::io::Write;
use termcolor::{ColorChoice, ColorSpec, StandardStream, WriteColor};
use atty::Stream;
#[cfg(test)]
mod tests {
    use super::*;
    // Case-sensitive search: "duct" matches only the "productive" line;
    // "Duct tape." differs in case and must not appear.
    #[test]
    fn case_sensitive() {
        let query = "duct";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.
Duct tape.";
        let s: Vec<&str> = search(query, contents)
            .iter()
            .map(|a| a.line)
            .collect();
        assert_eq!(vec!["safe, fast, productive."], s);
    }
    // Case-insensitive search: "rUsT" matches both "Rust:" and "Trust me."
    // regardless of letter case.
    #[test]
    fn case_insensitive() {
        let query = "rUsT";
        let contents = "\
Rust:
safe, fast, productive.
Pick three.
Trust me.";
        let s: Vec<&str> = search_case_insensitive(query, contents)
            .iter()
            .map(|a| a.line)
            .collect();
        assert_eq!(
            vec!["Rust:", "Trust me."],
            s
        );
    }
}
/// Drive one grep invocation: load the input, run the search, print hits.
pub fn run(config: Config) -> Result<(), Box<dyn Error>> {
    // Load everything we will search through (file or stdin).
    let content = read_content(&config)?;
    // Dispatch to the matcher selected by the configuration flag.
    let results = match config.case_sensitive {
        true => search(&config.query, &content),
        false => search_case_insensitive(&config.query, &content),
    };
    // Emit the matching lines (highlighted when stdout is a terminal).
    print_results(&results, &config)?;
    Ok(())
}
/// Obtain the text to search: the contents of `config.filename`, or all of
/// stdin when no filename was given.
///
/// Fix: the original wrapped `read_to_string` in a `while … != 0 {}` loop,
/// but a single call already reads until EOF — the loop only added a
/// redundant second read that always returned 0.
fn read_content(config: &Config) -> Result<String, io::Error> {
    if config.filename.is_empty() {
        // No file specified: slurp stdin to EOF in one call.
        let mut buffer = String::new();
        io::stdin().read_to_string(&mut buffer)?;
        Ok(buffer)
    } else {
        fs::read_to_string(&config.filename)
    }
}
/// Print every matching line, highlighting each occurrence of the query
/// when stdout is a terminal; plain output otherwise.
///
/// Fix: `Match.indexes` holds *byte* offsets (they come from `str::find`
/// in `search`), so the highlight span must advance by the query's byte
/// length. The original used `chars().count()`, which under-slices (or
/// panics on a char boundary) whenever the query contains multi-byte
/// characters.
fn print_results(results: &[Match], config: &Config) -> Result<(), Box<dyn Error>> {
    let query_len = config.query.len();
    if atty::is(Stream::Stdout) {
        // Stdout is a terminal: highlight occurrences in the configured color.
        let mut stdout = StandardStream::stdout(ColorChoice::Always);
        let mut color_spec = ColorSpec::new();
        color_spec.set_fg(config.color)
            .set_bold(true)
            .set_intense(true);
        let mut start: usize = 0;
        for m in results {
            for found in &m.indexes {
                // Text before the occurrence, in the default style.
                write!(&mut stdout, "{}", &m.line[start .. *found])?;
                // The occurrence itself, highlighted.
                stdout.set_color(&color_spec)?;
                write!(&mut stdout, "{}", &m.line[*found .. *found + query_len])?;
                // Back to the default style.
                stdout.reset()?;
                start = *found + query_len;
            }
            // Remainder of the line after the final occurrence.
            writeln!(&mut stdout, "{}", &m.line[start ..])?;
            start = 0;
        }
    } else {
        // Not a terminal (pipe/redirect): uncolored output.
        for m in results {
            println!("{}", m.line);
        }
    }
    Ok(())
}
/// Find every line of `content` that contains `query` (case-sensitive).
///
/// Returns one `Match` per matching line; `indexes` records the byte
/// offset of each occurrence. Overlapping occurrences are reported too,
/// because the scan resumes one byte past the start of each hit.
pub fn search<'a>(query: &str, content: &'a str) -> Vec<Match<'a>> {
    let mut results = Vec::new();
    for line in content.lines() {
        let mut offsets = Vec::new();
        let mut from = 0;
        while let Some(pos) = line[from..].find(query) {
            let hit = from + pos;
            offsets.push(hit);
            // Advance one byte past the start of this hit so overlapping
            // occurrences are also found.
            from = hit + 1;
        }
        if !offsets.is_empty() {
            results.push(Match { line, indexes: offsets });
        }
    }
    results
}
/// Find every line of `content` containing `query`, ignoring case.
///
/// `indexes` holds byte offsets into the *lowercased* line; for text whose
/// lowercase mapping preserves byte positions (ASCII in particular) these
/// coincide with offsets into the original line.
///
/// Fix: the original called `line.to_lowercase()` on every loop iteration,
/// allocating a fresh lowercased copy per occurrence (accidental O(n·m));
/// the line is now lowercased once per line. Results are unchanged.
pub fn search_case_insensitive<'a>(query: &str, content: &'a str) -> Vec<Match<'a>> {
    let mut results = Vec::new();
    let query = query.to_lowercase();
    for line in content.lines() {
        // Lowercase once, outside the scan loop.
        let lowered = line.to_lowercase();
        let mut i = 0;
        let mut m: Option<Match> = None;
        while let Some(mut index) = lowered[i..].find(&query) {
            index += i;
            // Resume one byte past the start of this hit.
            i = index + 1;
            match m {
                Some(ref mut m) => {
                    m.indexes.push(index);
                },
                None => {
                    m = Some(Match { line, indexes: vec![index] });
                },
            };
        }
        if let Some(m) = m {
            results.push(m);
        }
    }
    results
}
|
use super::helpers::fixtures::{get_language, get_tags_config};
use crate::query_testing::{parse_position_comments, Assertion};
use crate::test_tags::get_tag_positions;
use tree_sitter::{Parser, Point};
use tree_sitter_tags::TagsContext;
/// End-to-end check of the tags-test machinery on a small Python snippet:
/// the position comments embedded in `source` are parsed into `Assertion`s,
/// and the tagger's reported tag positions are compared with expected
/// `Point` ranges.
#[test]
fn test_tags_test_with_basic_test() {
    let language = get_language("python");
    let config = get_tags_config("python");
    // `# <-` anchors an assertion at the comment's own column on the line
    // above; `# ^` points at a column; `!` negates the assertion.
    let source = [
        "# hi",
        "def abc(d):",
        " # <- definition.function",
        " e = fgh(d)",
        " # ^ reference.call",
        " return d(e)",
        " # ^ reference.call",
        " # ^ !variable.parameter",
        "",
    ]
    .join("\n");
    let assertions =
        parse_position_comments(&mut Parser::new(), language, source.as_bytes()).unwrap();
    // (row, column, negated, capture-name) tuples extracted from `source`.
    assert_eq!(
        assertions,
        &[
            Assertion::new(1, 4, false, String::from("definition.function")),
            Assertion::new(3, 9, false, String::from("reference.call")),
            Assertion::new(5, 11, false, String::from("reference.call")),
            Assertion::new(5, 13, true, String::from("variable.parameter")),
        ]
    );
    let mut tags_context = TagsContext::new();
    let tag_positions = get_tag_positions(&mut tags_context, &config, source.as_bytes()).unwrap();
    // The tagger itself reports (start, end, kind) for each tag it finds;
    // negated assertions have no corresponding entry here.
    assert_eq!(
        tag_positions,
        &[
            (
                Point::new(1, 4),
                Point::new(1, 7),
                "definition.function".to_string()
            ),
            (
                Point::new(3, 8),
                Point::new(3, 11),
                "reference.call".to_string()
            ),
            (
                Point::new(5, 11),
                Point::new(5, 12),
                "reference.call".to_string()
            ),
        ]
    )
}
|
use std::{fs, str};
/// Advent of Code 2020, day 3: count trees hit while sliding down a grid.
pub fn day3() {
    // Load both the worked example and the real puzzle input.
    let example_input = fs::read_to_string("inputs/d3.example").unwrap();
    let input = fs::read_to_string("inputs/d3").unwrap();
    let example_lines: Vec<&str> = example_input.lines().collect();
    let lines: Vec<&str> = input.lines().collect();

    // Part 1: a single slope of (right 3, down 1).
    let trees_p1 = calculate_encountered_trees(&lines, 3, 1);
    println!("Part 1: {} trees encountered.", trees_p1);

    // Part 2: multiply the tree counts over five slopes.
    println!("Part 2:");
    let slopes: Vec<(usize, usize)> = vec![(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)];

    // Sanity-check the algorithm against the example grid first.
    let product_of_all_routes_example: usize = slopes
        .iter()
        .map(|&(right, down)| calculate_encountered_trees(&example_lines, right, down))
        .inspect(|tree_encounters| println!("{} trees encountered", tree_encounters))
        .product();
    println!("All example tree encounters multiplied return: {}", product_of_all_routes_example);

    // Then run the same slopes over the real input.
    let product_of_all_routes: usize = slopes
        .iter()
        .map(|&(right, down)| calculate_encountered_trees(&lines, right, down))
        .inspect(|tree_encounters| println!("{} trees encountered", tree_encounters))
        .product();
    println!("All tree encounters multiplied returns: {}", product_of_all_routes);
}
/// Count '#' cells hit while descending the grid from the top-left, moving
/// `step_right` columns and `step_down` rows per step. Columns wrap around,
/// because the pattern repeats horizontally.
fn calculate_encountered_trees(lines: &[&str], step_right: usize, step_down: usize) -> usize {
    let width = lines[0].chars().count();
    let mut trees = 0;
    // We visit rows 0, step_down, 2*step_down, …; the nth visited row is
    // offset n*step_right columns to the right (modulo the grid width).
    for (step, line) in lines.iter().step_by(step_down).enumerate() {
        match line.chars().nth((step * step_right) % width) {
            Some('#') => trees += 1,
            Some(_) => {}
            None => println!("Failed to access symbol for line {}", step * step_down),
        }
    }
    trees
}
//! # Logic to read a PKI file from a byte stream
use std::convert::TryFrom;
use std::fmt;
use std::fs::File;
use std::io::{BufReader, Error as IoError, Read};
use std::path::Path;
use thiserror::Error;
use super::core::PackIndexFile;
use super::parser;
use nom::{self, error::ErrorKind, Err as NomErr};
#[derive(Debug, Error)]
/// Failed to load a PKI file
// `thiserror` derives `std::error::Error` (wiring `source()` to the
// `#[source]` fields); `Display` is implemented manually further below.
pub enum LoadError {
    /// Failed to open the file
    FileOpen(#[source] IoError),
    /// Failed to read from the file
    Read(#[source] IoError),
    /// EOF while parsing
    Incomplete,
    /// File did not match parser
    ParseError(ErrorKind),
    /// Valid file but invalid data
    ParseFailure(ErrorKind),
}
impl fmt::Display for LoadError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
LoadError::FileOpen(_) => write!(f, "Failed to open file"),
LoadError::Read(_) => write!(f, "Failed to read file"),
LoadError::Incomplete => write!(f, "Unexpected EOF"),
LoadError::ParseError(e) => write!(f, "File not recognized: {:?}", e),
LoadError::ParseFailure(e) => write!(f, "File corrupt: {:?}", e),
}
}
}
/// Shorthand for results whose error type is [`LoadError`].
type LoadResult<T> = Result<T, LoadError>;
// Generates a LoadError from a nom error
impl From<NomErr<nom::error::Error<&[u8]>>> for LoadError {
    fn from(e: NomErr<nom::error::Error<&[u8]>>) -> LoadError {
        match e {
            // Need to translate the error here, as this lives longer than the input
            // — only the `ErrorKind` code is kept; the borrowed input slice is dropped.
            NomErr::Incomplete(_) => LoadError::Incomplete,
            NomErr::Error(e) => LoadError::ParseError(e.code),
            NomErr::Failure(e) => LoadError::ParseFailure(e.code),
        }
    }
}
impl TryFrom<&Path> for PackIndexFile {
    type Error = LoadError;
    /// Open the file at `filename` and parse it as a pack index.
    fn try_from(filename: &Path) -> LoadResult<PackIndexFile> {
        let file = File::open(filename).map_err(LoadError::FileOpen)?;
        PackIndexFile::try_from(file)
    }
}
impl TryFrom<&str> for PackIndexFile {
    type Error = LoadError;
    /// Convenience overload: treat the string as a filesystem path.
    fn try_from(filename: &str) -> LoadResult<PackIndexFile> {
        PackIndexFile::try_from(Path::new(filename))
    }
}
impl TryFrom<File> for PackIndexFile {
    type Error = LoadError;
    /// Read the whole file into memory and parse it as a pack index.
    ///
    /// Fix: the original wrapped the file in a `BufReader` before a single
    /// `read_to_end` — the buffer adds an extra copy and buys nothing for a
    /// one-shot read. We read directly into the Vec, pre-sized from file
    /// metadata when available (the capacity is only a hint, so a failed or
    /// truncating size lookup is harmless).
    fn try_from(mut file: File) -> LoadResult<PackIndexFile> {
        let capacity = file.metadata().map(|m| m.len() as usize).unwrap_or(0);
        let mut bytes: Vec<u8> = Vec::with_capacity(capacity);
        file.read_to_end(&mut bytes).map_err(LoadError::Read)?;
        let (_rest, pki_file) = parser::parse_pki_file(&bytes)?;
        Ok(pki_file)
    }
}
|
/// Return the sum of `x` and `y`.
fn add(x: f64, y: f64) -> f64 {
    x + y
}
fn main() {
println!("Hello, world!");
println!("{}", add(3.0, 4.0))
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.