text stringlengths 8 4.13M |
|---|
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT.
/// SOCKS5 protocol failure error.
///
/// Covers handshake, authentication and reply-parsing failures; the reply
/// status variants (`GeneralSocksServerFailure` … `AddressTypeNotSupported`)
/// mirror the `REP` codes of the SOCKS5 reply.
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Socks5ProtocolFailureError
{
    /// Version was not `5` (actual value in tuple).
    VersionInvalid(u8),
    /// No acceptable authentication methods were supplied by the client.
    NoAcceptableAuthenticationMethodsSupplied,
    /// Credential code which was never sent by client (actual value in tuple).
    CredentialCodeInReplyWasNeverSentByClient(Socks5AuthenticationCredentialCode),
    /// User name was empty.
    EmptyUserName,
    /// Password was empty.
    EmptyPassword,
    /// Version was not `1` (actual value in tuple).
    UserNamePasswordVersionInvalid(u8),
    /// Failed; non-zero status code from the server is in the tuple.
    UserNamePasswordAuthenticationFailed(u8),
    /// General SOCKS5 server failure.
    GeneralSocksServerFailure,
    /// Connection not allowed by ruleset.
    ConnectionNotAllowedByRuleset,
    /// Network unreachable.
    NetworkUnreachable,
    /// Host unreachable.
    HostUnreachable,
    /// Connection refused.
    ConnectionRefused,
    /// Time to Live (TTL) expired.
    TimeToLiveExpired,
    /// Command not supported.
    CommandNotSupported,
    /// Address type not supported.
    AddressTypeNotSupported,
    /// Unassigned error (actual `rep` code is in tuple).
    UnassignedError(u8),
    /// The `RSV` field in the reply was not 0x00.
    ReplyRsvFieldWasNotZero(u8),
    /// The `ATYP` field in the reply was not recognised (actual value in tuple).
    ReplyContainedAnUnrecognisedAddressType(u8),
    /// The address type `ATYP` was for a host name that was empty.
    HostNameInReplyWasEmpty,
    /// The address type `ATYP` was for a host name that was too large (the actual size is in tuple).
    HostNameInReplyWasTooLarge(u8),
}
|
// Copyright 2020-2021, The Tremor Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Blackhole benchmarking offramp
//!
//! Offramp used for benchmarking to generate latency histograms
//!
//! ## Configuration
//!
//! See [Config](struct.Config.html) for details.
#![cfg(not(tarpaulin_include))]
// This is OK, Blackhole is benchmark only
#![allow(clippy::cast_possible_truncation, clippy::cast_precision_loss)]
use crate::sink::prelude::*;
use halfbrown::HashMap;
use hdrhistogram::serialization::{Deserializer, Serializer, V2Serializer};
use hdrhistogram::Histogram;
use std::fmt::Display;
use std::io::{self, stdout, Read, Write};
use std::process;
use std::result;
use std::str;
/// Offramp configuration, deserialized from the offramp's config section.
#[derive(Deserialize, Debug, Clone)]
pub struct Config {
    /// Number of seconds to collect data before the system is stopped.
    /// A value of `0` disables the stop limit (see `has_stop_limit` below).
    pub stop_after_secs: u64,
    /// Significant figures for the histogram
    /// (forwarded to `Histogram::new_with_bounds` as a `u8`).
    pub significant_figures: u64,
    /// Number of seconds to warmup, events during this time are not
    /// accounted for in the latency measurements
    pub warmup_secs: u64,
}
impl ConfigImpl for Config {}
/// A null offramp that records histograms
pub struct Blackhole {
    // config: Config,
    // Nanosecond timestamp after which results are printed and the process
    // exits (only honoured when `has_stop_limit` is true).
    stop_after: u64,
    // Nanosecond timestamp before which events are ignored (warmup window).
    warmup: u64,
    // False when `stop_after_secs` was configured as 0 (run forever).
    has_stop_limit: bool,
    // Latency histogram: ns deltas between event ingest and processing.
    delivered: Histogram<u64>,
    // Measurement window in seconds, used to compute throughput.
    run_secs: f64,
    // Total encoded bytes observed after warmup.
    bytes: usize,
    // Total events observed after warmup.
    count: u64,
    // Reusable scratch buffer for codec encoding.
    buf: Vec<u8>,
}
impl offramp::Impl for Blackhole {
    /// Builds the blackhole sink from its (mandatory) configuration.
    ///
    /// Both the warmup and stop deadlines are absolute nanosecond timestamps
    /// derived from "now"; a `stop_after_secs` of 0 disables the stop limit.
    fn from_config(config: &Option<OpConfig>) -> Result<Box<dyn Offramp>> {
        let config: Config = match config {
            Some(raw) => Config::new(raw)?,
            None => return Err("Blackhole offramp requires a config".into()),
        };
        let now_ns = nanotime();
        let warmup_end_ns = now_ns + config.warmup_secs * 1_000_000_000;
        let stop_ns = now_ns + (config.stop_after_secs + config.warmup_secs) * 1_000_000_000;
        let histogram = Histogram::new_with_bounds(
            1,
            100_000_000_000,
            config.significant_figures as u8,
        )?;
        Ok(SinkManager::new_box(Self {
            // config: config.clone(),
            run_secs: config.stop_after_secs as f64,
            stop_after: stop_ns,
            warmup: warmup_end_ns,
            has_stop_limit: config.stop_after_secs != 0,
            delivered: histogram,
            bytes: 0,
            count: 0,
            buf: Vec::with_capacity(1024),
        }))
    }
}
#[async_trait::async_trait]
impl Sink for Blackhole {
    /// No-op initialisation: the blackhole needs no external resources.
    #[allow(clippy::too_many_arguments)]
    async fn init(
        &mut self,
        _sink_uid: u64,
        _sink_url: &TremorUrl,
        _codec: &dyn Codec,
        _codec_map: &HashMap<String, Box<dyn Codec>>,
        _processors: Processors<'_>,
        _is_linked: bool,
        _reply_channel: Sender<sink::Reply>,
    ) -> Result<()> {
        Ok(())
    }
    /// Records latency/throughput for each event after the warmup window;
    /// once the stop deadline passes, prints the histogram and throughput
    /// numbers and terminates the process.
    async fn on_event(
        &mut self,
        _input: &str,
        codec: &mut dyn Codec,
        _codec_map: &HashMap<String, Box<dyn Codec>>,
        event: Event,
    ) -> ResultVec {
        let now_ns = nanotime();
        if self.has_stop_limit && now_ns > self.stop_after {
            let mut buf = Vec::new();
            let mut serializer = V2Serializer::new();
            // Serialization errors propagate via `?`; only the quantile
            // printing below is checked with `is_ok()`.
            serializer.serialize(&self.delivered, &mut buf)?;
            if quantiles(buf.as_slice(), stdout(), 5, 2).is_ok() {
                println!(
                    "\n\nThroughput (data): {:.1} MB/s\nThroughput (events): {:.1}k events/s",
                    (self.bytes as f64 / self.run_secs) / (1024.0 * 1024.0),
                    (self.count as f64 / self.run_secs) / 1000.0
                );
            } else {
                // FIX: the former message claimed serialization failed, but a
                // serialize error would already have returned via `?` above;
                // reaching this branch means printing the quantiles failed.
                eprintln!("Failed to print quantile histogram");
            }
            // ALLOW: This is on purpose, we use blackhole for benchmarking, so we want it to terminate the process when done
            process::exit(0);
        };
        for value in event.value_iter() {
            // Events inside the warmup window are dropped on the floor.
            if now_ns > self.warmup {
                let delta_ns = now_ns - event.ingest_ns;
                if codec.encode_into(value, &mut self.buf).is_ok() {
                    self.bytes += self.buf.len();
                } else {
                    error!("failed to encode");
                };
                self.count += 1;
                self.buf.clear();
                self.delivered.record(delta_ns)?;
            }
        }
        Ok(None)
    }
    /// Events are discarded, so the `null` codec is the natural default.
    fn default_codec(&self) -> &str {
        "null"
    }
    async fn on_signal(&mut self, _signal: Event) -> ResultVec {
        Ok(None)
    }
    fn is_active(&self) -> bool {
        true
    }
    fn auto_ack(&self) -> bool {
        true
    }
}
/// Deserializes a V2-serialized histogram from `reader` and writes a
/// percentile table (value / percentile / cumulative count / 1/(1-p))
/// followed by summary statistics (mean, stdev, max, bucket counts)
/// to `writer`.
///
/// * `quantile_precision` - decimal digits printed for the percentile column.
/// * `ticks_per_half` - hdrhistogram quantile-iteration density.
fn quantiles<R: Read, W: Write>(
    mut reader: R,
    mut writer: W,
    quantile_precision: usize,
    ticks_per_half: u32,
) -> Result<()> {
    // Writes one aligned `#[label1 = value1, label2 = value2]` summary line.
    fn write_extra_data<T1: Display, T2: Display, W: Write>(
        writer: &mut W,
        label1: &str,
        data1: T1,
        label2: &str,
        data2: T2,
    ) -> result::Result<(), io::Error> {
        writer.write_all(
            format!(
                "#[{:10} = {:12.2}, {:14} = {:12.2}]\n",
                label1, data1, label2, data2
            )
            .as_ref(),
        )
    }
    let hist: Histogram<u64> = Deserializer::new().deserialize(&mut reader)?;
    writer.write_all(
        format!(
            "{:>10} {:>quantile_precision$} {:>10} {:>14}\n\n",
            "Value",
            "Percentile",
            "TotalCount",
            "1/(1-Percentile)",
            quantile_precision = quantile_precision + 2 // + 2 from leading "0." for numbers
        )
        .as_ref(),
    )?;
    let mut sum = 0;
    for v in hist.iter_quantiles(ticks_per_half) {
        sum += v.count_since_last_iteration();
        if v.quantile_iterated_to() < 1.0 {
            writer.write_all(
                format!(
                    "{:12} {:1.*} {:10} {:14.2}\n",
                    v.value_iterated_to(),
                    quantile_precision,
                    // v.quantile(),
                    // quantile_precision,
                    v.quantile_iterated_to(),
                    sum,
                    1_f64 / (1_f64 - v.quantile_iterated_to()),
                )
                .as_ref(),
            )?;
        } else {
            // At p = 1.0 the 1/(1-p) column would divide by zero; print "inf".
            writer.write_all(
                format!(
                    "{:12} {:1.*} {:10} {:>14}\n",
                    v.value_iterated_to(),
                    quantile_precision,
                    // v.quantile(),
                    // quantile_precision,
                    v.quantile_iterated_to(),
                    sum,
                    "inf"
                )
                .as_ref(),
            )?;
        }
    }
    write_extra_data(
        &mut writer,
        "Mean",
        hist.mean(),
        "StdDeviation",
        hist.stdev(),
    )?;
    write_extra_data(&mut writer, "Max", hist.max(), "Total count", hist.len())?;
    write_extra_data(
        &mut writer,
        "Buckets",
        hist.buckets(),
        "SubBuckets",
        hist.distinct_values(),
    )?;
    Ok(())
}
|
/**
* DNA Auth Resolver zome
*
* Provides an API for foreign DNAs to register themselves with (this) local DNA.
*
* @see hc_zome_dna_auth_resolver_lib
*
* @package @holochain-open-dev/dna-auth-resolver
* @since 2021-03-18
*/
use hdk::prelude::*;
use hc_zome_dna_auth_resolver_rpc::*;
use hc_zome_dna_auth_resolver_storage::*;
/**
 * Accept a request from some remote DNA to register an authenticated connection with the local DNA.
 *
 * Reads the zome properties to determine the (statically known) list of available external "permissions"
 * for this DNA, and the internal zome & method names to which they correspond. These are passed into a
 * newly created capability grant in the local DNA and stored to allow the authentication.
 */
#[hdk_extern]
fn register_dna(DnaRegistration { remote_dna, permission_id, secret }: DnaRegistration) -> ExternResult<ZomeCallCapGrant> {
    let tag = get_tag_for_auth(&remote_dna, &permission_id);

    // lookup assigned capability ID
    let cap_fn_mapping: AvailableCapabilities = zome_info()?.properties.try_into()?;
    // FIX: replaces `if None == cap_fn { … }` followed by `cap_fn.unwrap()`
    // with `ok_or_else` + `?` — same error value, no panic path left behind.
    let cap_fn = cap_fn_mapping.permissions.iter()
        .find(|cap| cap.extern_id == permission_id)
        .ok_or_else(|| WasmError::CallError(format!("no permission with ID {:?}", permission_id)))?;

    // create capability grant for the remote requestor, based on the `secret` they provided and the currently executing (local) agent
    let mut assignees = HashSet::new();
    assignees.insert(agent_info()?.agent_latest_pubkey);
    let mut allowed_methods = HashSet::new();
    allowed_methods.insert(cap_fn.allowed_method.to_owned());
    let cap_header = create_cap_grant(CapGrantEntry::new(
        tag,
        CapAccess::Assigned { secret, assignees },
        allowed_methods,
    ))?;

    // read capability grant back out to return it to the caller
    let result = get(cap_header, GetOptions { strategy: GetStrategy::Latest })?;
    let entry = try_entry_from_element(result.as_ref())?;
    match entry.as_cap_grant() {
        Some(CapGrant::RemoteAgent(grant)) => Ok(grant),
        // Deliberate panics: these states indicate corrupted storage rather
        // than caller error, so failing loudly is intended.
        Some(_) => panic!("Wrong capability type assigned in create_cap_grant()! This should never happen."),
        None => panic!("Consistency error storing capability grant! This should never happen."),
    }
}
|
#[allow(unused_imports)]
use stringify::Stringify;
#[allow(unused_imports)]
use std::ffi::{CStr, CString};
#[test]
fn convert_to_cow_str_test() {
    // `convert_to_str` on an owned String yields the original text.
    let owned = String::from("something");
    assert_eq!(owned.convert_to_str(), "something");
}
#[test]
fn convert_to_cstr_test() {
    let string = "something".to_string();
    // FIX: the original called `CStr::from_ptr` on the pointer of a temporary
    // `CString` that was dropped at the end of the statement, leaving `cstr`
    // dangling (undefined behaviour). Keep the backing CString alive in a
    // local and borrow it safely via `as_c_str` instead.
    let backing = CString::new("something").unwrap();
    let cstr = backing.as_c_str();
    assert_eq!(string.convert_to_cstr(), cstr);
}
#[test]
fn convert_to_str_test() {
    // Conversion to a borrowed `&str` must round-trip the contents.
    let source: String = "something".into();
    assert_eq!(source.convert_to_str(), "something");
}
#[test]
fn convert_to_string_test() {
    // Conversion to an owned `String` must equal the source text.
    let source = String::from("something");
    let expected = String::from("something");
    assert_eq!(source.convert_to_string(), expected);
}
#[test]
fn convert_to_libc_char_test() {
    let string = "something".to_string();
    let libc_char1 = string.convert_to_libc_char();
    // FIX: the original compared raw pointer *addresses* from two different
    // allocations (which can never be meaningfully equal) and additionally
    // took `.as_ptr()` of a temporary CString, yielding a dangling pointer.
    // Keep the expected CString alive and compare the pointed-to C-string
    // contents instead.
    let expected = CString::new("something".to_string()).unwrap();
    let actual = unsafe { CStr::from_ptr(libc_char1) };
    assert_eq!(actual, expected.as_c_str());
}
|
use std::fmt;
/// A closed interval `[start, end]` of character codes.
#[derive(Clone, Debug, PartialEq)]
pub struct Interval {
    start: usize,
    end: usize,
}
impl Interval {
    /// Creates the closed interval `[start, end]`.
    pub fn new(start: usize, end: usize) -> Interval {
        Interval { start, end }
    }
    /// Returns true if `point` lies inside this interval (bounds inclusive).
    pub fn contains(&self, point: usize) -> bool {
        self.start <= point && self.end >= point
    }
    /// Returns true if `other` lies entirely inside this interval.
    pub fn contains_interval(&self, other: &Interval) -> bool {
        self.start <= other.start && self.end >= other.end
    }
    /// Sets the (inclusive) upper bound.
    pub fn set_end(&mut self, end: usize) {
        self.end = end;
    }
    /// Sets the (inclusive) lower bound.
    pub fn set_start(&mut self, start: usize) {
        self.start = start;
    }
}
/// A set of character codes, stored as a sorted list of disjoint,
/// non-adjacent intervals. `pos` is the cursor used by `next_interval`.
#[derive(Clone, Debug, PartialEq)]
pub struct IntCharSet {
    intervals: Vec<Interval>,
    pos: usize,
}
impl IntCharSet {
    /// Creates an empty set.
    pub fn new() -> IntCharSet {
        IntCharSet {
            intervals: vec![],
            pos: 0,
        }
    }
    /// Creates a set containing the single given interval.
    pub fn with_interval(interval: Interval) -> IntCharSet {
        let mut set = IntCharSet::new();
        set.intervals.push(interval);
        set
    }
    /// Creates a set containing all the given intervals.
    pub fn with_intervals(intervals: Vec<Interval>) -> IntCharSet {
        let mut set = IntCharSet::new();
        intervals.iter().for_each(|i| set.intervals.push(i.clone()));
        set
    }
    /// Creates a set containing a single character.
    pub fn with_char(c: usize) -> IntCharSet {
        IntCharSet::with_interval(Interval::new(c, c))
    }
    /// Returns the index of the interval that contains the character c,
    /// None if there is no such interval.
    ///
    /// Binary search over the sorted, disjoint interval list.
    pub fn index(&self, c: usize) -> Option<usize> {
        if self.intervals.is_empty() {
            return None;
        }
        let mut start: usize = 0;
        let mut end: usize = self.intervals.len() - 1;
        while start <= end {
            let check = (start + end) / 2;
            let i: &Interval = &self.intervals[check];
            if start == end {
                if i.contains(c) {
                    return Some(start);
                } else {
                    return None;
                }
            }
            if c < i.start {
                if check == 0 {
                    // `c` precedes the first interval; `end` would underflow.
                    return None;
                }
                end = check - 1;
                continue;
            }
            if c > i.end {
                start = check + 1;
                continue;
            }
            return Some(check);
        }
        None
    }
    /// Adds every interval of `set` to this set.
    pub fn add_set(&mut self, set: &IntCharSet) {
        for interval in &set.intervals {
            self.add_interval(interval);
        }
    }
    /// Adds an interval, merging it with any overlapping or adjacent
    /// intervals so the list stays sorted, disjoint and non-adjacent.
    pub fn add_interval(&mut self, interval: &Interval) {
        let mut size = self.intervals.len();
        let mut i: usize = 0;
        while i < size {
            let mut elem = self.intervals[i].clone();
            // Current interval ends with a gap before `interval`: keep scanning.
            if elem.end + 1 < interval.start {
                i += 1;
                continue;
            }
            // Already fully covered: nothing to do.
            if elem.contains_interval(interval) {
                return;
            }
            // Current interval starts with a gap after `interval`: insert here.
            if elem.start > interval.end + 1 {
                self.intervals.insert(i, interval.clone());
                return;
            }
            // Overlapping or adjacent: grow the current interval to cover it.
            if interval.start < elem.start {
                self.intervals[i].start = interval.start;
            }
            if interval.end <= elem.end {
                return;
            }
            self.intervals[i].end = interval.end;
            elem.end = interval.end;
            i += 1;
            // Absorb every following interval that now overlaps or touches
            // the merged interval (which sits at index i - 1).
            while i < size {
                let x = self.intervals[i].clone();
                if x.start > elem.end + 1 {
                    return;
                }
                if x.end > elem.end {
                    // BUG FIX: extend the *merged* interval at i - 1. The old
                    // code wrote `x.end` back into `intervals[i]` (a no-op)
                    // and then removed it, silently losing the tail of `x`
                    // (e.g. {[0-2],[5-9]} + [1-6] became [0-6], not [0-9]).
                    elem.end = x.end;
                    self.intervals[i - 1].end = x.end;
                }
                self.intervals.remove(i);
                size -= 1;
            } // end while
            return;
        } // end loop
        self.intervals.push(interval.clone());
    }
    /// Adds a single character, merging with neighbouring intervals.
    pub fn add_char(&mut self, c: usize) {
        let size = self.intervals.len();
        for i in 0..size {
            let elem = self.intervals[i].clone();
            // Interval ends with a gap before `c`: keep scanning.
            if elem.end + 1 < c {
                continue;
            }
            if elem.contains(c) {
                // already there, nothing todo
                return;
            }
            // Interval starts with a gap after `c`: insert a fresh singleton.
            if elem.start > c + 1 {
                self.intervals.insert(i, Interval::new(c, c));
                return;
            }
            // `c` is directly adjacent on the left: extend the start.
            if c + 1 == elem.start {
                self.intervals[i].start = c;
                return;
            }
            // Only remaining case: `c == elem.end + 1` — extend the end and
            // merge with the next interval if it now touches.
            self.intervals[i].end = c;
            if i + 1 >= size {
                return;
            }
            let x = self.intervals[i + 1].clone();
            if x.start <= c + 1 {
                self.intervals[i].end = x.end;
                self.intervals.remove(i + 1);
            }
            return;
        }
        // end reached but nothing found -> append at end
        self.intervals.push(Interval::new(c, c))
    }
    /// Returns whether the set contains the character `c`.
    pub fn contains(&self, c: usize) -> bool {
        self.index(c).is_some()
    }
    /// Returns whether the set contains no characters.
    pub fn is_empty(&self) -> bool {
        self.intervals.is_empty()
    }
    /// Returns the number of stored intervals.
    pub fn intervals_len(&self) -> usize {
        self.intervals.len()
    }
    /// Returns a copy of the interval list.
    pub fn get_intervals(&self) -> Vec<Interval> {
        self.intervals.clone()
    }
    /// Returns the next interval in round-robin fashion, wrapping the
    /// internal cursor back to the start once the end is reached.
    ///
    /// # Panics
    /// Panics if the set is empty.
    pub fn next_interval(&mut self) -> &Interval {
        if self.pos == self.intervals.len() {
            self.pos = 0;
        }
        assert!(self.intervals.len() > self.pos);
        let interval = &self.intervals[self.pos];
        self.pos += 1;
        interval
    }
    /// Returns the intersection of this set with `set`.
    pub fn and(&self, set: &IntCharSet) -> IntCharSet {
        let mut result = IntCharSet::new();
        let mut i = 0usize;
        let mut k = 0usize;
        let size = self.intervals.len();
        let set_size = set.intervals.len();
        // Linear merge over both sorted interval lists.
        while i < size && k < set_size {
            let x = self.intervals[i].clone();
            let y = set.intervals[k].clone();
            if x.end < y.start {
                i += 1;
                continue;
            }
            if y.end < x.start {
                k += 1;
                continue;
            }
            use std::cmp;
            let interval = Interval::new(cmp::max(x.start, y.start), cmp::min(x.end, y.end));
            result.intervals.push(interval);
            // Advance whichever interval(s) end first.
            if x.end >= y.end {
                k += 1;
            }
            if y.end >= x.end {
                i += 1;
            }
        }
        result
    }
    /// Removes every character of `set` from this set (set difference).
    pub fn sub(&mut self, set: &IntCharSet) {
        let mut i = 0usize;
        let mut k = 0usize;
        let set_size = set.intervals.len();
        while i < self.intervals.len() && k < set_size {
            let mut x = self.intervals[i].clone();
            let y = set.intervals[k].clone();
            if x.end < y.start {
                i += 1;
                continue;
            }
            if y.end < x.start {
                k += 1;
                continue;
            }
            // Exact match: drop the whole interval.
            if x.start == y.start && x.end == y.end {
                self.intervals.remove(i);
                k += 1;
                continue;
            }
            // `y` trims the front of `x`.
            if x.start == y.start {
                x.start = y.end + 1;
                self.intervals[i].start = x.start;
                k += 1;
                continue;
            }
            // `y` trims the back of `x`.
            if x.end == y.end {
                x.end = y.start - 1;
                self.intervals[i].end = x.end;
                i += 1;
                k += 1;
                continue;
            }
            // `y` lies strictly inside `x`: split `x` in two.
            self.intervals[i].start = y.end + 1;
            self.intervals
                .insert(i, Interval::new(x.start, y.start - 1));
            i += 1;
            k += 1;
        }
    }
}
/// An interval of character codes tagged with the character class it maps to.
#[derive(Clone, Debug)]
pub struct CharClassInterval {
    /// First character code of the interval (inclusive).
    pub start: usize,
    /// Last character code of the interval (inclusive).
    pub end: usize,
    /// Number of the character class this interval belongs to.
    pub char_class: usize,
}
impl CharClassInterval {
    /// Builds the tagged interval `[start, end]` for class `char_class`.
    pub fn new(start: usize, end: usize, char_class: usize) -> CharClassInterval {
        CharClassInterval { start, end, char_class }
    }
}
/// Returns true when `c` is a printable ASCII code point (32..=126),
/// i.e. neither a control character nor DEL.
fn is_printable(c: usize) -> bool {
    (32..=126).contains(&c)
}
impl fmt::Display for Interval {
    /// Formats as `[a]` / `[a-b]`, quoting printable ASCII endpoints
    /// (e.g. `['a'-'z']`) and printing others as raw numbers.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // One endpoint, rendered either as a quoted char or as a number.
        let write_endpoint = |f: &mut fmt::Formatter, v: usize| {
            if is_printable(v) {
                write!(f, "'{}'", v as u8 as char)
            } else {
                write!(f, "{}", v)
            }
        };
        write!(formatter, "[")?;
        write_endpoint(formatter, self.start)?;
        if self.start != self.end {
            write!(formatter, "-")?;
            write_endpoint(formatter, self.end)?;
        }
        write!(formatter, "]")
    }
}
impl fmt::Display for CharClassInterval {
    /// Formats as `[start-end=char_class]`.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "[{}-{}={}]", self.start, self.end, self.char_class)
    }
}
impl fmt::Display for IntCharSet {
    /// Formats as `{ [i1][i2]… }` with each interval's own formatting.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{{ ")?;
        self.intervals
            .iter()
            .try_for_each(|interval| write!(formatter, "{}", interval))?;
        write!(formatter, " }}")
    }
}
/// A partition of the character range `0..=max_char_used` into disjoint
/// character classes; `classes[i]` holds the characters of class `i`.
#[derive(Clone, Debug, PartialEq)]
pub struct CharClasses {
    // Invariant: the class sets are pairwise disjoint and together cover
    // the whole range (established by `new`, maintained by `make_class`).
    classes: Vec<IntCharSet>,
    // Highest character code covered by the partition.
    max_char_used: usize,
}
impl CharClasses {
    /// Highest supported character code (maximum Unicode code point).
    pub const MAX_CHAR: usize = 0x10FFFF;
    /// Creates a new partition containing a single class `[0, max_char_code]`.
    ///
    /// # Panics
    /// Panics if `max_char_code` exceeds [`CharClasses::MAX_CHAR`]
    /// (resolves the `// TODO: panic` left in the original).
    pub fn new(max_char_code: usize) -> CharClasses {
        assert!(
            max_char_code <= CharClasses::MAX_CHAR,
            "max_char_code {:#x} exceeds MAX_CHAR {:#x}",
            max_char_code,
            CharClasses::MAX_CHAR
        );
        let mut classes: Vec<IntCharSet> = vec![];
        classes.push(IntCharSet::with_interval(Interval::new(0, max_char_code)));
        CharClasses {
            max_char_used: max_char_code,
            classes,
        }
    }
    /// Returns the highest character code covered by the partition.
    pub fn get_max_char_code(&self) -> usize {
        self.max_char_used
    }
    /// Sets the highest character code covered by the partition.
    pub fn set_max_char_code(&mut self, max_char_code: usize) {
        self.max_char_used = max_char_code;
    }
    /// Returns the number of character classes in the partition.
    pub fn get_num_classes(&self) -> usize {
        self.classes.len()
    }
    /// Updates the current partition, so that the specified set of characters gets a new character class.
    pub fn make_class(&mut self, set: IntCharSet, caseless: bool) {
        let mut set = set;
        if caseless {
            // TODO: set = set.get_caseless();
        }
        let old_size = self.classes.len();
        for i in 0..old_size {
            let mut x: IntCharSet = self.classes[i].clone();
            // `set` already is a class of its own: nothing to split.
            if x == set {
                return;
            }
            let and: IntCharSet = x.and(&set);
            if !and.is_empty() {
                if x == and {
                    // Class `x` is wholly inside `set`: keep it; shrink `set`
                    // and continue distributing the remainder.
                    set.sub(&and);
                    continue;
                } else if set == and {
                    // The remaining `set` splits class `x` into two classes.
                    x.sub(&and);
                    self.classes[i] = x;
                    self.classes.push(and);
                    return;
                }
                // Partial overlap: split `x` and keep distributing `set`.
                set.sub(&and);
                x.sub(&and);
                self.classes[i] = x;
                self.classes.push(and);
            }
        }
    }
    /// Makes a class for the single character `c`.
    pub fn make_class_char(&mut self, c: usize, caseless: bool) {
        self.make_class(IntCharSet::with_char(c), caseless)
    }
    /// Makes a (single-character) class for each character of `s`.
    pub fn make_class_str(&mut self, s: String, caseless: bool) {
        for c in s.chars() {
            self.make_class_char(c as usize, caseless);
        }
    }
    /// Makes a class for the union of the given intervals.
    pub fn make_class_intervals(&mut self, list: Vec<Interval>, caseless: bool) {
        self.make_class(IntCharSet::with_intervals(list), caseless);
    }
    /// Returns the class number containing the character `code`.
    ///
    /// # Panics
    /// Panics if no class contains `code`; since the classes partition the
    /// whole range this indicates internal corruption.
    pub fn get_class_code(&self, code: usize) -> usize {
        for (i, class) in self.classes.iter().enumerate() {
            if class.contains(code) {
                return i;
            }
        }
        // FIX: was `assert!(false); return 0;` — fail with context instead of
        // hiding the panic behind a bare assert and dead `return 0`.
        unreachable!("no character class contains code {}", code)
    }
    /// Returns the class numbers intersecting `set` — or, when `negate` is
    /// true, the class numbers that do NOT intersect it.
    pub fn get_class_code_from_int_char_set(&self, set: IntCharSet, negate: bool) -> Vec<usize> {
        let size = self.classes.len();
        let mut res: Vec<usize> = Vec::with_capacity(size);
        for (i, class) in self.classes.iter().enumerate() {
            let intersects = !set.and(class).is_empty();
            // FIX: replaces the double negation `!(!….is_empty())`;
            // `intersects != negate` selects intersecting classes normally
            // and non-intersecting ones when negated — identical behaviour.
            if intersects != negate {
                res.push(i);
            }
        }
        res
    }
    /// As `get_class_code_from_int_char_set`, starting from raw intervals.
    pub fn get_class_codes_from_interval(&self, list: Vec<Interval>, negate: bool) -> Vec<usize> {
        self.get_class_code_from_int_char_set(IntCharSet::with_intervals(list), negate)
    }
    /// Flattens the partition into an ordered list of `CharClassInterval`s
    /// covering the whole range, each tagged with its class number.
    ///
    /// NOTE(review): advances each class's `next_interval` cursor; assumes the
    /// cursors are untouched (still at 0) when called — confirm at call sites.
    pub fn get_intervals(&mut self) -> Vec<CharClassInterval> {
        let mut num_intervals = 0usize;
        for class in &self.classes {
            num_intervals += class.intervals_len();
        }
        let mut result: Vec<CharClassInterval> = Vec::with_capacity(num_intervals);
        let mut c: usize = 0;
        for _ in 0..num_intervals {
            let code = self.get_class_code(c);
            let iv: &Interval = self.classes[code].next_interval();
            result.push(CharClassInterval::new(iv.start, iv.end, code));
            c = iv.end + 1;
        }
        result
    }
    /// Renders class `index` as a string.
    ///
    /// # Panics
    /// Panics if `index` is out of range.
    pub fn to_string_i(&self, index: usize) -> String {
        assert!(index < self.classes.len());
        format!("{}", self.classes[index])
    }
}
impl fmt::Display for CharClasses {
    /// Lists every class, one `class N` header followed by the class set.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        writeln!(formatter, "CharClasses:")?;
        for (i, class) in self.classes.iter().enumerate() {
            writeln!(formatter, "class {}\n{}", i, class)?;
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Printable endpoints are rendered quoted: ['a'-'z'].
    #[test]
    fn interval_formatter() {
        let i = Interval::new('a' as usize, 'z' as usize);
        assert_eq!("['a'-'z']", format!("{}", i));
    }
    // Adding a covering interval collapses {[0],[3]} into the single [0-4].
    #[test]
    fn int_char_set() {
        let mut a: IntCharSet = IntCharSet::with_char(0);
        a.add_char(3);
        let original_a: IntCharSet = a.clone();
        let b: IntCharSet = IntCharSet::with_interval(Interval::new(0, 4));
        a.add_set(&b);
        assert_eq!(format!("{}", original_a), "{ [0][3] }");
        assert_eq!(format!("{}", b), "{ [0-4] }");
        assert_eq!(format!("{}", a), "{ [0-4] }");
        assert_eq!(a, b);
    }
    // Tagged interval renders as [start-end=class].
    #[test]
    fn char_class_interval_format() {
        let i = CharClassInterval::new(0, 1, 10);
        assert_eq!("[0-1=10]", format!("{}", i));
    }
    // Smoke test: carving [a-z] and [A-Z] out of the ASCII partition
    // (output only inspected manually via the println below).
    #[test]
    fn char_classes_intervals() {
        let mut char_classes = CharClasses::new(127);
        let mut result: Vec<Interval> = vec![];
        result.append(
            IntCharSet::with_interval(Interval::new('a' as usize, 'z' as usize))
                .get_intervals()
                .as_mut(),
        );
        result.append(
            IntCharSet::with_interval(Interval::new('A' as usize, 'Z' as usize))
                .get_intervals()
                .as_mut(),
        );
        char_classes.make_class_intervals(result.clone(), false);
        println!("{}", char_classes);
    }
}
|
#![no_main]
#![no_std]
use core::{
panic::PanicInfo,
sync::atomic::{compiler_fence, Ordering},
};
use cortex_m::asm;
use debouncer::{
typenum::{consts::*, Unsigned},
PortDebouncer,
};
use embedded_hal::digital::v2::OutputPin;
use heapless::spsc::{Consumer, Queue};
use keylib::{packets::AppCommand, PID, VID};
use rtic::app;
use stm32f1xx_hal::{
pac,
prelude::*,
timer::{CountDownTimer, Event, Timer},
usb::{Peripheral as UsbPeripheral, UsbBus, UsbBusType},
};
use usb_device::{bus, class::UsbClass, prelude::*};
#[macro_use]
mod loggy;
mod flash;
mod keyboard;
use flash::{ConfigWriter, FlashError};
use keyboard::{Keykey, Matrix};
// USB device handle backed by the STM32F1 full-speed USB peripheral.
type UsbType = UsbDevice<'static, UsbBus<UsbPeripheral>>;
// The keyboard class instance built on the same static USB bus.
type KeyboardType = Keykey<'static, 'static, UsbBus<UsbPeripheral>>;
// Type-level number of debounced buttons (3: shoot, left, right — see `init`).
pub type BtnsType = U3;
// Value-level button count derived from `BtnsType`.
pub const NUM_BTS: usize = BtnsType::USIZE;
#[app(device = stm32f1xx_hal::pac, peripherals = true)]
const APP: () = {
    // Late resources created in `init` and shared between the tasks below.
    struct Resources {
        // Fires at 200 Hz (configured in `init`) to drive debouncing.
        debouncer_timer: CountDownTimer<pac::TIM2>,
        debouncer_handler: PortDebouncer<U8, BtnsType>,
        usb_dev: UsbType,
        keyboard: KeyboardType,
        // Receiving end of the host-command queue filled by the USB class.
        app_consumer: Consumer<'static, AppCommand, U8>,
        matrix: Matrix,
        // Persists the key matrix configuration to flash.
        writer: ConfigWriter,
    }
    #[init]
    fn init(cx: init::Context) -> init::LateResources {
        // 'static storage for the USB allocator and the command queue
        // (required so the USB bus and queue halves can live forever).
        static mut USB_BUS: Option<bus::UsbBusAllocator<UsbBusType>> = None;
        static mut Q: Queue<AppCommand, U8> = Queue(heapless::i::Queue::new());
        let mut flash = cx.device.FLASH.constrain();
        let mut rcc = cx.device.RCC.constrain();
        let mut gpioa = cx.device.GPIOA.split(&mut rcc.apb2);
        // 8 MHz HSE -> 72 MHz sysclk, 36 MHz pclk1; USB needs these settings
        // to derive a valid clock (checked by the assert below).
        let clocks = rcc
            .cfgr
            .use_hse(8.mhz())
            .sysclk(72.mhz())
            .pclk1(36.mhz())
            .freeze(&mut flash.acr);
        init_log!();
        assert!(clocks.usbclk_valid());
        // buttons, in order: shoot, left, right
        let _ = gpioa.pa0.into_pull_up_input(&mut gpioa.crl);
        let _ = gpioa.pa1.into_pull_up_input(&mut gpioa.crl);
        let _ = gpioa.pa2.into_pull_up_input(&mut gpioa.crl);
        // Flash writer
        let writer = ConfigWriter::new(flash).unwrap();
        // Fall back to a default matrix when no config is stored yet.
        let matrix = writer.get_config().unwrap_or_else(Matrix::new);
        // BluePill board has a pull-up resistor on the D+ line.
        // Pull the D+ pin down to send a RESET condition to the USB bus.
        // This forced reset is needed only for development, without it host
        // will not reset your device when you upload new firmware.
        let mut usb_dp = gpioa.pa12.into_push_pull_output(&mut gpioa.crh);
        usb_dp.set_low().ok();
        asm::delay(clocks.sysclk().0 / 100);
        let usb_dm = gpioa.pa11;
        let usb_dp = usb_dp.into_floating_input(&mut gpioa.crh);
        let usb = UsbPeripheral {
            usb: cx.device.USB,
            pin_dm: usb_dm,
            pin_dp: usb_dp,
        };
        *USB_BUS = Some(UsbBus::new(usb));
        let (prod, cons) = Q.split();
        let keyboard = Keykey::new(USB_BUS.as_ref().unwrap(), prod);
        let usb_dev = UsbDeviceBuilder::new(USB_BUS.as_ref().unwrap(), UsbVidPid(VID, PID))
            .manufacturer("Fake company")
            .product("KeyKey")
            .serial_number("TEST")
            .build();
        // 200 Hz periodic tick that drives `debouncer_task`.
        let mut timer2 =
            Timer::tim2(cx.device.TIM2, &clocks, &mut rcc.apb1).start_count_down(200.hz());
        timer2.listen(Event::Update);
        log!("Init finished");
        init::LateResources {
            debouncer_timer: timer2,
            debouncer_handler: PortDebouncer::new(16, 96),
            usb_dev,
            keyboard,
            app_consumer: cons,
            writer,
            matrix,
        }
    }
    #[idle]
    fn idle(_cx: idle::Context) -> ! {
        loop {
            // This should change to `wfi` eventually, just leaving like this to ease development,
            // since it can be a bit harder to attach to the chip during wfi
            asm::nop();
        }
    }
    // Debounce tick: samples GPIOA, updates the key matrix, sends HID reports
    // and applies queued layout-update commands coming from the host.
    #[task(binds = TIM2, priority = 2, resources = [debouncer_timer, debouncer_handler, keyboard, matrix, app_consumer, writer])]
    fn debouncer_task(mut cx: debouncer_task::Context) {
        cx.resources.debouncer_timer.clear_update_interrupt_flag();
        // The inputs are pull-up (active low), hence the inversion of IDR.
        if cx
            .resources
            .debouncer_handler
            .update(!(unsafe { (*pac::GPIOA::ptr()).idr.read().bits() }))
        {
            let report = cx.resources.matrix.update(cx.resources.debouncer_handler);
            // `keyboard` is also claimed by the higher-priority `usb` task,
            // so it must be accessed through a lock here.
            cx.resources.keyboard.lock(|shared| {
                if shared.set_keyboard_report(report.clone()) {
                    if shared.write(report.as_bytes()).is_err() {
                        log!("Error while sending report");
                    }
                }
            });
        }
        // Update the layout if needed
        if let Some(cmd) = cx.resources.app_consumer.dequeue() {
            let writer = cx.resources.writer;
            if let Err(FlashError::FlashNotErased) = cx.resources.matrix.update_layout(cmd, writer)
            {
                // Something went wrong, erase the flash and try one more time
                writer.write_default().unwrap();
                cx.resources.matrix.update_layout(cmd, writer).unwrap();
            }
        }
    }
    // USB interrupt: poll the device and, when data moved, the keyboard class.
    #[task(binds = USB_LP_CAN_RX0, priority = 3, resources = [usb_dev, keyboard])]
    fn usb(cx: usb::Context) {
        if cx.resources.usb_dev.poll(&mut [cx.resources.keyboard]) {
            cx.resources.keyboard.poll();
        }
    }
};
// Panic handler: log the panic info, then park the core forever.
// `inline(never)` keeps a single recognisable stack frame for debugging.
#[inline(never)]
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
    // Stop interrupt handlers from running with broken invariants.
    cortex_m::interrupt::disable();
    log!("{}", info);
    loop {
        // The fence keeps the busy-wait loop from being optimised away.
        compiler_fence(Ordering::SeqCst);
    }
}
|
extern crate flow_rs;
use flow_rs::simulation::trader::Traders;
use flow_rs::controller::Controller;
use flow_rs::simulation::random_behavior::RandBehavior;
use std::sync::Arc;
/// Entry point: spawns the random trader behaviors that repeatedly send
/// order arrivals/updates/cancels over TCP and WebSocket, then runs the
/// dispatcher until completion.
pub fn main() {
    // FIX: initialise logging first so records emitted while the tasks are
    // being constructed are not dropped (it previously ran halfway through).
    env_logger::init();
    // Initialize the new Trader struct
    let traders = Arc::new(Traders::new());
    // Initialize the dispatcher controller
    let mut controller = Controller::new();
    // Establish the async tasks to repeatedly send orders over tcp
    // FIX: `format!` with no interpolation (clippy::useless_format).
    let tcp_address = "127.0.0.1:5000".to_string();
    let tcp_arrivals = RandBehavior::tcp_arrival_interval(Arc::clone(&traders), 500, tcp_address.clone());
    let tcp_updates = RandBehavior::tcp_update_interval(Arc::clone(&traders), 1000, tcp_address.clone());
    let tcp_cancels = RandBehavior::tcp_cancel_interval(Arc::clone(&traders), 2000, tcp_address.clone());
    controller.push(tcp_arrivals);
    controller.push(tcp_updates);
    controller.push(tcp_cancels);
    // Establish the async tasks to repeatedly send orders over websocket
    let ws_address: &'static str = "ws://127.0.0.1:3015";
    let ws_arrivals = RandBehavior::ws_arrival_interval(Arc::clone(&traders), 500, &ws_address);
    let ws_updates = RandBehavior::ws_update_interval(Arc::clone(&traders), 1000, &ws_address);
    let ws_cancels = RandBehavior::ws_cancel_interval(Arc::clone(&traders), 2000, &ws_address);
    controller.push(ws_arrivals);
    controller.push(ws_updates);
    controller.push(ws_cancels);
    // Start the controller which will asynchronously dispatch the registered tasks
    controller.run();
}
|
use aspect::Aspect;
use component::Component;
use entity::{Entity, EntityEditor, EntityManager};
/// World-facing entity API: create entities, edit existing ones and
/// query whether an entity is still alive.
pub trait Context {
    /// Creates a new entity and returns an editor for it.
    fn create(&mut self) -> EntityEditor;
    /// Returns an editor for the given existing entity.
    fn editor(&mut self, entity: Entity) -> EntityEditor;
    /// Returns whether `entity` is currently alive.
    fn is_alive(&self, entity: Entity) -> bool;
}
/// Crate-internal `Context` backed by a mutable borrow of the entity manager.
pub(crate) struct InternalContext<'a> {
    entity_manager: &'a mut EntityManager
}
impl<'a> InternalContext<'a> {
    /// Wraps the given entity manager for the duration of the borrow.
    pub fn new(entity_manager: &'a mut EntityManager) -> Self {
        InternalContext {
            entity_manager
        }
    }
    /// Returns all entities matching the aspect `T`.
    pub fn get_entities<T: Aspect>(&self) -> Vec<Entity> {
        self.entity_manager.entities::<T>()
    }
}
/// `Context` implementation that delegates every call to the entity manager.
impl<'a> Context for InternalContext<'a> {
    fn create(&mut self) -> EntityEditor {
        self.entity_manager.create()
    }
    fn editor(&mut self, entity: Entity) -> EntityEditor {
        self.entity_manager.editor(entity)
    }
    fn is_alive(&self, entity: Entity) -> bool {
        self.entity_manager.is_alive(entity)
    }
}
//! A module for reverse-word-pairs attempts.
pub mod attempt1;
pub mod attempt2;
use std::io::Write;
use std::fs::File;
use std::time::{Duration, Instant};
/// Conversion of a time value into a whole number of milliseconds.
trait AsMilliseconds {
    /// Returns the current struct as milliseconds.
    fn as_msecs( &self ) -> u64;
}
/// The `AsMilliseconds` trait implementation for `std::time::Duration`.
impl AsMilliseconds for Duration {
    /// Combines the whole seconds (scaled to ms) with the millisecond part
    /// of the sub-second nanoseconds.
    fn as_msecs( &self ) -> u64 {
        let whole_secs_ms = self.as_secs() * 1000;
        let subsec_ms = u64::from( self.subsec_nanos() / 1_000_000 );
        whole_secs_ms + subsec_ms
    }
}
/// Flushes the standard console output, panicking on an I/O failure.
fn flush_console() {
    let mut stdout = std::io::stdout();
    stdout.flush().unwrap();
}
/// The result of a single attempt
pub struct AttemptResult {
    /// The number of found pairs
    pub pairs : usize,
    /// The runtime of the attempt in milliseconds
    /// (timed after the file was opened — see `run_attempt`).
    pub runtime : u64,
}
/// Runs a single attempt against a specified file name.
///
/// The timer starts after the file has been opened, so the reported runtime
/// covers only the attempt itself. Panics if the file cannot be opened.
///
/// #Arguments
///
/// * `filename` - The name of the file to use for this attempt.
/// * `attempt` - The attempt function of type `Fn( &File ) -> usize`.
fn run_attempt<F>( filename : &str, attempt : &F ) -> AttemptResult
    where F : Fn( &File ) -> usize {
    let input = File::open( filename ).unwrap();
    let started = Instant::now();
    let pairs = attempt( &input );
    AttemptResult { pairs, runtime: started.elapsed().as_msecs() }
}
/// Runs a single attempt a given number of times and returns the average result.
///
/// This function also returns the amount of word pairs found. If between any
/// run the number of found pairs differs it will return Err( &str ).
///
/// # Arguments
///
/// * `times` - The number of times the attempt will be run (must be > 0).
/// * `filename` - The name of the file to use for this attempt.
/// * `attempt` - The attempt function of type `Fn( &File ) -> usize`.
pub fn run<F>( name : &str, times : u64, filename : &str, attempt : &F ) -> Result<AttemptResult, &'static str>
    where F : Fn( &File ) -> usize {
    // FIX: `times == 0` previously fell through to a divide-by-zero panic.
    if times == 0 {
        return Err( "The number of runs must be greater than zero." );
    }
    let mut time : u64 = 0;
    // FIX: the original used `count == 0` as a "not yet set" sentinel, so a
    // legitimate first run finding 0 pairs would silently accept any later
    // count. `Option` distinguishes "unset" from "found zero pairs".
    let mut count : Option<usize> = None;
    print!( "Running {}:", name );
    flush_console();
    for _ in 0..times {
        print!( "." );
        let result = run_attempt( filename, attempt );
        time += result.runtime;
        match count {
            None => count = Some( result.pairs ),
            Some( expected ) if result.pairs != expected => {
                println!( "FAILED" );
                return Err( "Found pairs different from previous run." );
            }
            Some( _ ) => {}
        }
        flush_console();
    }
    let runtime = time / times;
    let count = count.expect( "times > 0 guarantees at least one run" );
    println!( "OK [Pairs: {}, Average: {}ms]", count, runtime );
    Ok( AttemptResult { pairs: count, runtime } )
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::BufRead;
use std::io::Cursor;
use std::io::ErrorKind;
use std::io::Result;
/// Byte-level parsing helpers over an in-memory buffer.
///
/// `peek`/`eof` inspect without consuming; the `ignore*` family consumes
/// bytes conditionally and reports whether (or how many) bytes were skipped;
/// the `must_*` variants turn a failed match into an
/// `ErrorKind::InvalidData` error.
pub trait ReadBytesExt {
    /// Returns the next byte as a `char` without consuming it, or `None` at EOF.
    fn peek(&self) -> Option<char>;
    /// Consumes one byte if `f` accepts it; returns whether a byte was consumed.
    fn ignore(&mut self, f: impl Fn(u8) -> bool) -> bool;
    /// Consumes bytes while `f` accepts them; returns the number consumed.
    fn ignores(&mut self, f: impl Fn(u8) -> bool) -> usize;
    /// Consumes the byte `b` if it is next in the stream.
    fn ignore_byte(&mut self, b: u8) -> bool;
    /// Consumes `bs` if the stream starts with exactly those bytes.
    fn ignore_bytes(&mut self, bs: &[u8]) -> bool;
    /// Like `ignore_bytes`, but ASCII case-insensitive.
    fn ignore_insensitive_bytes(&mut self, bs: &[u8]) -> bool;
    /// Consumes a run of ASCII whitespace; returns whether any was consumed.
    fn ignore_white_spaces(&mut self) -> bool;
    /// Appends bytes up to and including `delim` (or to EOF when `delim` is
    /// absent) onto `buf`; returns the number of bytes consumed.
    fn until(&mut self, delim: u8, buf: &mut Vec<u8>) -> usize;
    /// Appends bytes onto `buf` while `f` accepts them; returns the count.
    fn keep_read(&mut self, buf: &mut Vec<u8>, f: impl Fn(u8) -> bool) -> usize;
    /// Whether the stream is exhausted.
    fn eof(&mut self) -> bool;
    /// Errors with `InvalidData` unless the stream is exhausted.
    fn must_eof(&mut self) -> Result<()>;
    /// Errors with `InvalidData` unless `ignore(f)` consumes a byte.
    fn must_ignore(&mut self, f: impl Fn(u8) -> bool) -> Result<()> {
        if !self.ignore(f) {
            return Err(std::io::Error::new(
                ErrorKind::InvalidData,
                "Expected to ignore a byte",
            ));
        }
        Ok(())
    }
    /// Errors with `InvalidData` unless the byte `b` is next.
    fn must_ignore_byte(&mut self, b: u8) -> Result<()>;
    /// Errors with `InvalidData` unless the stream starts with `bs`.
    fn must_ignore_bytes(&mut self, bs: &[u8]) -> Result<()> {
        if !self.ignore_bytes(bs) {
            return Err(std::io::Error::new(
                ErrorKind::InvalidData,
                format!("Expected to have bytes {:?}", bs),
            ));
        }
        Ok(())
    }
    /// Case-insensitive variant of `must_ignore_bytes`.
    fn must_ignore_insensitive_bytes(&mut self, bs: &[u8]) -> Result<()> {
        if !self.ignore_insensitive_bytes(bs) {
            return Err(std::io::Error::new(
                ErrorKind::InvalidData,
                format!("Expected to have insensitive bytes {:?}", bs),
            ));
        }
        Ok(())
    }
}
/// Returns the unread portion of the cursor's buffer.
///
/// Stable replacement for the unstable `Cursor::remaining_slice` (and the
/// private `core::slice::memchr` the old code relied on): the position is
/// clamped to the buffer length so a cursor seeked past the end yields an
/// empty slice instead of panicking.
fn remaining_bytes<T: AsRef<[u8]>>(cursor: &Cursor<T>) -> &[u8] {
    let bytes = cursor.get_ref().as_ref();
    let pos = cursor.position().min(bytes.len() as u64) as usize;
    &bytes[pos..]
}
impl<T> ReadBytesExt for Cursor<T>
where T: AsRef<[u8]>
{
    fn peek(&self) -> Option<char> {
        // NOTE: a raw `u8 as char` only round-trips ASCII/Latin-1; callers
        // use this for ASCII look-ahead.
        remaining_bytes(self).first().map(|&b| b as char)
    }
    fn eof(&mut self) -> bool {
        remaining_bytes(self).is_empty()
    }
    fn must_eof(&mut self) -> Result<()> {
        if !remaining_bytes(self).is_empty() {
            return Err(std::io::Error::new(
                ErrorKind::InvalidData,
                "Must reach the buffer end",
            ));
        }
        Ok(())
    }
    fn ignore(&mut self, f: impl Fn(u8) -> bool) -> bool {
        let available = remaining_bytes(self);
        if available.is_empty() {
            false
        } else if f(available[0]) {
            self.consume(1);
            true
        } else {
            false
        }
    }
    fn ignores(&mut self, f: impl Fn(u8) -> bool) -> usize {
        let available = remaining_bytes(self);
        if available.is_empty() {
            return 0;
        }
        for (index, byt) in available.iter().enumerate() {
            if !f(*byt) {
                self.consume(index);
                return index;
            }
        }
        // Every remaining byte matched: consume them all.
        let len = available.len();
        self.consume(len);
        len
    }
    fn ignore_byte(&mut self, b: u8) -> bool {
        self.ignore(|c| c == b)
    }
    fn ignore_bytes(&mut self, bs: &[u8]) -> bool {
        let available = remaining_bytes(self);
        let len = bs.len();
        if available.len() < len {
            return false;
        }
        let eq = available[..len] == *bs;
        if eq {
            BufRead::consume(self, len);
        }
        eq
    }
    fn must_ignore_byte(&mut self, b: u8) -> Result<()> {
        if !self.ignore_byte(b) {
            return Err(std::io::Error::new(
                ErrorKind::InvalidData,
                format!(
                    "Expected to have char '{}', got '{:?}' at pos {}",
                    b as char,
                    self.peek(),
                    self.position()
                ),
            ));
        }
        Ok(())
    }
    fn ignore_insensitive_bytes(&mut self, bs: &[u8]) -> bool {
        let available = remaining_bytes(self);
        let len = bs.len();
        if available.len() < len {
            return false;
        }
        let eq = available[..len]
            .iter()
            .zip(bs)
            .all(|(x, y)| x.eq_ignore_ascii_case(y));
        if eq {
            BufRead::consume(self, len);
        }
        eq
    }
    fn ignore_white_spaces(&mut self) -> bool {
        self.ignores(|c| c.is_ascii_whitespace()) > 0
    }
    fn until(&mut self, delim: u8, buf: &mut Vec<u8>) -> usize {
        let remaining = remaining_bytes(self);
        // BUG FIX: when the delimiter is absent, consume the rest of the
        // *input*. The previous code used `buf.len()` — the length of the
        // caller's *output* buffer — which read the wrong number of bytes
        // (and could panic when `buf` was longer than the remaining input).
        let to_read = match remaining.iter().position(|&b| b == delim) {
            None => remaining.len(),
            Some(n) => n + 1,
        };
        buf.extend_from_slice(&remaining[..to_read]);
        self.consume(to_read);
        to_read
    }
    fn keep_read(&mut self, buf: &mut Vec<u8>, f: impl Fn(u8) -> bool) -> usize {
        let remaining = remaining_bytes(self);
        // Stop at the first byte `f` rejects; default to taking everything.
        let to_read = remaining
            .iter()
            .position(|&b| !f(b))
            .unwrap_or(remaining.len());
        buf.extend_from_slice(&remaining[..to_read]);
        self.consume(to_read);
        to_read
    }
}
|
/// Reads whitespace-separated tokens from any `Read` source and parses them.
pub struct ProconReader<R: std::io::Read> {
    reader: R,
}
impl<R: std::io::Read> ProconReader<R> {
    /// Wraps the given reader.
    pub fn new(reader: R) -> Self {
        Self { reader }
    }
    /// Reads the next token (skipping leading spaces/CR/LF) and parses it as `T`.
    ///
    /// Panics on I/O errors, invalid UTF-8, or a token that fails to parse.
    pub fn get<T: std::str::FromStr>(&mut self) -> T {
        use std::io::Read;
        let mut token: Vec<u8> = Vec::new();
        for byte in self.reader.by_ref().bytes().map(|b| b.unwrap()) {
            let is_separator = byte == b' ' || byte == b'\n' || byte == b'\r';
            if is_separator {
                // Separators before the token are skipped; the first one
                // after it terminates the token (and is consumed).
                if token.is_empty() {
                    continue;
                }
                break;
            }
            token.push(byte);
        }
        std::str::from_utf8(&token)
            .unwrap()
            .parse()
            .ok()
            .expect("Parse Error.")
    }
}
#[allow(dead_code)]
mod mint {
    //! Generic modular arithmetic.
    //!
    //! `Mint` carries its modulus `mo` alongside the value, so no global or
    //! const-generic modulus is required. The constants one and zero are
    //! derived as `mo / mo` and `mo - mo`, which avoids needing
    //! `One`/`Zero`/`Default` bounds on `T`.
    use std::ops::{Add, BitAnd, Div, Mul, Rem, Shr, Sub};
    /// A value `x` in the integers modulo `mo`.
    #[derive(Copy, Clone)]
    pub struct Mint<T> {
        x: T,
        mo: T,
    }
    impl<T> Mint<T>
    where
        T: Copy,
    {
        /// Wraps `x` with modulus `mo`. Note: `x` is stored as-is, not reduced.
        pub fn new(x: T, mo: T) -> Mint<T> {
            Mint { x, mo }
        }
    }
    impl<T> Mint<T>
    where
        T: Copy,
    {
        /// The stored value.
        pub fn val(&self) -> T {
            self.x
        }
        /// The modulus.
        pub fn mo(&self) -> T {
            self.mo
        }
    }
    /// `self + rhs (mod mo)`; `rhs` is reduced before the addition.
    impl<T> Add<T> for Mint<T>
    where
        T: Copy,
        T: Add<Output = T>,
        T: Rem<Output = T>,
    {
        type Output = Mint<T>;
        fn add(self, rhs: T) -> Mint<T> {
            Mint::new((self.val() + rhs % self.mo()) % self.mo(), self.mo())
        }
    }
    impl<T> Add<Mint<T>> for Mint<T>
    where
        T: Copy,
        Mint<T>: Add<T, Output = Mint<T>>,
    {
        type Output = Mint<T>;
        fn add(self, rhs: Mint<T>) -> Mint<T> {
            self + rhs.val()
        }
    }
    /// `self - rhs (mod mo)`, computed as `val + mo - (rhs % mo)` so the
    /// intermediate stays non-negative for unsigned `T`.
    impl<T> Sub<T> for Mint<T>
    where
        T: Copy,
        T: Add<Output = T>,
        T: Sub<Output = T>,
        T: Rem<Output = T>,
    {
        type Output = Mint<T>;
        fn sub(self, rhs: T) -> Mint<T> {
            Mint::new(
                (self.val() + self.mo() - rhs % self.mo()) % self.mo(),
                self.mo(),
            )
        }
    }
    impl<T> Sub<Mint<T>> for Mint<T>
    where
        T: Copy,
        Mint<T>: Sub<T, Output = Mint<T>>,
    {
        type Output = Mint<T>;
        fn sub(self, rhs: Mint<T>) -> Mint<T> {
            self - rhs.val()
        }
    }
    /// `self * rhs (mod mo)`.
    ///
    /// NOTE(review): `rhs` is not reduced before the multiply, so very large
    /// operands can overflow a fixed-width `T` — callers are expected to pass
    /// reduced values; confirm at call sites.
    impl<T> Mul<T> for Mint<T>
    where
        T: Copy,
        T: Mul<Output = T>,
        T: Rem<Output = T>,
    {
        type Output = Mint<T>;
        fn mul(self, rhs: T) -> Mint<T> {
            Mint::new((self.val() * rhs % self.mo()) % self.mo(), self.mo())
        }
    }
    impl<T> Mul<Mint<T>> for Mint<T>
    where
        T: Copy,
        Mint<T>: Mul<T, Output = Mint<T>>,
    {
        type Output = Mint<T>;
        fn mul(self, rhs: Mint<T>) -> Mint<T> {
            self * rhs.val()
        }
    }
    impl<T> Mint<T>
    where
        T: Copy,
        T: Sub<Output = T>,
        T: Div<Output = T>,
        T: PartialOrd,
        T: PartialEq,
        T: BitAnd<Output = T>,
        T: Shr<Output = T>,
        Mint<T>: Mul<Output = Mint<T>>,
    {
        /// `self^y (mod mo)` by binary (square-and-multiply) exponentiation.
        pub fn pow(self, y: T) -> Mint<T> {
            // `one`/`zero` are synthesised from the modulus to avoid extra
            // trait bounds on `T`.
            let one = self.mo() / self.mo();
            let zero = self.mo() - self.mo();
            let mut res = Mint::one(self.mo());
            let mut base = self;
            let mut exp = y;
            while exp > zero {
                // Multiply in the current square when the low bit is set.
                if (exp & one) == one {
                    res = res * base;
                }
                base = base * base;
                exp = exp >> one;
            }
            res
        }
    }
    /// Division via Fermat's little theorem: multiplies by `rhs^(mo - 2)`,
    /// which is the modular inverse of `rhs` when `mo` is prime (and `rhs`
    /// is non-zero modulo `mo`).
    impl<T> Div<T> for Mint<T>
    where
        T: Copy,
        T: Sub<Output = T>,
        T: Div<Output = T>,
        T: PartialOrd,
        T: PartialEq,
        T: BitAnd<Output = T>,
        T: Shr<Output = T>,
        Mint<T>: Mul<Output = Mint<T>>,
    {
        type Output = Mint<T>;
        fn div(self, rhs: T) -> Mint<T> {
            let one = self.mo() / self.mo();
            // `mo - one - one` == mo - 2.
            self * Mint::new(rhs, self.mo()).pow(self.mo() - one - one)
        }
    }
    impl<T> Div<Mint<T>> for Mint<T>
    where
        T: Copy,
        Mint<T>: Div<T, Output = Mint<T>>,
    {
        type Output = Mint<T>;
        fn div(self, rhs: Mint<T>) -> Mint<T> {
            self / rhs.val()
        }
    }
    impl<T> Mint<T>
    where
        T: Copy,
        T: Div<Output = T>,
        Mint<T>: Div<Output = Mint<T>>,
    {
        /// The modular inverse `1 / self` (requires a prime modulus, see `Div`).
        pub fn inv(self) -> Mint<T> {
            Mint::one(self.mo()) / self
        }
    }
    /// Formats just the value; the modulus is omitted.
    impl<T> std::fmt::Display for Mint<T>
    where
        T: Copy + std::fmt::Display,
    {
        fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
            write!(f, "{}", self.val())
        }
    }
    impl<T> Mint<T>
    where
        T: Copy,
        T: Sub<Output = T>,
    {
        /// The additive identity, computed as `mo - mo`.
        pub fn zero(mo: T) -> Mint<T> {
            Mint { x: mo - mo, mo }
        }
    }
    impl<T> Mint<T>
    where
        T: Copy,
        T: Div<Output = T>,
    {
        /// The multiplicative identity, computed as `mo / mo`.
        pub fn one(mo: T) -> Mint<T> {
            Mint { x: mo / mo, mo }
        }
    }
}
use mint::Mint;
/// Disjoint-set (union-find) with path compression and union by size.
pub struct UnionFind {
    par: Vec<usize>,   // parent of each node; a root points to itself
    size: Vec<usize>,  // component size; only meaningful at roots
}
impl UnionFind {
    /// Creates `n` singleton components.
    pub fn new(n: usize) -> UnionFind {
        UnionFind {
            // `(0..n).map(|i| i)` was a redundant identity map.
            par: (0..n).collect::<Vec<_>>(),
            size: vec![1; n],
        }
    }
    /// Returns the representative of `i`'s component, compressing the path
    /// to the root along the way.
    pub fn find(&mut self, i: usize) -> usize {
        if self.par[i] == i {
            i
        } else {
            self.par[i] = self.find(self.par[i]);
            self.par[i]
        }
    }
    /// Merges the components of `i` and `j` (no-op when already joined),
    /// attaching the smaller tree under the larger one.
    pub fn unite(&mut self, i: usize, j: usize) {
        let i = self.find(i);
        let j = self.find(j);
        if i == j {
            return;
        }
        let (i, j) = if self.size[i] >= self.size[j] {
            (i, j)
        } else {
            (j, i)
        };
        self.par[j] = i;
        self.size[i] += self.size[j];
    }
    /// Whether `i` and `j` belong to the same component.
    pub fn same(&mut self, i: usize, j: usize) -> bool {
        self.find(i) == self.find(j)
    }
    /// Size of the component containing `i`.
    pub fn get_size(&mut self, i: usize) -> usize {
        let p = self.find(i);
        self.size[p]
    }
}
/// An undirected weighted edge.
#[derive(Debug, Clone)]
struct Edge {
    from: usize,
    to: usize,
    cost: i64,
}
/// Kruskal's algorithm: returns the edges of a minimum spanning tree of the
/// `n`-vertex graph, or `None` when the graph is disconnected.
///
/// Takes any edge slice; existing `&Vec<Edge>` call sites still coerce.
fn minimum_spanning_tree(n: usize, edges: &[Edge]) -> Option<Vec<Edge>> {
    // An empty graph has an empty spanning tree; this also avoids the
    // `n - 1` underflow below.
    if n == 0 {
        return Some(Vec::new());
    }
    let mut es = edges.to_vec();
    // Unstable sort: no allocation, and tie order does not affect MST weight.
    es.sort_unstable_by_key(|e| e.cost);
    let mut uf = UnionFind::new(n);
    // Greedily keep every edge that connects two new components.
    let result = es
        .into_iter()
        .filter(|e| {
            if !uf.same(e.from, e.to) {
                uf.unite(e.from, e.to);
                true
            } else {
                false
            }
        })
        .collect::<Vec<_>>();
    if result.len() == n - 1 {
        Some(result)
    } else {
        // A forest on n vertices can never have more than n - 1 edges.
        assert!(result.len() < n - 1);
        None
    }
}
/// Fills `size` with subtree sizes of the tree rooted at `i` (parent `p`):
/// `size[v]` becomes the number of vertices in `v`'s subtree, including `v`.
///
/// Signature generalised from `&Vec<Vec<Edge>>`/`&mut Vec<usize>` to slices
/// (clippy `ptr_arg`); existing `&g` / `&mut size` call sites still coerce.
/// Recursion depth equals the tree height, so extremely deep trees could
/// overflow the stack.
fn dfs(g: &[Vec<Edge>], i: usize, p: usize, size: &mut [usize]) {
    size[i] = 1;
    for e in &g[i] {
        if e.to != p {
            dfs(g, e.to, i, size);
            size[i] += size[e.to];
        }
    }
}
/// Reads a weighted undirected graph, builds its minimum spanning tree, and
/// prints the sum over tree edges of `s * (n - s) * x^cost` modulo 1e9+7,
/// where `s` is the size of the subtree below the edge — i.e. `s * (n - s)`
/// counts the vertex pairs whose tree path crosses that edge.
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    let n: usize = rd.get();
    let m: usize = rd.get();
    let x: usize = rd.get();
    let mut edges = vec![];
    for _ in 0..m {
        let u: usize = rd.get();
        let v: usize = rd.get();
        let w: i64 = rd.get();
        // Input vertices are 1-based; store them 0-based.
        edges.push(Edge {
            from: u - 1,
            to: v - 1,
            cost: w,
        });
    }
    // Panics when the graph is disconnected (no spanning tree exists).
    let tree_edges = minimum_spanning_tree(n, &edges).unwrap();
    // Build a bidirectional adjacency list of the tree.
    let mut g = vec![vec![]; n];
    for e in &tree_edges {
        g[e.from].push(e.clone());
        g[e.to].push(Edge {
            from: e.to,
            to: e.from,
            ..*e
        });
    }
    let mut size = vec![0; n];
    // `!0` is a sentinel parent index no real vertex can have.
    dfs(&g, 0, !0, &mut size);
    let mo = 1_000_000_000 + 7;
    let mut ans = Mint::zero(mo);
    for i in 0..n {
        for e in &g[i] {
            // `size[e.to] < size[i]` holds exactly when `e.to` is the child
            // side of the edge, so each tree edge is counted once.
            if size[e.to] < size[i] {
                ans = ans
                    + Mint::new(size[e.to] * (n - size[e.to]), mo)
                        * Mint::new(x, mo).pow(e.cost as usize);
            }
        }
    }
    println!("{}", ans);
}
|
use std::rc::Rc;
use yew::prelude::*;
use yew_functional::*;
/// Demo function component: a click counter that mirrors its value into the
/// document title via `use_effect`, with a cleanup closure.
#[function_component(UseEffect)]
pub fn effect() -> Html {
    let (counter, set_counter) = use_state(|| 0);
    {
        // Clone the handle so the effect closure owns its own reference.
        let counter = counter.clone();
        use_effect(move || {
            // Make a call to DOM API after component is rendered
            yew::utils::document().set_title(&format!("You clicked {} times", counter));
            // Perform the cleanup
            || yew::utils::document().set_title("You clicked 0 times")
        });
    }
    let onclick = {
        // Another owned handle for the callback closure.
        let counter = Rc::clone(&counter);
        Callback::from(move |_| set_counter(*counter + 1))
    };
    html! {
        <button onclick=onclick>{ format!("Increment to {}", counter) }</button>
    }
}
|
use std::net::IpAddr;
/// The standard BitTorrent client listening ports, 6881-6889, in the order
/// they should be tried.
// Lower-case name kept for API compatibility with existing callers.
#[allow(non_upper_case_globals)]
pub const ports: [u16; 9] = [
    // BUG FIX: the fourth entry was `884`, a typo for `6884`.
    6881, 6882, 6883, 6884, 6885, 6886, 6887, 6888, 6889
];
/// The event a client reports to the tracker in an announce request.
enum TrackerEvent {
    /// First announce for this download.
    Started,
    /// The download completed.
    Completed,
    /// The client is shutting down.
    Stopped,
    /// A plain periodic announce with no status change.
    Empty,
}
/// An announce request sent to the tracker.
///
/// NOTE(review): the type name is missing an `e` (`TrackerRequst` vs
/// `TrackerRequest`); renaming would break external users of this public
/// type, so it is only flagged here.
pub struct TrackerRequst {
    /// SHA-1 hash identifying the torrent.
    pub info_hash: [u8; 20],
    /// This client's self-chosen peer id.
    pub peer_id: [u8; 20],
    pub ip: IpAddr,
    pub port: u16,
    /// Transfer totals — presumably in bytes, per BitTorrent convention;
    /// confirm against the announce-building code.
    pub uploaded: usize,
    pub downloaded: usize,
    pub left: usize,
    pub event: TrackerEvent,
}
/// One peer entry from a tracker response.
pub struct Peer {
    pub peer_id: [u8; 20],
    pub ip: IpAddr,
    pub port: u16,
}
/// The tracker's reply to an announce request.
pub struct TrackerResponse {
    /// Human-readable error text — presumably empty/absent on success;
    /// confirm against the response parser.
    pub failure_reason: String,
    /// Seconds the client should wait between announces.
    pub interval: usize,
    pub peers: Vec<Peer>,
}
use std::mem;
use std::io::{self, Write, Seek, Read};
use std::fmt;
use std::error::Error;
use byteorder::{LittleEndian, WriteBytesExt};
use crate::btex::{BTexTextureInfo};
/// Writes textures in the `btex` container format to a seekable byte sink.
pub struct BTexWriter<'w, W> where W: Write + Seek {
    writer: &'w mut W,
}
impl<'w, W> BTexWriter<'w, W> where W: Write + Seek {
    /// Serialises `texture_info` and the pixel data of every image.
    ///
    /// Layout: 4-byte magic `btex`; a little-endian header (version, lengths,
    /// texture dimensions, 16-byte format name); an offset table holding one
    /// `(offset, length)` `u64` pair per image; then the raw image payloads.
    ///
    /// `pixel_data_sources` holds one optional source per image. An entry may
    /// only be `None` when the texture is sparse; otherwise
    /// `WriteError::IllegallyOmittedImage` is returned.
    pub fn write<'b, R>(&'b mut self, texture_info: &'_ BTexTextureInfo, pixel_data_sources: &'_ mut [Option<&'_ mut PixelDataSource<'_, R>>]) -> Result<(), WriteError> where R: Read {
        /// Maps raw I/O errors into this module's error type.
        fn conv_io_error<T>(result: Result<T, io::Error>) -> Result<T, WriteError> {
            result.map_err(WriteError::IoError)
        }
        // Magic number.
        conv_io_error(self.writer.write_all(&[b'b', b't', b'e', b'x']))?;
        // One (offset, length) u64 pair per image.
        let num_images = texture_info.levels * texture_info.layers;
        let offset_table_length = num_images * 2 * mem::size_of::<u64>() as u32;
        // Header info.
        let header_version = 1;
        let header_length = 64;
        conv_io_error(self.writer.write_u32::<LittleEndian>(header_version))?; // version
        conv_io_error(self.writer.write_u32::<LittleEndian>(header_length))?; // header_length
        conv_io_error(self.writer.write_u32::<LittleEndian>(offset_table_length))?; // offset_table_length
        // Texture info.
        conv_io_error(self.writer.write_u32::<LittleEndian>(texture_info.attributes))?; // attributes
        conv_io_error(self.writer.write_u32::<LittleEndian>(texture_info.width))?; // width
        conv_io_error(self.writer.write_u32::<LittleEndian>(texture_info.height))?; // height
        conv_io_error(self.writer.write_u32::<LittleEndian>(texture_info.depth))?; // depth
        conv_io_error(self.writer.write_u32::<LittleEndian>(texture_info.levels))?; // levels
        conv_io_error(self.writer.write_u32::<LittleEndian>(texture_info.layers))?; // layers
        // Serialise the image format name into a fixed 16-byte, zero-padded
        // field.
        // BUG FIX: `copy_from_slice` panics unless both slices have the same
        // length; the old code copied an 8-byte slice into the whole 16-byte
        // buffer (always panicking) and also panicked for names shorter than
        // 8 bytes. Copy min(name_len, 16) bytes instead.
        let mut image_format_buffer = [0u8; 16];
        let name_bytes = texture_info.image_format.name.as_bytes();
        let name_len = name_bytes.len().min(image_format_buffer.len());
        image_format_buffer[..name_len].copy_from_slice(&name_bytes[..name_len]);
        conv_io_error(self.writer.write_all(&image_format_buffer))?; // image_format
        // Offset table.
        // NOTE(review): the declared header_length (64) exceeds the 56 bytes
        // actually written before the table (4 magic + 24 header + 12 + 16);
        // confirm whether the format pads the header or offsets are meant to
        // be relative.
        let is_sparse_texture = texture_info.is_sparse();
        let mut running_offset = (header_length + offset_table_length) as u64;
        for source in pixel_data_sources.iter_mut() {
            let data_length = match source {
                Some(source) => source.data_length,
                // A missing image is only legal for sparse textures, and then
                // occupies zero bytes.
                None if is_sparse_texture => 0,
                None => return Err(WriteError::IllegallyOmittedImage),
            };
            conv_io_error(self.writer.write_u64::<LittleEndian>(running_offset))?; // offset
            conv_io_error(self.writer.write_u64::<LittleEndian>(data_length))?; // length
            running_offset += data_length;
        }
        // Image payloads, in table order.
        for source in pixel_data_sources.iter_mut() {
            if let Some(source) = source {
                // Cap the copy so an over-long source cannot corrupt the
                // layout promised by the offset table.
                let mut capped_reader = source.source.take(source.data_length);
                let written = io::copy(&mut capped_reader, &mut self.writer)
                    .map_err(WriteError::PixelSourceIoError)?;
                if written < source.data_length {
                    return Err(WriteError::NotEnoughPixelData);
                }
            } else if !is_sparse_texture {
                return Err(WriteError::IllegallyOmittedImage);
            }
            // A legally omitted image contributes no bytes.
        }
        // Everything written successfully.
        Ok(())
    }
}
/// Errors produced while writing a `btex` texture.
#[derive(Debug)]
pub enum WriteError {
    /// An I/O error while writing header or offset-table data.
    IoError(io::Error),
    /// An I/O error while copying pixel data from a source reader.
    PixelSourceIoError(io::Error),
    /// An image source was omitted although the texture is not sparse.
    IllegallyOmittedImage,
    /// A pixel source yielded fewer bytes than its declared `data_length`.
    NotEnoughPixelData,
}
impl Error for WriteError {}
impl fmt::Display for WriteError {
    // Display simply reuses the derived Debug representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        <Self as fmt::Debug>::fmt(self, f)
    }
}
//#[derive(Copy, Clone)]
//pub struct ImageSourceIndex {
// pub level: u32,
// pub layer: u32,
//}
/// A readable source of raw pixel data for one image of a texture.
pub struct PixelDataSource<'a, R> where R: Read {
    /// Number of bytes the writer will copy from `source`.
    pub data_length: u64,
    /// The reader supplying the pixel bytes.
    pub source: &'a mut R,
}
|
//! Tests the memory model.
use crate::PerfModelTest;
use telamon::device::{ArgMap, Context};
use telamon::helper::{Builder, Reduce, SignatureBuilder};
use telamon::ir;
use telamon::search_space::{Action, DimKind, InstFlag, Order};
/// Tests the model in presence of global access replay.
///
/// Loads `f32`s with a 32-float (128-byte) stride between consecutive lanes —
/// presumably so every lane touches a distinct L1 cache line — and reduces
/// them into a single output cell.
pub struct L1LinesPressure;
impl L1LinesPressure {
    /// Iteration count of the outer loop (the `n` kernel parameter).
    const N: u32 = 100;
}
impl PerfModelTest for L1LinesPressure {
    fn name() -> &'static str {
        "l1_lines_pressure"
    }
    /// Declares the loop bound, the strided input array and the output cell.
    fn gen_signature<'a, AM: ArgMap<'a> + Context>(builder: &mut SignatureBuilder<AM>) {
        builder.scalar("n", Self::N as i32);
        builder.array::<f32>("array", 128 * 32 * 32 * 32);
        builder.array::<f32>("out", 1);
    }
    fn gen_function(builder: &mut Builder) -> Self {
        const UNROLL: u32 = 128;
        const THREAD_X: u32 = 32;
        const THREAD_Y: u32 = 32;
        const STRIDE: u32 = 32;
        let t = ir::Type::F(32);
        let size_n = builder.param_size("n", Self::N);
        // 32x32 thread grid; the accumulator is initialised outside the loop.
        let d1_0 = builder.open_dim_ex(ir::Size::new_const(THREAD_Y), DimKind::THREAD);
        let d2_0 = builder.open_dim_ex(ir::Size::new_const(THREAD_X), DimKind::THREAD);
        let init = builder.mov(&0f32);
        let d0 = builder.open_dim_ex(size_n.clone(), DimKind::LOOP);
        let d1_1 = builder.open_mapped_dim(&d1_0);
        let d2_1 = builder.open_mapped_dim(&d2_0);
        let d3 = builder.open_dim_ex(ir::Size::new_const(UNROLL), DimKind::UNROLL);
        // Strides are in bytes (`* 4` = sizeof(f32)).
        let strides = vec![
            (&d3, ir::Size::new_const(THREAD_Y * THREAD_X * 32 * 4)),
            (&d1_1, ir::Size::new_const(THREAD_X * 32 * 4)),
            (&d2_1, ir::Size::new_const(STRIDE * 4)),
        ];
        let pattern = builder.tensor_access_pattern(None, strides.clone());
        let addr = builder.induction_var(&"array", strides);
        let val = builder.ld_ex(t, &addr, pattern, InstFlag::CACHE_GLOBAL);
        let acc = builder.add(&val, &Reduce(init));
        builder.close_dim(&d0);
        builder.close_dim(&d3);
        let d1_2 = builder.open_mapped_dim(&d1_1)[0];
        let d2_2 = builder.open_mapped_dim(&d2_1)[0];
        let out_pattern = ir::AccessPattern::Unknown(None);
        // Store the result so the reduction is not dead code; bypass caches.
        builder.st_ex(&"out", &acc, true, out_pattern, InstFlag::NO_CACHE);
        // Pin the nest: thread dims outer, the reduction loop in the middle,
        // and the final store after the loop.
        builder.order(&d1_0, &d2_0, Order::OUTER);
        builder.order(&d1_0, &d0, Order::BEFORE);
        builder.order(&d0, &d1_1, Order::OUTER);
        builder.order(&d1_1, &d2_1, Order::OUTER);
        builder.order(&d0, &d1_2, Order::BEFORE);
        builder.order(&d1_2, &d2_2, Order::OUTER);
        L1LinesPressure
    }
}
/// Tests the model in presence of global access replay.
///
/// Same structure as `L1LinesPressure` but with an 8-float (32-byte) stride
/// between lanes — presumably targeting L2 line/sector granularity instead.
pub struct L2LinesPressure;
impl L2LinesPressure {
    /// Iteration count of the outer loop (the `n` kernel parameter).
    const N: u32 = 100;
}
impl PerfModelTest for L2LinesPressure {
    fn name() -> &'static str {
        "l2_lines_pressure"
    }
    /// Declares the loop bound, the strided input array and the output cell.
    fn gen_signature<'a, AM: ArgMap<'a> + Context>(builder: &mut SignatureBuilder<AM>) {
        builder.scalar("n", Self::N as i32);
        builder.array::<f32>("array", 128 * 32 * 32 * 8);
        builder.array::<f32>("out", 1);
    }
    fn gen_function(builder: &mut Builder) -> Self {
        const UNROLL: u32 = 128;
        const THREAD_X: u32 = 32;
        const THREAD_Y: u32 = 32;
        const STRIDE: u32 = 8;
        let t = ir::Type::F(32);
        let size_n = builder.param_size("n", Self::N);
        // 32x32 thread grid; the accumulator is initialised outside the loop.
        let d1_0 = builder.open_dim_ex(ir::Size::new_const(THREAD_Y), DimKind::THREAD);
        let d2_0 = builder.open_dim_ex(ir::Size::new_const(THREAD_X), DimKind::THREAD);
        let init = builder.mov(&0f32);
        let d0 = builder.open_dim_ex(size_n.clone(), DimKind::LOOP);
        let d1_1 = builder.open_mapped_dim(&d1_0);
        let d2_1 = builder.open_mapped_dim(&d2_0);
        let d3 = builder.open_dim_ex(ir::Size::new_const(UNROLL), DimKind::UNROLL);
        // Strides are in bytes (`* 4` = sizeof(f32)).
        let strides = vec![
            (&d3, ir::Size::new_const(THREAD_Y * THREAD_X * 8 * 4)),
            (&d1_1, ir::Size::new_const(THREAD_X * 8 * 4)),
            (&d2_1, ir::Size::new_const(STRIDE * 4)),
        ];
        let pattern = builder.tensor_access_pattern(None, strides.clone());
        let addr = builder.induction_var(&"array", strides);
        let val = builder.ld_ex(t, &addr, pattern, InstFlag::CACHE_GLOBAL);
        let acc = builder.add(&val, &Reduce(init));
        builder.close_dim(&d0);
        builder.close_dim(&d3);
        let d1_2 = builder.open_mapped_dim(&d1_1)[0];
        let d2_2 = builder.open_mapped_dim(&d2_1)[0];
        let out_pattern = ir::AccessPattern::Unknown(None);
        // Store the result so the reduction is not dead code; bypass caches.
        builder.st_ex(&"out", &acc, true, out_pattern, InstFlag::NO_CACHE);
        // Pin the nest: thread dims outer, the reduction loop in the middle,
        // and the final store after the loop.
        builder.order(&d1_0, &d2_0, Order::OUTER);
        builder.order(&d1_0, &d0, Order::BEFORE);
        builder.order(&d0, &d1_1, Order::OUTER);
        builder.order(&d1_1, &d2_1, Order::OUTER);
        builder.order(&d0, &d1_2, Order::BEFORE);
        builder.order(&d1_2, &d2_2, Order::OUTER);
        L2LinesPressure
    }
}
/// Measures the cost of repeated shared-memory loads inside a loop.
///
/// Stores the dimension ids needed to pin the loop nest in `get_actions`.
pub struct SharedLoad {
    d0: ir::DimId,
    d1: ir::DimId,
    d2: ir::DimId,
    d3: ir::DimId,
}
impl SharedLoad {
    /// Iteration count of the main loop (the `n` kernel parameter).
    const N: u32 = 1_000;
}
impl PerfModelTest for SharedLoad {
    fn name() -> &'static str {
        "shared_load"
    }
    /// Declares the loop bound, a runtime zero scalar (used as an opaque
    /// pointer offset) and the one-element output array.
    fn gen_signature<'a, AM: ArgMap<'a> + Context>(builder: &mut SignatureBuilder<AM>) {
        builder.scalar("n", Self::N as i32);
        builder.scalar("arg_zero", 0i32);
        builder.array::<f32>("out", 1);
    }
    fn gen_function(builder: &mut Builder) -> Self {
        let size_0 = builder.cst_size(32);
        let size_1 = builder.cst_size(32);
        let size_2 = builder.param_size("n", Self::N);
        let mem = builder.allocate_shared(8 * 32 * 32 * 4);
        // 32x32 thread grid.
        let d0 = builder.open_dim_ex(size_0, DimKind::THREAD);
        let d1 = builder.open_dim_ex(size_1, DimKind::THREAD);
        let (ptr_0, pattern) =
            builder.tensor_access(&mem, mem.into(), ir::Type::F(64), &[&d1, &d0]);
        let ptr_1 = builder.mov(&ptr_0);
        let ptr_to_mem_type = ir::Type::PtrTo(mem);
        // A zero offset cast from a runtime argument — presumably so the
        // address computation cannot be constant-folded out of the loop.
        let ptr_zero = builder.cast(&"arg_zero", ptr_to_mem_type);
        let acc_0 = builder.mov(&0f32);
        let d2 = builder.open_dim_ex(size_2, DimKind::LOOP);
        let d3_size = builder.cst_size(100);
        let d3 = builder.open_dim_ex(d3_size, DimKind::UNROLL);
        // Re-derive the pointer each iteration, then load and accumulate.
        let ptr = builder.add(&Reduce(ptr_1), &ptr_zero);
        let ld = builder.ld(ir::Type::F(32), &ptr, pattern);
        let acc = builder.add(&Reduce(acc_0), &ld);
        builder.close_dim(&d2);
        builder.close_dim(&d3);
        let out_pattern = ir::AccessPattern::Unknown(None);
        // Store so the loads are not dead code; bypass caches.
        builder.st_ex(&"out", &acc, true, out_pattern, InstFlag::NO_CACHE);
        SharedLoad {
            d0: d0[0],
            d1: d1[0],
            d2: d2[0],
            d3: d3[0],
        }
    }
    /// Fixes the nesting order d0 > d1 > d2 > d3.
    fn get_actions(&self) -> Vec<Action> {
        vec![
            Action::Order(self.d0.into(), self.d1.into(), Order::OUTER),
            Action::Order(self.d1.into(), self.d2.into(), Order::OUTER),
            Action::Order(self.d2.into(), self.d3.into(), Order::OUTER),
        ]
    }
}
/// Measures the cost of vectorized shared-memory loads inside a loop.
///
/// Stores the dimension ids needed to pin the loop nest in `get_actions`.
pub struct VectorSharedLoad {
    d0: ir::DimId,
    d1: ir::DimId,
    d2: ir::DimId,
    d3: ir::DimId,
}
impl VectorSharedLoad {
    /// Iteration count of the main loop (the `n` kernel parameter).
    const N: u32 = 1_000;
}
impl PerfModelTest for VectorSharedLoad {
    fn name() -> &'static str {
        "vector_shared_load"
    }
    /// Declares the loop bound, a zero scalar (unused in this kernel body)
    /// and the one-element output array.
    fn gen_signature<'a, AM: ArgMap<'a> + Context>(builder: &mut SignatureBuilder<AM>) {
        builder.scalar("n", Self::N as i32);
        builder.scalar("arg_zero", 0i32);
        builder.array::<f32>("out", 1);
    }
    fn gen_function(builder: &mut Builder) -> Self {
        let size_0 = builder.cst_size(32);
        let size_1 = builder.cst_size(32);
        let size_2 = builder.param_size("n", Self::N);
        let mem = builder.allocate_shared(64 * 4 * 4);
        // 32x32 thread grid.
        let d0 = builder.open_dim_ex(size_0, DimKind::THREAD);
        let d1 = builder.open_dim_ex(size_1, DimKind::THREAD);
        let acc_0 = builder.mov(&0f32);
        let d2 = builder.open_dim_ex(size_2, DimKind::LOOP);
        let d3 = builder.open_dim_ex(ir::Size::new_const(64), DimKind::UNROLL);
        // 4-wide vector dimension for the load.
        let d4 = builder.open_dim_ex(ir::Size::new_const(4), DimKind::VECTOR);
        let (addr, pattern) =
            builder.tensor_access(&mem, mem.into(), ir::Type::F(32), &[&d3, &d4]);
        let ld = builder.ld(ir::Type::F(32), &addr, pattern);
        let d4_2 = builder.open_mapped_dim(&d4);
        let acc = builder.add(&Reduce(acc_0), &ld);
        builder.close_dim(&d2);
        builder.close_dim(&d3);
        builder.close_dim(&d4_2);
        let out_pattern = ir::AccessPattern::Unknown(None);
        // Store so the loads are not dead code; bypass caches.
        builder.st_ex(&"out", &acc, true, out_pattern, InstFlag::NO_CACHE);
        // NOTE(review): the vector dims d4/d4_2 are neither recorded here nor
        // ordered in `get_actions` — confirm this is intentional.
        VectorSharedLoad {
            d0: d0[0],
            d1: d1[0],
            d2: d2[0],
            d3: d3[0],
        }
    }
    /// Fixes the nesting order d0 > d1 > d2 > d3.
    fn get_actions(&self) -> Vec<Action> {
        vec![
            Action::Order(self.d0.into(), self.d1.into(), Order::OUTER),
            Action::Order(self.d1.into(), self.d2.into(), Order::OUTER),
            Action::Order(self.d2.into(), self.d3.into(), Order::OUTER),
        ]
    }
}
/// Tests the model in presence of shared-memory access replay.
pub struct SharedReplay;
impl SharedReplay {
    /// Iteration count of the main loop (the `n` kernel parameter).
    const N: u32 = 1_000;
}
impl PerfModelTest for SharedReplay {
    fn name() -> &'static str {
        "shared_replay"
    }
    /// Declares the loop bound, a runtime zero scalar (used as an opaque
    /// pointer offset) and the one-element output array.
    fn gen_signature<'a, AM: ArgMap<'a> + Context>(builder: &mut SignatureBuilder<AM>) {
        builder.scalar("n", Self::N as i32);
        builder.scalar("arg_zero", 0i32);
        builder.array::<f32>("out", 1);
    }
    fn gen_function(builder: &mut Builder) -> Self {
        let size_0 = builder.cst_size(32);
        let size_1 = builder.cst_size(32);
        let size_2 = builder.param_size("n", Self::N);
        let mem = builder.allocate_shared(8 * 32 * 32 * 4);
        // 32x32 thread grid.
        let d0 = builder.open_dim_ex(size_0, DimKind::THREAD);
        let d1 = builder.open_dim_ex(size_1, DimKind::THREAD);
        let ptr_to_mem_type = ir::Type::PtrTo(mem);
        // Zero offset cast from a runtime argument — keeps the address
        // computation from being constant-folded.
        let ptr_zero = builder.cast(&"arg_zero", ptr_to_mem_type);
        let init = builder.mov(&0f32);
        // F(64)-typed accesses over the f32 buffer give an 8-byte element
        // stride — presumably to provoke bank-conflict replays; confirm
        // against the model under test.
        let (addr_0, pattern) =
            builder.tensor_access(&mem, mem.into(), ir::Type::F(64), &[&d0, &d1]);
        let addr_1 = builder.mov(&addr_0);
        let d2 = builder.open_dim_ex(size_2, DimKind::LOOP);
        let d4 = builder.open_dim_ex(ir::Size::new_const(32), DimKind::UNROLL);
        let d3_0 = builder.open_dim_ex(ir::Size::new_const(4), DimKind::UNROLL);
        let addr = builder.add(&Reduce(addr_1), &ptr_zero);
        let val = builder.ld(ir::Type::F(32), &addr, pattern);
        let d3_1 = builder.open_mapped_dim(&d3_0);
        let acc = builder.add(&val, &Reduce(init));
        builder.close_dim(&d2);
        builder.close_dim(&d4);
        builder.close_dim(&d3_1);
        let out_pattern = ir::AccessPattern::Unknown(None);
        // Store so the loads are not dead code; bypass caches.
        builder.st_ex(&"out", &acc, true, out_pattern, InstFlag::NO_CACHE);
        // Pin the nest: threads outer, then the loop, unrolled dims inside.
        builder.order(&d0, &d1, Order::OUTER);
        builder.order(&d1, &d2, Order::OUTER);
        builder.order(&d4, &d3_0, Order::OUTER);
        builder.order(&d3_0, &d3_1, Order::BEFORE);
        builder.order(&ptr_zero, &init, Order::BEFORE);
        SharedReplay
    }
}
/// Tests the model in presence of shared-memory access replay with
/// vectorized loads.
pub struct VectorSharedReplay;
impl VectorSharedReplay {
    /// Iteration count of the main loop (the `n` kernel parameter).
    const N: u32 = 1_000;
}
impl PerfModelTest for VectorSharedReplay {
    fn name() -> &'static str {
        "vector_shared_replay"
    }
    /// Declares the loop bound, a runtime zero scalar (used as an opaque
    /// pointer offset) and the one-element output array.
    fn gen_signature<'a, AM: ArgMap<'a> + Context>(builder: &mut SignatureBuilder<AM>) {
        builder.scalar("n", Self::N as i32);
        builder.scalar("arg_zero", 0i32);
        builder.array::<f32>("out", 1);
    }
    fn gen_function(builder: &mut Builder) -> Self {
        let size_0 = builder.cst_size(32);
        let size_1 = builder.cst_size(32);
        let size_2 = builder.param_size("n", Self::N);
        let mem_size = 8 * 32 * 32 * 4;
        let mem = builder.allocate_shared(mem_size);
        // 32x32 thread grid.
        let d0 = builder.open_dim_ex(size_0, DimKind::THREAD);
        let d1 = builder.open_dim_ex(size_1, DimKind::THREAD);
        let ptr_to_mem_type = ir::Type::PtrTo(mem);
        // Zero offset cast from a runtime argument — keeps the address
        // computation from being constant-folded.
        let ptr_zero = builder.cast(&"arg_zero", ptr_to_mem_type);
        let init = builder.mov(&0f32);
        // Flat thread index (d0 * 32 + d1), scaled by 16 bytes per thread.
        let idx = builder.mad(&d0, &32i32, &d1);
        let addr_0 = builder.mad(&idx, &16i32, &mem);
        let d2 = builder.open_dim_ex(size_2, DimKind::LOOP);
        let d4 = builder.open_dim_ex(ir::Size::new_const(32), DimKind::UNROLL);
        let addr = builder.add(&Reduce(addr_0), &ptr_zero);
        // 4-wide vector dimension for the load.
        let d3_0 = builder.open_dim_ex(ir::Size::new_const(4), DimKind::VECTOR);
        // Strides in bytes: 4 consecutive f32 per vector lane.
        let pattern = builder.tensor_access_pattern(
            mem.into(),
            vec![
                (&d0, ir::Size::new_const(4 * 4 * 32)),
                (&d1, ir::Size::new_const(4 * 4)),
                (&d3_0, ir::Size::new_const(4)),
            ],
        );
        let val = builder.ld(ir::Type::F(32), &addr, pattern);
        let d3_1 = builder.open_mapped_dim(&d3_0);
        let acc = builder.add(&val, &Reduce(init));
        builder.close_dim(&d2);
        builder.close_dim(&d4);
        builder.close_dim(&d3_1);
        let out_pattern = ir::AccessPattern::Unknown(None);
        // Store so the loads are not dead code; bypass caches.
        builder.st_ex(&"out", &acc, true, out_pattern, InstFlag::NO_CACHE);
        // Pin the nest: threads outer, then the loop, unrolled dims inside.
        builder.order(&d0, &d1, Order::OUTER);
        builder.order(&d1, &d2, Order::OUTER);
        builder.order(&d4, &d3_0, Order::OUTER);
        builder.order(&d3_0, &d3_1, Order::BEFORE);
        builder.order(&ptr_zero, &init, Order::BEFORE);
        VectorSharedReplay
    }
}
// TODO(test): multidimensional global tensor without pressure.
// TODO(test): test RAM bandwidth.
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::Path;
use std::collections::HashMap;
use euclid::*;
/// Marker unit for euclid's typed coordinates in this puzzle.
enum AoC {}
/// Reads `filename` and returns its lines as owned `String`s.
///
/// Panics when the file cannot be opened or a line cannot be decoded.
fn lines_from_file(filename: impl AsRef<Path>) -> Vec<String> {
    let reader = BufReader::new(File::open(filename).expect("no such file"));
    let mut collected = Vec::new();
    for line in reader.lines() {
        collected.push(line.expect("Could not parse line"));
    }
    collected
}
/// Counts tree cells hit when repeatedly stepping by slope `t` from the
/// origin (the origin itself is never checked), wrapping horizontally,
/// until the step leaves the bottom of the grid.
fn count_trees(t: Vector2D<i32, AoC>, grid: &HashMap<Point2D<i32, AoC>, bool>, grid_dim: Size2D<i32, AoC>) -> u32 {
    let mut hits: u32 = 0;
    let mut pos = point2(0, 0);
    while pos.y < grid_dim.height {
        pos += t;
        // Wrap around x coord
        pos.x %= grid_dim.width;
        // Only tree cells were inserted into the map, so membership == tree.
        hits += u32::from(grid.contains_key(&pos));
    }
    hits
}
/// Advent-of-Code-style toboggan puzzle: counts trees hit along several
/// slopes and prints the slope-(3,1) count plus the product of all counts.
fn main() {
    let lines = lines_from_file("input.txt");
    // Grid dimensions from the first line's width and the line count.
    let grid_dim: Size2D<i32, AoC> = size2(lines.get(0).unwrap().len() as i32, lines.len() as i32);
    // for each line, convert trees into points
    let mut grid: HashMap<Point2D<i32, AoC>, bool> = HashMap::new();
    let mut y = 0;
    for line in lines {
        let mut x = 0;
        for c in line.chars() {
            if c == '#' {
                grid.insert(point2(x, y), true);
            }
            x += 1;
        }
        y += 1;
    }
    // The five slopes the puzzle asks about.
    let ts = vec!(
        vec2(1, 1),
        vec2(3, 1),
        vec2(5, 1),
        vec2(7, 1),
        vec2(1, 2)
    );
    let r : Vec<u32> = ts.into_iter().map(|t| count_trees(t, &grid, grid_dim)).collect();
    // Part 1 is the (3, 1) slope — index 1; part 2 is the product of all.
    let part1 = r.get(1).unwrap();
    let acc : u32 = r.iter().product();
    dbg!(part1, acc);
}
|
/// Prints the minimum and maximum values of every primitive signed and
/// unsigned integer type.
fn main() {
    println!(" i8 has the min value of {}.", i8::min_value());
    println!(" i8 has the max value of {}.", i8::max_value());
    println!(" i16 has the min value of {}.", i16::min_value());
    println!(" i16 has the max value of {}.", i16::max_value());
    println!(" i32 has the min value of {}.", i32::min_value());
    println!(" i32 has the max value of {}.", i32::max_value());
    println!(" i64 has the min value of {}.", i64::min_value());
    println!(" i64 has the max value of {}.", i64::max_value());
    println!(" i128 has the min value of {}.", i128::min_value());
    println!(" i128 has the max value of {}.", i128::max_value());
    println!("----------------------------------------------");
    // BUG FIX: this second half printed unsigned values under "i8"/"i16"/...
    // labels; the labels now match the u* types actually queried.
    println!(" u8 has the min value of {}.", u8::min_value());
    println!(" u8 has the max value of {}.", u8::max_value());
    println!(" u16 has the min value of {}.", u16::min_value());
    println!(" u16 has the max value of {}.", u16::max_value());
    println!(" u32 has the min value of {}.", u32::min_value());
    println!(" u32 has the max value of {}.", u32::max_value());
    println!(" u64 has the min value of {}.", u64::min_value());
    println!(" u64 has the max value of {}.", u64::max_value());
    println!(" u128 has the min value of {}.", u128::min_value());
    println!(" u128 has the max value of {}.", u128::max_value());
}
use super::Mbc;
use super::MbcInfo;
/// The MBC3 real-time-clock register file.
#[derive(Debug,Copy,Clone)]
struct Rtc {
    rtc_seconds: u8,
    rtc_minutes: u8,
    rtc_hours: u8,
    rtc_days_low: u8,
    // Day-counter high bit plus flag bits; masked to 0b1100_0001 on write.
    rtc_days_high: u8,
}
/// Memory bank controller 3: banked ROM/RAM plus a latchable RTC.
#[derive(Debug)]
pub struct Mbc3 {
    // When true, writes to external RAM / RTC registers are ignored.
    ram_write_protected: bool,
    rom_bank: u8,
    // Selects a RAM bank (0-3) or an RTC register (0x08-0x0c).
    ram_bank: u8,
    // Last value written to the latch register; a 0 -> 1 edge copies the
    // live RTC into `latched_rtc`.
    rtc_latch: u8,
    rtc: Rtc,
    latched_rtc: Rtc,
    // Byte offset of the currently switched-in ROM bank.
    rom_offset: usize,
    // Byte offset of the currently switched-in RAM bank.
    ram_offset: usize,
    ram: Box<[u8]>,
}
impl Mbc3 {
    /// Builds an MBC3 whose RAM is sized from `mbc_info` (empty when the
    /// cartridge declares no RAM), optionally seeded from a saved `ram` image.
    pub fn new(mbc_info: MbcInfo, ram: Option<Box<[u8]>>) -> Mbc3 {
        let ram = if let Some(ram_info) = mbc_info.ram_info {
            ram_info.make_ram(ram)
        } else {
            vec![0; 0].into_boxed_slice()
        };
        let rtc = Rtc {
            rtc_seconds: 0,
            rtc_minutes: 0,
            rtc_hours: 0,
            rtc_days_low: 0,
            rtc_days_high: 0,
        };
        Mbc3 {
            ram_write_protected: true,
            rom_bank: 0,
            ram_bank: 0,
            rtc_latch: 0,
            rtc: rtc,
            latched_rtc: rtc,
            rom_offset: 0,
            ram_offset: 0,
            ram: ram,
        }
    }
    /// Recomputes the ROM byte offset from the 7-bit bank register;
    /// bank 0 maps to bank 1, and banks are 16 KiB.
    fn update_rom_offset(&mut self) {
        let bank = if self.rom_bank == 0 {
            1
        } else {
            self.rom_bank & 0x7f
        } as usize;
        self.rom_offset = bank * 16 * 1024
    }
    /// Recomputes the RAM byte offset (8 KiB banks).
    fn update_ram_offset(&mut self) {
        self.ram_offset = self.ram_bank as usize * 8 * 1024
    }
}
impl Mbc for Mbc3 {
    /// Reads a byte from cartridge ROM: `0x0000..=0x3fff` is the fixed
    /// bank 0, `0x4000..=0x7fff` the switchable bank at `rom_offset`.
    ///
    /// All range patterns below use `..=` — the legacy `...` syntax is
    /// deprecated and rejected in edition 2021.
    fn read(&self, rom: &Box<[u8]>, addr: u16) -> u8 {
        match addr {
            0x0000..=0x3fff => rom[addr as usize],
            0x4000..=0x7fff => rom[addr as usize - 0x4000 + self.rom_offset],
            _ => panic!("Address out of range 0x{:x}", addr),
        }
    }
    /// Handles writes to the MBC3 control registers, then refreshes the
    /// cached ROM/RAM offsets.
    fn write(&mut self, addr: u16, val: u8) {
        match addr {
            // RAM/RTC write-enable register.
            // NOTE(review): real MBC3 hardware enables on (val & 0x0f) == 0x0a;
            // this compares the whole byte — confirm intended.
            0x0000..=0x1fff => self.ram_write_protected = val != 0x0a,
            0x2000..=0x3fff => self.rom_bank = val,
            // Values 0-3 select a RAM bank, 0x08-0x0c an RTC register.
            0x4000..=0x5fff => self.ram_bank = val,
            // Latch the live RTC into `latched_rtc` on a 0 -> 1 transition.
            0x6000..=0x7fff => {
                if self.rtc_latch == 0 && val == 1 {
                    self.latched_rtc = self.rtc.clone()
                }
                self.rtc_latch = val
            }
            _ => panic!("Illegal address 0x{:x}", addr),
        }
        self.update_rom_offset();
        self.update_ram_offset()
    }
    /// Reads external RAM (banks 0-3) or a latched RTC register (bank
    /// values 0x08-0x0c). `addr` is expected to lie in 0xa000..=0xbfff.
    fn read_ram(&self, addr: u16) -> u8 {
        match self.ram_bank {
            0..=3 => self.ram[addr as usize - 0xa000 + self.ram_offset],
            0x08 => self.latched_rtc.rtc_seconds,
            0x09 => self.latched_rtc.rtc_minutes,
            0x0a => self.latched_rtc.rtc_hours,
            0x0b => self.latched_rtc.rtc_days_low,
            0x0c => self.latched_rtc.rtc_days_high,
            _ => panic!("Illegal ram bank: {:?}", self.ram_bank),
        }
    }
    /// Writes external RAM or a live RTC register; silently ignored while
    /// RAM is write-protected. RTC fields are masked to their valid bits.
    fn write_ram(&mut self, addr: u16, val: u8) {
        if !self.ram_write_protected {
            match self.ram_bank {
                0..=3 => self.ram[addr as usize - 0xa000 + self.ram_offset] = val,
                0x08 => self.rtc.rtc_seconds = val & 0x3f,
                0x09 => self.rtc.rtc_minutes = val & 0x3f,
                0x0a => self.rtc.rtc_hours = val & 0x1f,
                0x0b => self.rtc.rtc_days_low = val,
                0x0c => self.rtc.rtc_days_high = val & 0b1100_0001,
                _ => panic!("Illegal ram bank: {:?}", self.ram_bank),
            }
        }
    }
    /// Returns a copy of external RAM for save files, or `None` when the
    /// cartridge has no RAM.
    fn copy_ram(&self) -> Option<Box<[u8]>> {
        if self.ram.len() > 0 {
            Some(self.ram.clone())
        } else {
            None
        }
    }
}
|
pub mod camera;
pub mod sphere;
use crate::scene::camera::Camera;
use crate::vec3::{Vec3};
use crate::ray::{Ray, hit::{Hittable}};
use crate::image::{Image};
use rand::prelude::*;
use rayon::prelude::*;
/// A renderable scene: a camera plus the set of hittable objects.
pub struct Scene {
    pub camera: Camera,
    pub objects: Vec<Box<dyn Hittable>>
}
impl Scene {
    /// Creates a scene with the given camera and no objects.
    pub fn new(camera: Camera) -> Scene {
        Scene {camera,objects: Vec::new()}
    }
    /// Renders the scene into a `width` x `height` image.
    ///
    /// Rows are traced in parallel via rayon; each pixel averages
    /// `samples_per_pixel` jittered camera rays (recursion capped at
    /// `max_depth`) and is gamma-corrected with a square root per channel.
    pub fn render(&self, width: usize, height: usize, samples_per_pixel: u64, max_depth : u64) -> Image{
        let mut image= Image::new(width, height, Vec3(0.0,0.0,0.0));
        image.pixels_mut()
            // Each `width`-sized chunk is one image row, so `i` is the row
            // index and `j` the column index within it.
            .par_chunks_exact_mut(width).enumerate().for_each(
            |(i,row)| row.par_iter_mut().enumerate().for_each(
                |(j,pixel)| {
                    let mut color = Vec3(0.0,0.0,0.0);
                    for _sample in 0..samples_per_pixel {
                        let mut rng = rand::thread_rng();
                        // NOTE(review): `i` (row) is divided by `width` and
                        // `j` (column) by `height`; confirm this matches the
                        // camera's u/v convention — it only cancels out when
                        // the image is square.
                        let u = (i as f64 + rng.gen::<f64>() as f64) / (width as f64);
                        let v = (j as f64 + rng.gen::<f64>()) / (height as f64);
                        let r = self.camera.get_ray(u,v);
                        color = color + r.color(&self.objects, max_depth);
                    }
                    // Average the samples, then gamma-correct (gamma = 2).
                    color.0 = (color.0 * (1.0/ samples_per_pixel as f64)).sqrt();
                    color.1 = (color.1 * (1.0/ samples_per_pixel as f64)).sqrt();
                    color.2 = (color.2 * (1.0/ samples_per_pixel as f64)).sqrt();
                    *pixel= color
                })
            );
        image
    }
    /// Adds a hittable object to the scene.
    pub fn add(&mut self, obj: Box<dyn Hittable>) {
        self.objects.push(obj);
    }
}
|
use std::io::Write;
use std::process::{Command, Stdio};
/// Renders Graphviz DOT source `graph` to `output_path` by piping it to the
/// `dot` executable. The output format is taken from the file extension of
/// `output_path` (e.g. `.png` -> `-Tpng`).
///
/// # Panics
/// Panics if `output_path` has no (non-empty) extension, if writing to `dot`
/// fails, or if `dot` exits unsuccessfully or emits any diagnostics.
pub fn save_graph(graph: &str, output_path: &str) {
    let dot_position = output_path.rfind('.').expect("`output_path` doesn't have an extension.");
    let format = &output_path[dot_position + 1..];
    assert!(!format.is_empty(), "Invalid file format.");
    let process = Command::new("dot")
        .args(&[&format!("-T{}", format), "-o", output_path])
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn();
    match process {
        Ok(mut process) => {
            // Scope the mutable borrow of stdin so `wait_with_output` can take
            // ownership of the child afterwards.
            {
                let stdin = process.stdin.as_mut()
                    .expect("Getting `dot` stdin failed.");
                stdin.write_all(graph.as_bytes())
                    .expect("Writing to `dot` stdin failed.");
            }
            let output = process.wait_with_output()
                .expect("Waiting for `dot` failed.");
            // `dot -o` writes the image to a file, so any stdout/stderr text
            // is treated as a diagnostic and echoed before failing.
            let no_output = output.stderr.is_empty() && output.stdout.is_empty();
            if !no_output {
                println!("{}", String::from_utf8_lossy(&output.stdout));
                println!("{}", String::from_utf8_lossy(&output.stderr));
            }
            assert!(output.status.success() && no_output,
                    "`dot` failed to generate graph.");
        }
        // Include the underlying OS error instead of silently discarding it.
        Err(err) => {
            println!("WARNING: Failed to run `dot`, it's probably not installed. ({})", err);
        }
    }
}
|
#[doc = "Reader of register RCC"]
pub type R = crate::R<u32, super::RCC>;
#[doc = "Writer for register RCC"]
pub type W = crate::W<u32, super::RCC>;
#[doc = "Register RCC `reset()`'s with value 0"]
impl crate::ResetValue for super::RCC {
    type Type = u32;
    // Every field of RCC documents "Value on reset: 0", so the whole
    // register resets to zero.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `MOSCDIS`"]
pub type MOSCDIS_R = crate::R<bool, bool>;
/// Write proxy for field `MOSCDIS`
pub struct MOSCDIS_W<'a> {
    w: &'a mut W,
}
impl<'a> MOSCDIS_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Field occupies bit 0: clear it, then merge in the new value
        // (`bool as u32` is always 0 or 1).
        let cleared = self.w.bits & !0x01;
        self.w.bits = cleared | u32::from(value);
        self.w
    }
}
#[doc = "Oscillator Source\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
// Discriminants are the raw encodings of the 2-bit OSCSRC field.
pub enum OSCSRC_A {
    #[doc = "0: MOSC"]
    MAIN = 0,
    #[doc = "1: IOSC"]
    INT = 1,
    #[doc = "2: IOSC/4"]
    INT4 = 2,
    #[doc = "3: LFIOSC"]
    _30 = 3,
}
impl From<OSCSRC_A> for u8 {
    /// Converts a variant into its raw 2-bit field encoding.
    #[inline(always)]
    fn from(variant: OSCSRC_A) -> Self {
        // Explicit target type instead of `as _` inference.
        variant as u8
    }
}
#[doc = "Reader of field `OSCSRC`"]
pub type OSCSRC_R = crate::R<u8, OSCSRC_A>;
impl OSCSRC_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OSCSRC_A {
        // All four possible 2-bit values have a variant, so this is total;
        // the reader masks to 2 bits, hence `unreachable!()` truly is.
        match self.bits {
            0 => OSCSRC_A::MAIN,
            1 => OSCSRC_A::INT,
            2 => OSCSRC_A::INT4,
            3 => OSCSRC_A::_30,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `MAIN`"]
    #[inline(always)]
    pub fn is_main(&self) -> bool {
        *self == OSCSRC_A::MAIN
    }
    #[doc = "Checks if the value of the field is `INT`"]
    #[inline(always)]
    pub fn is_int(&self) -> bool {
        *self == OSCSRC_A::INT
    }
    #[doc = "Checks if the value of the field is `INT4`"]
    #[inline(always)]
    pub fn is_int4(&self) -> bool {
        *self == OSCSRC_A::INT4
    }
    #[doc = "Checks if the value of the field is `_30`"]
    #[inline(always)]
    pub fn is_30(&self) -> bool {
        *self == OSCSRC_A::_30
    }
}
#[doc = "Write proxy for field `OSCSRC`"]
pub struct OSCSRC_W<'a> {
    w: &'a mut W,
}
impl<'a> OSCSRC_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: OSCSRC_A) -> &'a mut W {
        // Removed the redundant block that wrapped this single expression.
        // `bits` is a safe fn here because every 2-bit value is a defined
        // `OSCSRC_A` variant.
        self.bits(variant.into())
    }
    #[doc = "MOSC"]
    #[inline(always)]
    pub fn main(self) -> &'a mut W {
        self.variant(OSCSRC_A::MAIN)
    }
    #[doc = "IOSC"]
    #[inline(always)]
    pub fn int(self) -> &'a mut W {
        self.variant(OSCSRC_A::INT)
    }
    #[doc = "IOSC/4"]
    #[inline(always)]
    pub fn int4(self) -> &'a mut W {
        self.variant(OSCSRC_A::INT4)
    }
    #[doc = "LFIOSC"]
    #[inline(always)]
    pub fn _30(self) -> &'a mut W {
        self.variant(OSCSRC_A::_30)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // 2-bit field at bit offset 4: clear, then merge the masked value.
        self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u32) & 0x03) << 4);
        self.w
    }
}
#[doc = "Crystal Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
// NOTE: discriminants start at 6 — raw values 0-5 (and 27-31) have no
// variant here and are reported as reserved by `XTAL_R::variant`.
pub enum XTAL_A {
    #[doc = "6: 4 MHz"]
    _4MHZ = 6,
    #[doc = "7: 4.096 MHz"]
    _4_09MHZ = 7,
    #[doc = "8: 4.9152 MHz"]
    _4_91MHZ = 8,
    #[doc = "9: 5 MHz"]
    _5MHZ = 9,
    #[doc = "10: 5.12 MHz"]
    _5_12MHZ = 10,
    #[doc = "11: 6 MHz"]
    _6MHZ = 11,
    #[doc = "12: 6.144 MHz"]
    _6_14MHZ = 12,
    #[doc = "13: 7.3728 MHz"]
    _7_37MHZ = 13,
    #[doc = "14: 8 MHz"]
    _8MHZ = 14,
    #[doc = "15: 8.192 MHz"]
    _8_19MHZ = 15,
    #[doc = "16: 10 MHz"]
    _10MHZ = 16,
    #[doc = "17: 12 MHz"]
    _12MHZ = 17,
    #[doc = "18: 12.288 MHz"]
    _12_2MHZ = 18,
    #[doc = "19: 13.56 MHz"]
    _13_5MHZ = 19,
    #[doc = "20: 14.31818 MHz"]
    _14_3MHZ = 20,
    #[doc = "21: 16 MHz"]
    _16MHZ = 21,
    #[doc = "22: 16.384 MHz"]
    _16_3MHZ = 22,
    #[doc = "23: 18.0 MHz (USB)"]
    _18MHZ = 23,
    #[doc = "24: 20.0 MHz (USB)"]
    _20MHZ = 24,
    #[doc = "25: 24.0 MHz (USB)"]
    _24MHZ = 25,
    #[doc = "26: 25.0 MHz (USB)"]
    _25MHZ = 26,
}
impl From<XTAL_A> for u8 {
    /// Converts a variant into its raw 5-bit field encoding.
    #[inline(always)]
    fn from(variant: XTAL_A) -> Self {
        // Explicit target type instead of `as _` inference.
        variant as u8
    }
}
#[doc = "Reader of field `XTAL`"]
pub type XTAL_R = crate::R<u8, XTAL_A>;
impl XTAL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, XTAL_A> {
        use crate::Variant::*;
        // Raw values with no defined variant (0-5, 27-31) are surfaced as
        // `Res(raw)` rather than panicking.
        match self.bits {
            6 => Val(XTAL_A::_4MHZ),
            7 => Val(XTAL_A::_4_09MHZ),
            8 => Val(XTAL_A::_4_91MHZ),
            9 => Val(XTAL_A::_5MHZ),
            10 => Val(XTAL_A::_5_12MHZ),
            11 => Val(XTAL_A::_6MHZ),
            12 => Val(XTAL_A::_6_14MHZ),
            13 => Val(XTAL_A::_7_37MHZ),
            14 => Val(XTAL_A::_8MHZ),
            15 => Val(XTAL_A::_8_19MHZ),
            16 => Val(XTAL_A::_10MHZ),
            17 => Val(XTAL_A::_12MHZ),
            18 => Val(XTAL_A::_12_2MHZ),
            19 => Val(XTAL_A::_13_5MHZ),
            20 => Val(XTAL_A::_14_3MHZ),
            21 => Val(XTAL_A::_16MHZ),
            22 => Val(XTAL_A::_16_3MHZ),
            23 => Val(XTAL_A::_18MHZ),
            24 => Val(XTAL_A::_20MHZ),
            25 => Val(XTAL_A::_24MHZ),
            26 => Val(XTAL_A::_25MHZ),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `_4MHZ`"]
    #[inline(always)]
    pub fn is_4mhz(&self) -> bool {
        *self == XTAL_A::_4MHZ
    }
    #[doc = "Checks if the value of the field is `_4_09MHZ`"]
    #[inline(always)]
    pub fn is_4_09mhz(&self) -> bool {
        *self == XTAL_A::_4_09MHZ
    }
    #[doc = "Checks if the value of the field is `_4_91MHZ`"]
    #[inline(always)]
    pub fn is_4_91mhz(&self) -> bool {
        *self == XTAL_A::_4_91MHZ
    }
    #[doc = "Checks if the value of the field is `_5MHZ`"]
    #[inline(always)]
    pub fn is_5mhz(&self) -> bool {
        *self == XTAL_A::_5MHZ
    }
    #[doc = "Checks if the value of the field is `_5_12MHZ`"]
    #[inline(always)]
    pub fn is_5_12mhz(&self) -> bool {
        *self == XTAL_A::_5_12MHZ
    }
    #[doc = "Checks if the value of the field is `_6MHZ`"]
    #[inline(always)]
    pub fn is_6mhz(&self) -> bool {
        *self == XTAL_A::_6MHZ
    }
    #[doc = "Checks if the value of the field is `_6_14MHZ`"]
    #[inline(always)]
    pub fn is_6_14mhz(&self) -> bool {
        *self == XTAL_A::_6_14MHZ
    }
    #[doc = "Checks if the value of the field is `_7_37MHZ`"]
    #[inline(always)]
    pub fn is_7_37mhz(&self) -> bool {
        *self == XTAL_A::_7_37MHZ
    }
    #[doc = "Checks if the value of the field is `_8MHZ`"]
    #[inline(always)]
    pub fn is_8mhz(&self) -> bool {
        *self == XTAL_A::_8MHZ
    }
    #[doc = "Checks if the value of the field is `_8_19MHZ`"]
    #[inline(always)]
    pub fn is_8_19mhz(&self) -> bool {
        *self == XTAL_A::_8_19MHZ
    }
    #[doc = "Checks if the value of the field is `_10MHZ`"]
    #[inline(always)]
    pub fn is_10mhz(&self) -> bool {
        *self == XTAL_A::_10MHZ
    }
    #[doc = "Checks if the value of the field is `_12MHZ`"]
    #[inline(always)]
    pub fn is_12mhz(&self) -> bool {
        *self == XTAL_A::_12MHZ
    }
    #[doc = "Checks if the value of the field is `_12_2MHZ`"]
    #[inline(always)]
    pub fn is_12_2mhz(&self) -> bool {
        *self == XTAL_A::_12_2MHZ
    }
    #[doc = "Checks if the value of the field is `_13_5MHZ`"]
    #[inline(always)]
    pub fn is_13_5mhz(&self) -> bool {
        *self == XTAL_A::_13_5MHZ
    }
    #[doc = "Checks if the value of the field is `_14_3MHZ`"]
    #[inline(always)]
    pub fn is_14_3mhz(&self) -> bool {
        *self == XTAL_A::_14_3MHZ
    }
    #[doc = "Checks if the value of the field is `_16MHZ`"]
    #[inline(always)]
    pub fn is_16mhz(&self) -> bool {
        *self == XTAL_A::_16MHZ
    }
    #[doc = "Checks if the value of the field is `_16_3MHZ`"]
    #[inline(always)]
    pub fn is_16_3mhz(&self) -> bool {
        *self == XTAL_A::_16_3MHZ
    }
    #[doc = "Checks if the value of the field is `_18MHZ`"]
    #[inline(always)]
    pub fn is_18mhz(&self) -> bool {
        *self == XTAL_A::_18MHZ
    }
    #[doc = "Checks if the value of the field is `_20MHZ`"]
    #[inline(always)]
    pub fn is_20mhz(&self) -> bool {
        *self == XTAL_A::_20MHZ
    }
    #[doc = "Checks if the value of the field is `_24MHZ`"]
    #[inline(always)]
    pub fn is_24mhz(&self) -> bool {
        *self == XTAL_A::_24MHZ
    }
    #[doc = "Checks if the value of the field is `_25MHZ`"]
    #[inline(always)]
    pub fn is_25mhz(&self) -> bool {
        *self == XTAL_A::_25MHZ
    }
}
#[doc = "Write proxy for field `XTAL`"]
pub struct XTAL_W<'a> {
    w: &'a mut W,
}
impl<'a> XTAL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: XTAL_A) -> &'a mut W {
        // SAFETY: every `XTAL_A` discriminant (6-26) fits the 5-bit field,
        // so delegating to the unsafe raw `bits` writer is sound here.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "4 MHz"]
    #[inline(always)]
    pub fn _4mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_4MHZ)
    }
    #[doc = "4.096 MHz"]
    #[inline(always)]
    pub fn _4_09mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_4_09MHZ)
    }
    #[doc = "4.9152 MHz"]
    #[inline(always)]
    pub fn _4_91mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_4_91MHZ)
    }
    #[doc = "5 MHz"]
    #[inline(always)]
    pub fn _5mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_5MHZ)
    }
    #[doc = "5.12 MHz"]
    #[inline(always)]
    pub fn _5_12mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_5_12MHZ)
    }
    #[doc = "6 MHz"]
    #[inline(always)]
    pub fn _6mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_6MHZ)
    }
    #[doc = "6.144 MHz"]
    #[inline(always)]
    pub fn _6_14mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_6_14MHZ)
    }
    #[doc = "7.3728 MHz"]
    #[inline(always)]
    pub fn _7_37mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_7_37MHZ)
    }
    #[doc = "8 MHz"]
    #[inline(always)]
    pub fn _8mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_8MHZ)
    }
    #[doc = "8.192 MHz"]
    #[inline(always)]
    pub fn _8_19mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_8_19MHZ)
    }
    #[doc = "10 MHz"]
    #[inline(always)]
    pub fn _10mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_10MHZ)
    }
    #[doc = "12 MHz"]
    #[inline(always)]
    pub fn _12mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_12MHZ)
    }
    #[doc = "12.288 MHz"]
    #[inline(always)]
    pub fn _12_2mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_12_2MHZ)
    }
    #[doc = "13.56 MHz"]
    #[inline(always)]
    pub fn _13_5mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_13_5MHZ)
    }
    #[doc = "14.31818 MHz"]
    #[inline(always)]
    pub fn _14_3mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_14_3MHZ)
    }
    #[doc = "16 MHz"]
    #[inline(always)]
    pub fn _16mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_16MHZ)
    }
    #[doc = "16.384 MHz"]
    #[inline(always)]
    pub fn _16_3mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_16_3MHZ)
    }
    #[doc = "18.0 MHz (USB)"]
    #[inline(always)]
    pub fn _18mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_18MHZ)
    }
    #[doc = "20.0 MHz (USB)"]
    #[inline(always)]
    pub fn _20mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_20MHZ)
    }
    #[doc = "24.0 MHz (USB)"]
    #[inline(always)]
    pub fn _24mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_24MHZ)
    }
    #[doc = "25.0 MHz (USB)"]
    #[inline(always)]
    pub fn _25mhz(self) -> &'a mut W {
        self.variant(XTAL_A::_25MHZ)
    }
    #[doc = r"Writes raw bits to the field"]
    // `unsafe` because raw values outside 6-26 are reserved crystal
    // encodings; prefer `variant`/the named helpers above.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x1f << 6)) | (((value as u32) & 0x1f) << 6);
        self.w
    }
}
#[doc = "Reader of field `BYPASS`"]
pub type BYPASS_R = crate::R<bool, bool>;
/// Write proxy for field `BYPASS`
pub struct BYPASS_W<'a> {
    w: &'a mut W,
}
impl<'a> BYPASS_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of BYPASS within RCC.
        const OFFSET: u32 = 11;
        // Clear the target bit, then OR in the new value.
        let cleared = self.w.bits & !(0x01 << OFFSET);
        self.w.bits = cleared | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `PWRDN`"]
pub type PWRDN_R = crate::R<bool, bool>;
/// Write proxy for field `PWRDN`
pub struct PWRDN_W<'a> {
    w: &'a mut W,
}
impl<'a> PWRDN_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of PWRDN within RCC.
        const OFFSET: u32 = 13;
        // Clear the target bit, then OR in the new value.
        let cleared = self.w.bits & !(0x01 << OFFSET);
        self.w.bits = cleared | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = "PWM Unit Clock Divisor\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
// Discriminants are the raw encodings of the 3-bit PWMDIV field; variant
// names give the resulting divide ratio.
pub enum PWMDIV_A {
    #[doc = "0: PWM clock /2"]
    _2 = 0,
    #[doc = "1: PWM clock /4"]
    _4 = 1,
    #[doc = "2: PWM clock /8"]
    _8 = 2,
    #[doc = "3: PWM clock /16"]
    _16 = 3,
    #[doc = "4: PWM clock /32"]
    _32 = 4,
    #[doc = "5: PWM clock /64"]
    _64 = 5,
}
impl From<PWMDIV_A> for u8 {
    /// Converts a variant into its raw 3-bit field encoding.
    #[inline(always)]
    fn from(variant: PWMDIV_A) -> Self {
        // Explicit target type instead of `as _` inference.
        variant as u8
    }
}
#[doc = "Reader of field `PWMDIV`"]
pub type PWMDIV_R = crate::R<u8, PWMDIV_A>;
impl PWMDIV_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> crate::Variant<u8, PWMDIV_A> {
        use crate::Variant::*;
        // Raw values 6-7 have no defined divisor and surface as `Res(raw)`.
        match self.bits {
            0 => Val(PWMDIV_A::_2),
            1 => Val(PWMDIV_A::_4),
            2 => Val(PWMDIV_A::_8),
            3 => Val(PWMDIV_A::_16),
            4 => Val(PWMDIV_A::_32),
            5 => Val(PWMDIV_A::_64),
            i => Res(i),
        }
    }
    #[doc = "Checks if the value of the field is `_2`"]
    #[inline(always)]
    pub fn is_2(&self) -> bool {
        *self == PWMDIV_A::_2
    }
    #[doc = "Checks if the value of the field is `_4`"]
    #[inline(always)]
    pub fn is_4(&self) -> bool {
        *self == PWMDIV_A::_4
    }
    #[doc = "Checks if the value of the field is `_8`"]
    #[inline(always)]
    pub fn is_8(&self) -> bool {
        *self == PWMDIV_A::_8
    }
    #[doc = "Checks if the value of the field is `_16`"]
    #[inline(always)]
    pub fn is_16(&self) -> bool {
        *self == PWMDIV_A::_16
    }
    #[doc = "Checks if the value of the field is `_32`"]
    #[inline(always)]
    pub fn is_32(&self) -> bool {
        *self == PWMDIV_A::_32
    }
    #[doc = "Checks if the value of the field is `_64`"]
    #[inline(always)]
    pub fn is_64(&self) -> bool {
        *self == PWMDIV_A::_64
    }
}
#[doc = "Write proxy for field `PWMDIV`"]
pub struct PWMDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> PWMDIV_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PWMDIV_A) -> &'a mut W {
        // SAFETY: every `PWMDIV_A` discriminant (0-5) fits the 3-bit field,
        // so delegating to the unsafe raw `bits` writer is sound here.
        unsafe { self.bits(variant.into()) }
    }
    #[doc = "PWM clock /2"]
    #[inline(always)]
    pub fn _2(self) -> &'a mut W {
        self.variant(PWMDIV_A::_2)
    }
    #[doc = "PWM clock /4"]
    #[inline(always)]
    pub fn _4(self) -> &'a mut W {
        self.variant(PWMDIV_A::_4)
    }
    #[doc = "PWM clock /8"]
    #[inline(always)]
    pub fn _8(self) -> &'a mut W {
        self.variant(PWMDIV_A::_8)
    }
    #[doc = "PWM clock /16"]
    #[inline(always)]
    pub fn _16(self) -> &'a mut W {
        self.variant(PWMDIV_A::_16)
    }
    #[doc = "PWM clock /32"]
    #[inline(always)]
    pub fn _32(self) -> &'a mut W {
        self.variant(PWMDIV_A::_32)
    }
    #[doc = "PWM clock /64"]
    #[inline(always)]
    pub fn _64(self) -> &'a mut W {
        self.variant(PWMDIV_A::_64)
    }
    #[doc = r"Writes raw bits to the field"]
    // `unsafe` because raw values 6-7 are reserved encodings; prefer
    // `variant`/the named helpers above.
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x07 << 17)) | (((value as u32) & 0x07) << 17);
        self.w
    }
}
#[doc = "Reader of field `USEPWMDIV`"]
pub type USEPWMDIV_R = crate::R<bool, bool>;
/// Write proxy for field `USEPWMDIV`
pub struct USEPWMDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> USEPWMDIV_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of USEPWMDIV within RCC.
        const OFFSET: u32 = 20;
        // Clear the target bit, then OR in the new value.
        let cleared = self.w.bits & !(0x01 << OFFSET);
        self.w.bits = cleared | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `USESYSDIV`"]
pub type USESYSDIV_R = crate::R<bool, bool>;
/// Write proxy for field `USESYSDIV`
pub struct USESYSDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> USESYSDIV_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of USESYSDIV within RCC.
        const OFFSET: u32 = 22;
        // Clear the target bit, then OR in the new value.
        let cleared = self.w.bits & !(0x01 << OFFSET);
        self.w.bits = cleared | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = "Reader of field `SYSDIV`"]
pub type SYSDIV_R = crate::R<u8, u8>;
/// Write proxy for field `SYSDIV`
pub struct SYSDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> SYSDIV_W<'a> {
    /// Writes raw bits to the field
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 4-bit field at bit offset 23: clear, then merge the masked value.
        const OFFSET: u32 = 23;
        const MASK: u32 = 0x0f;
        let field = (u32::from(value) & MASK) << OFFSET;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | field;
        self.w
    }
}
#[doc = "Reader of field `ACG`"]
pub type ACG_R = crate::R<bool, bool>;
/// Write proxy for field `ACG`
pub struct ACG_W<'a> {
    w: &'a mut W,
}
impl<'a> ACG_W<'a> {
    /// Sets the field bit
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    /// Clears the field bit
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    /// Writes raw bits to the field
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit position of ACG within RCC.
        const OFFSET: u32 = 27;
        // Clear the target bit, then OR in the new value.
        let cleared = self.w.bits & !(0x01 << OFFSET);
        self.w.bits = cleared | (u32::from(value) << OFFSET);
        self.w
    }
}
// Field readers: each accessor extracts its documented bit range from the
// raw RCC value and wraps it in the field's typed reader.
impl R {
    #[doc = "Bit 0 - Main Oscillator Disable"]
    #[inline(always)]
    pub fn moscdis(&self) -> MOSCDIS_R {
        MOSCDIS_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bits 4:5 - Oscillator Source"]
    #[inline(always)]
    pub fn oscsrc(&self) -> OSCSRC_R {
        OSCSRC_R::new(((self.bits >> 4) & 0x03) as u8)
    }
    #[doc = "Bits 6:10 - Crystal Value"]
    #[inline(always)]
    pub fn xtal(&self) -> XTAL_R {
        XTAL_R::new(((self.bits >> 6) & 0x1f) as u8)
    }
    #[doc = "Bit 11 - PLL Bypass"]
    #[inline(always)]
    pub fn bypass(&self) -> BYPASS_R {
        BYPASS_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 13 - PLL Power Down"]
    #[inline(always)]
    pub fn pwrdn(&self) -> PWRDN_R {
        PWRDN_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bits 17:19 - PWM Unit Clock Divisor"]
    #[inline(always)]
    pub fn pwmdiv(&self) -> PWMDIV_R {
        PWMDIV_R::new(((self.bits >> 17) & 0x07) as u8)
    }
    #[doc = "Bit 20 - Enable PWM Clock Divisor"]
    #[inline(always)]
    pub fn usepwmdiv(&self) -> USEPWMDIV_R {
        USEPWMDIV_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 22 - Enable System Clock Divider"]
    #[inline(always)]
    pub fn usesysdiv(&self) -> USESYSDIV_R {
        USESYSDIV_R::new(((self.bits >> 22) & 0x01) != 0)
    }
    #[doc = "Bits 23:26 - System Clock Divisor"]
    #[inline(always)]
    pub fn sysdiv(&self) -> SYSDIV_R {
        SYSDIV_R::new(((self.bits >> 23) & 0x0f) as u8)
    }
    #[doc = "Bit 27 - Auto Clock Gating"]
    #[inline(always)]
    pub fn acg(&self) -> ACG_R {
        ACG_R::new(((self.bits >> 27) & 0x01) != 0)
    }
}
// Field writers: each accessor returns a write proxy borrowing `self`, so
// calls can be chained (e.g. `w.moscdis().set_bit().acg().clear_bit()`).
impl W {
    #[doc = "Bit 0 - Main Oscillator Disable"]
    #[inline(always)]
    pub fn moscdis(&mut self) -> MOSCDIS_W {
        MOSCDIS_W { w: self }
    }
    #[doc = "Bits 4:5 - Oscillator Source"]
    #[inline(always)]
    pub fn oscsrc(&mut self) -> OSCSRC_W {
        OSCSRC_W { w: self }
    }
    #[doc = "Bits 6:10 - Crystal Value"]
    #[inline(always)]
    pub fn xtal(&mut self) -> XTAL_W {
        XTAL_W { w: self }
    }
    #[doc = "Bit 11 - PLL Bypass"]
    #[inline(always)]
    pub fn bypass(&mut self) -> BYPASS_W {
        BYPASS_W { w: self }
    }
    #[doc = "Bit 13 - PLL Power Down"]
    #[inline(always)]
    pub fn pwrdn(&mut self) -> PWRDN_W {
        PWRDN_W { w: self }
    }
    #[doc = "Bits 17:19 - PWM Unit Clock Divisor"]
    #[inline(always)]
    pub fn pwmdiv(&mut self) -> PWMDIV_W {
        PWMDIV_W { w: self }
    }
    #[doc = "Bit 20 - Enable PWM Clock Divisor"]
    #[inline(always)]
    pub fn usepwmdiv(&mut self) -> USEPWMDIV_W {
        USEPWMDIV_W { w: self }
    }
    #[doc = "Bit 22 - Enable System Clock Divider"]
    #[inline(always)]
    pub fn usesysdiv(&mut self) -> USESYSDIV_W {
        USESYSDIV_W { w: self }
    }
    #[doc = "Bits 23:26 - System Clock Divisor"]
    #[inline(always)]
    pub fn sysdiv(&mut self) -> SYSDIV_W {
        SYSDIV_W { w: self }
    }
    #[doc = "Bit 27 - Auto Clock Gating"]
    #[inline(always)]
    pub fn acg(&mut self) -> ACG_W {
        ACG_W { w: self }
    }
}
|
pub mod default;
pub mod query;
pub mod user_tag;
|
//! byte operation APIs
use crate::object::AsObject;
use crate::{PyObject, PyResult, VirtualMachine};
use num_traits::ToPrimitive;
/// Builds a `Vec<u8>` from `obj`.
///
/// Order of attempts: (1) copy a bytes-like buffer directly; (2) for any
/// non-`str` object, map it as an iterable of ints via `value_from_object`;
/// otherwise raise a `TypeError`. `str` is deliberately excluded from the
/// iterable path.
pub fn bytes_from_object(vm: &VirtualMachine, obj: &PyObject) -> PyResult<Vec<u8>> {
    // Fast path: the object exposes a bytes-like buffer.
    if let Ok(elements) = obj.try_bytes_like(vm, |bytes| bytes.to_vec()) {
        return Ok(elements);
    }
    if !obj.fast_isinstance(vm.ctx.types.str_type) {
        if let Ok(elements) = vm.map_iterable_object(obj, |x| value_from_object(vm, &x)) {
            // `elements` is itself a PyResult: per-element conversion errors
            // (e.g. a value outside 0-255) propagate to the caller here.
            return elements;
        }
    }
    Err(vm.new_type_error(
        "can assign only bytes, buffers, or iterables of ints in range(0, 256)".to_owned(),
    ))
}
/// Converts `obj` to a single byte via its `__index__` value, raising a
/// `ValueError` when the result does not fit in `range(0, 256)`.
pub fn value_from_object(vm: &VirtualMachine, obj: &PyObject) -> PyResult<u8> {
    let index = obj.try_index(vm)?;
    match index.as_bigint().to_u8() {
        Some(byte) => Ok(byte),
        None => Err(vm.new_value_error("byte must be in range(0, 256)".to_owned())),
    }
}
|
use rand::seq::SliceRandom;
// Pool of desktop browser User-Agent strings (Chrome, Firefox, Edge, Safari
// across Windows/macOS/Linux) sampled by `random_user_agent`.
const USER_AGENTS: [&str; 16] = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.63",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/110.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:89.0) Gecko/20100101 Firefox/89.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:53.0) Gecko/20100101 Firefox/53.0",
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.3 Safari/605.1.15",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:97.0) Gecko/20100101 Firefox/97.0",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.99 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36",
    "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/110.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/110.0",
];
/// How the User-Agent string should be chosen.
#[derive(Debug)]
pub enum UserAgent {
    // A caller-supplied User-Agent string used verbatim.
    CustomUserAgent(String),
    // A randomly selected agent (see `random_user_agent`); interpretation
    // is up to the consumer of this enum.
    RandomUserAgent,
}
/// Returns a random User-Agent string from the built-in `USER_AGENTS` pool.
pub fn random_user_agent() -> String {
    USER_AGENTS
        .choose(&mut rand::thread_rng())
        // `choose` returns `None` only for an empty slice, and USER_AGENTS
        // is a fixed 16-element array — state the invariant in the message.
        .expect("USER_AGENTS must not be empty")
        .to_string()
}
#[cfg(test)]
mod user_agent_tests {
    use super::*;
    // Smoke test: every entry in USER_AGENTS starts with "Mozilla/5.0", so
    // any random pick must as well.
    #[test]
    fn get_random_user_agent() {
        let ua = random_user_agent();
        assert!(ua.starts_with("Mozilla/5.0"));
    }
}
|
use dg2core::{InnerMessage, MessagePart, OuterMessage, Store};
/// Test the usual workflow:
/// - init a store (incl. key pairs)
/// - create a group
/// - init another store
/// - simulate send+recv of a DM with the test group key, without fs roundtrip
#[test]
fn simple() {
    let mut store = Store::new();
    // Sender's store gets a fresh group key under the name "test".
    let test_group = dg2core::gm::gen_key();
    store.groupkeys.insert("test".to_string(), test_group.clone());
    let mut store2 = Store::new();
    // DM {test_group}
    let mut msg = InnerMessage::new();
    msg.insert("test.gk".to_string(), MessagePart::GroupKey(test_group));
    // Serialize, sign with the sender's keys, and encrypt to the recipient.
    let mut msg = OuterMessage::try_new(&msg).expect("unable to serialize InnerMessage");
    msg.attach_signature(&store.pubkey.signs, &store.sgnkey);
    let blob =
        dg2core::dm::encode_message(&msg, &store2.pubkey.msgs).expect("unable to encode message");
    // imagine send+recv here
    let msg = dg2core::dm::decode_message(&blob, &store2.pubkey.msgs, &store2.msgkey)
        .expect("unable to decode message");
    // Every signature check in the decoded message must pass.
    msg.verify().for_each(|i| assert!(i.1));
    let msg = msg.inner().expect("unable to deserialize InnerMessage");
    // The recipient must recover the exact group key that was sent.
    if let MessagePart::GroupKey(gk) = &msg["test.gk"] {
        assert_eq!(gk, &store.groupkeys["test"]);
        store2.groupkeys.insert("test".to_string(), gk.clone());
    } else {
        panic!("message part test.gk not found");
    }
}
|
use state::object::Object;
use state::object::Idx;
use state::object::Pixel;
use state::object::Icon;
use state::object::Color;
/// An impassable, opaque environment tile.
pub struct Wall {
    // Position/index of this wall in the game state.
    idx: Idx
}
impl Wall {
    /// Creates a wall at the given index.
    pub fn new(idx: Idx) -> Wall {
        Wall {idx}
    }
}
impl Object for Wall {
    fn get_idx(&self) -> Idx {
        self.idx
    }
    /// Rendered as the wall icon in white (RGB 255, 255, 255).
    fn get_pixel(&self) -> Pixel {
        Pixel(Icon::Wall, Color(255, 255, 255))
    }
    // Ordinal 0 — interpretation (e.g. draw/priority order) is defined by
    // the `Object` trait's consumers, not visible here.
    fn get_ordinal(&self) -> i32 {
        0
    }
    // Walls are static environment: they block movement and line of sight.
    fn is_environment(&self) -> bool {
        true
    }
    fn is_blocking(&self) -> bool {
        true
    }
    fn is_opaque(&self) -> bool {
        true
    }
    fn name(&self) -> &str {
        "Wall"
    }
}
use chrono::Utc;
use serde_derive::{Deserialize, Serialize};
/// Request payload carrying a single numeric id.
#[derive(Debug, Serialize, Deserialize)]
pub struct IdRequest {
    pub id: i32,
}
/// Request payload selecting a year/month period.
#[derive(Debug, Serialize, Deserialize)]
pub struct DateRequest {
    pub year: i32,
    pub month: u8,
}
/// A dated, named transaction record (serialize-only response type).
#[derive(Debug, Serialize)]
pub struct Transaction {
    pub id: i32,
    pub date: chrono::DateTime<Utc>,
    pub name: String,
}
/// Wrapper for a single integer result from a SQL query.
#[derive(Debug, Serialize)]
pub struct SqlResult {
    pub value: i32,
}
/// A currency record — presumably ISO 4217 (alphabetic code, numeric code,
/// minor unit); TODO confirm against the data source.
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct Currency {
    pub code: String,
    pub numeric_code: i32,
    pub minor_unit: i32,
    pub name: String,
}
|
//! Hyper service that adds a context to an incoming request and passes it on
//! to a wrapped service.
use crate::{Push, XSpanIdString};
use futures::future::FutureExt;
use hyper::Request;
use std::marker::PhantomData;
use std::task::Poll;
/// Middleware wrapper service, that should be used as the outermost layer in a
/// stack of hyper services. Adds a context to a plain `hyper::Request` that can be
/// used by subsequent layers in the stack.
#[derive(Debug)]
pub struct AddContextMakeService<T, C>
where
    C: Default + Push<XSpanIdString> + 'static + Send,
    C::Result: Send + 'static,
{
    // The wrapped make-service.
    inner: T,
    // Ties the context type parameter to this service without storing a value.
    marker: PhantomData<C>,
}
impl<T, C> AddContextMakeService<T, C>
where
    C: Default + Push<XSpanIdString> + 'static + Send,
    C::Result: Send + 'static,
{
    /// Create a new AddContextMakeService struct wrapping a value
    pub fn new(inner: T) -> Self {
        AddContextMakeService {
            inner,
            marker: PhantomData,
        }
    }
}
// Make-service implementation: producing a service for `target` yields the
// inner service wrapped in `AddContextService`.
impl<Inner, Context, Target> hyper::service::Service<Target>
    for AddContextMakeService<Inner, Context>
where
    Context: Default + Push<XSpanIdString> + 'static + Send,
    Context::Result: Send + 'static,
    Inner: hyper::service::Service<Target>,
    Inner::Future: Send + 'static,
{
    type Error = Inner::Error;
    type Response = AddContextService<Inner::Response, Context>;
    type Future = futures::future::BoxFuture<'static, Result<Self::Response, Self::Error>>;
    fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Readiness is delegated entirely to the inner make-service.
        self.inner.poll_ready(cx)
    }
    fn call(&mut self, target: Target) -> Self::Future {
        // `s?` propagates the inner error; on success the produced service is
        // wrapped so each request gets a context attached.
        Box::pin(
            self.inner
                .call(target)
                .map(|s| Ok(AddContextService::new(s?))),
        )
    }
}
/// Middleware wrapper service, that should be used as the outermost layer in a
/// stack of hyper services. Adds a context to a plain `hyper::Request` that can be
/// used by subsequent layers in the stack. The `AddContextService` struct should
/// not usually be used directly - when constructing a hyper stack use
/// `AddContextMakeService`, which will create `AddContextService` instances as needed.
#[derive(Debug)]
pub struct AddContextService<T, C>
where
    C: Default + Push<XSpanIdString>,
    C::Result: Send + 'static,
{
    // The wrapped per-connection service.
    inner: T,
    // Ties the context type parameter to this service without storing a value.
    marker: PhantomData<C>,
}
impl<T, C> AddContextService<T, C>
where
    C: Default + Push<XSpanIdString>,
    C::Result: Send + 'static,
{
    /// Create a new AddContextService struct wrapping a value
    pub fn new(inner: T) -> Self {
        AddContextService {
            inner,
            marker: PhantomData,
        }
    }
}
// Per-request implementation: builds a fresh context carrying an X-Span-ID
// and hands `(request, context)` to the inner service.
impl<Inner, Context, Body> hyper::service::Service<Request<Body>>
    for AddContextService<Inner, Context>
where
    Context: Default + Push<XSpanIdString> + Send + 'static,
    Context::Result: Send + 'static,
    Inner: hyper::service::Service<(Request<Body>, Context::Result)>,
{
    type Response = Inner::Response;
    type Error = Inner::Error;
    type Future = Inner::Future;
    fn poll_ready(
        &mut self,
        context: &mut std::task::Context<'_>,
    ) -> Poll<Result<(), Self::Error>> {
        // Readiness is delegated entirely to the inner service.
        self.inner.poll_ready(context)
    }
    fn call(&mut self, req: Request<Body>) -> Self::Future {
        // Reuse the request's span id if present, otherwise generate one,
        // then push it into a default-constructed context.
        let x_span_id = XSpanIdString::get_or_generate(&req);
        let context = Context::default().push(x_span_id);
        self.inner.call((req, context))
    }
}
|
//! solana-cli-program-template Integration Tests (local)
//!
//! Performs local validator test:
//! 1. Assumes solana-test-validator is already started (see note below)
//! 2. Creates/funds wallets and accounts from `keys` directory
//! 3. Tests for successful Initialize, Mint, Transfer and Burn of key/value pairs
//! 4. Tests for failing condition handling
//!
//! Note:
//! Running `solana-test-validator` with clean ledger:
//! ```
//! solana-test-validator --bpf-program SampGgdt3wioaoMZhC6LTSbg4pnuvQnSfJpDYeuXQBv ~/solana-cli-program-template/program/target/bpfel-unknown-unknown/release/solana_cli_template_program_bpf.so --ledger ~/solana-cli-program-template/.ledger --reset
//! ```
//! Running `solana-test-validator` with existing ledger and program already loaded
//! ```
//! solana-test-validator --ledger ~/solana-cli-program-template/.ledger
//! ```
pub mod common;
use {
cli_program_template::prelude::{
burn_instruction, get_account_for, mint_transaction, transfer_instruction,
unpack_account_data, Instructions, KEYS_DB, PROG_KEY,
},
common::{load_and_initialize_accounts, load_user_wallets, rpc_client_from_config},
solana_sdk::{instruction::AccountMeta, signer::Signer},
};
#[test]
fn test_initialization_pass() {
    // Requires a running solana-test-validator (see module docs above).
    let setup = rpc_client_from_config();
    assert!(setup.is_ok());
    let (rpc_client, funding_keypair) = setup.unwrap();
    // The funding wallet must already exist on the validator.
    assert!(get_account_for(
        &rpc_client,
        &funding_keypair.pubkey(),
        rpc_client.commitment()
    )
    .is_some());
    // The template program itself must be deployed.
    assert!(get_account_for(&rpc_client, &PROG_KEY.pubkey(), rpc_client.commitment()).is_some());
}
#[test]
fn test_wallet_loading_pass() {
    let (rpc_client, funding_keypair) = rpc_client_from_config().unwrap();
    // Creates/funds the wallets from the `keys` directory; expects exactly 3.
    let loaded_wallets = load_user_wallets(&rpc_client, &funding_keypair, rpc_client.commitment());
    assert_eq!(loaded_wallets.len(), 3);
}
#[test]
fn test_wallet_and_account_initialization_pass() {
    let (rpc_client, funding_keypair) = rpc_client_from_config().unwrap();
    // Creates/funds the wallets from the `keys` directory; expects exactly 3.
    let loaded_wallets = load_user_wallets(&rpc_client, &funding_keypair, rpc_client.commitment());
    assert_eq!(loaded_wallets.len(), 3);
    let initialized_accounts = load_and_initialize_accounts(
        &rpc_client,
        Instructions::InitializeAccount as u8,
        rpc_client.commitment(),
    );
    assert_eq!(initialized_accounts.len(), 3);
    // Every account's on-chain data must report the initialized flag set.
    for account in initialized_accounts {
        let (initialized, _) =
            unpack_account_data(&rpc_client, account, rpc_client.commitment()).unwrap();
        assert!(initialized);
    }
}
#[test]
// Happy-path walk through the full key/value lifecycle using the fee-free
// instruction variants: mint to User1, transfer to User2, burn from User2.
fn test_load_mint_transfer_burn_no_fee_pass() {
    let (rpc_client, funding_keypair) = rpc_client_from_config().unwrap();
    let loaded_wallets = load_user_wallets(&rpc_client, &funding_keypair, rpc_client.commitment());
    assert_eq!(loaded_wallets.len(), 3);
    let initialized_accounts = load_and_initialize_accounts(
        &rpc_client,
        Instructions::InitializeAccount as u8,
        rpc_client.commitment(),
    );
    assert_eq!(initialized_accounts.len(), 3);
    // Setup key/value data and get accounts used in transactions
    let user1 = String::from("User1");
    let user2 = String::from("User2");
    let mint_key = String::from("test_key_1");
    let mint_value = String::from("value for test_key_1");
    let (wallet1, account1) = KEYS_DB.wallet_and_account(user1).unwrap();
    let (wallet2, account2) = KEYS_DB.wallet_and_account(user2).unwrap();
    // Do mint to User1
    let mint_result = mint_transaction(
        &rpc_client,
        &[
            AccountMeta::new(account1.pubkey(), false),
            AccountMeta::new(wallet1.pubkey(), true),
        ],
        wallet1,
        &mint_key,
        &mint_value,
        Instructions::FreeMint as u8,
        rpc_client.commitment(),
    );
    assert!(mint_result.is_ok());
    let (_, btree) = unpack_account_data(&rpc_client, account1, rpc_client.commitment()).unwrap();
    assert!(btree.contains_key(&mint_key));
    // Do transfer of key/value from User1 to User2
    let transfer_result = transfer_instruction(
        &rpc_client,
        &[
            AccountMeta::new(account1.pubkey(), false),
            AccountMeta::new(account2.pubkey(), false),
            AccountMeta::new(wallet1.pubkey(), true),
        ],
        wallet1,
        &mint_key,
        Instructions::FreeTransfer as u8,
        rpc_client.commitment(),
    );
    assert!(transfer_result.is_ok());
    let (_, btree1) = unpack_account_data(&rpc_client, account1, rpc_client.commitment()).unwrap();
    let (_, btree2) = unpack_account_data(&rpc_client, account2, rpc_client.commitment()).unwrap();
    // The key must have moved: gone from User1, present (with value) on User2.
    assert!(!btree1.contains_key(&mint_key));
    assert!(btree2.contains_key(&mint_key));
    assert_eq!(btree2.get(&mint_key).unwrap(), &mint_value);
    // Burn the key/value just transferred to User2
    let burn_result = burn_instruction(
        &rpc_client,
        &[
            AccountMeta::new(account2.pubkey(), false),
            AccountMeta::new(wallet2.pubkey(), true),
        ],
        wallet2,
        &mint_key,
        Instructions::FreeBurn as u8,
        rpc_client.commitment(),
    );
    assert!(burn_result.is_ok());
    let (_, btree2) = unpack_account_data(&rpc_client, account2, rpc_client.commitment()).unwrap();
    assert!(!btree2.contains_key(&mint_key));
}
#[test]
// Negative-path coverage: each transaction type is driven with invalid input
// (missing accounts, duplicate mint, unknown key) and must return Err.
fn test_mint_transfer_burn_fail() {
    let (rpc_client, funding_keypair) = rpc_client_from_config().unwrap();
    let loaded_wallets = load_user_wallets(&rpc_client, &funding_keypair, rpc_client.commitment());
    assert_eq!(loaded_wallets.len(), 3);
    let initialized_accounts = load_and_initialize_accounts(
        &rpc_client,
        Instructions::InitializeAccount as u8,
        rpc_client.commitment(),
    );
    assert_eq!(initialized_accounts.len(), 3);
    // Setup key/value data and get accounts used in transactions
    let user1 = String::from("User1");
    let user2 = String::from("User2");
    let mint_key = String::from("test_key_1");
    let mint_value = String::from("value for test_key_1");
    let bad_key = String::from("bad_key_1");
    let (wallet1, account1) = KEYS_DB.wallet_and_account(user1).unwrap();
    let (wallet2, account2) = KEYS_DB.wallet_and_account(user2).unwrap();
    // Fail empty accounts
    let mint_result = mint_transaction(
        &rpc_client,
        &[],
        wallet1,
        &mint_key,
        &mint_value,
        Instructions::FreeMint as u8,
        rpc_client.commitment(),
    );
    assert!(mint_result.is_err());
    // Do mint to User1
    let mint_result = mint_transaction(
        &rpc_client,
        &[
            AccountMeta::new(account1.pubkey(), false),
            AccountMeta::new(wallet1.pubkey(), true),
        ],
        wallet1,
        &mint_key,
        &mint_value,
        Instructions::FreeMint as u8,
        rpc_client.commitment(),
    );
    assert!(mint_result.is_ok());
    let (_, btree) = unpack_account_data(&rpc_client, account1, rpc_client.commitment()).unwrap();
    assert!(btree.contains_key(&mint_key));
    // Attempt to mint something already minted for User1
    let mint_result = mint_transaction(
        &rpc_client,
        &[
            AccountMeta::new(account1.pubkey(), false),
            AccountMeta::new(wallet1.pubkey(), true),
        ],
        wallet1,
        &mint_key,
        &mint_value,
        Instructions::FreeMint as u8,
        rpc_client.commitment(),
    );
    assert!(mint_result.is_err());
    // Attempt to transfer a key that does not exist (bad_key was never minted)
    let transfer_result = transfer_instruction(
        &rpc_client,
        &[
            AccountMeta::new(account1.pubkey(), false),
            AccountMeta::new(account2.pubkey(), false),
            AccountMeta::new(wallet1.pubkey(), true),
        ],
        wallet1,
        &bad_key,
        Instructions::FreeTransfer as u8,
        rpc_client.commitment(),
    );
    assert!(transfer_result.is_err());
    // Attempt to burn something that does not exist (mint_key lives on account1, not account2)
    let burn_result = burn_instruction(
        &rpc_client,
        &[
            AccountMeta::new(account2.pubkey(), false),
            AccountMeta::new(wallet2.pubkey(), true),
        ],
        wallet2,
        &mint_key,
        Instructions::FreeBurn as u8,
        rpc_client.commitment(),
    );
    assert!(burn_result.is_err());
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::optimizer::rule::Rule;
use crate::optimizer::rule::TransformResult;
use crate::optimizer::RuleID;
use crate::optimizer::SExpr;
use crate::plans::Aggregate;
use crate::plans::AggregateMode;
use crate::plans::Filter;
use crate::plans::PatternPlan;
use crate::plans::RelOp;
use crate::plans::RelOp::Pattern;
use crate::plans::RelOperator;
/// Heuristic optimizer runs in a bottom-up recursion fashion. If we match a plan like
/// Filter-Aggregate-* and push down filter to Filter(Optional)-Aggregate-Filter-*, this will not
/// work. RuleSplitAggregate will be applied first, since it's bottom up, then this rule, which
/// would cause the plan to be like Filter(Optional)-Aggregate-Filter-Aggregate-*, which makes no sense.
/// Hence we match 2 bundled Aggregate Ops:
///
/// Input: Filter
/// \
/// Aggregate(Final)
/// \
/// Aggregate(Partial)
/// \
/// *
///
/// Output: Filter(Optional)
/// \
/// Aggregate(Final)
/// \
/// Aggregate(Partial)
/// \
/// Filter
/// \
/// *
pub struct RulePushDownFilterAggregate {
    // Rule identifier (`RuleID::PushDownFilterAggregate`).
    id: RuleID,
    // Matched shape: Filter -> Aggregate -> Aggregate -> * (see diagram above).
    pattern: SExpr,
}
impl RulePushDownFilterAggregate {
    /// Builds the rule with its Filter -> Aggregate -> Aggregate -> * pattern.
    pub fn new() -> Self {
        // Assemble the matched shape from the inside out.
        let any = SExpr::create_leaf(PatternPlan { plan_type: Pattern }.into());
        let partial_aggregate = SExpr::create_unary(
            PatternPlan {
                plan_type: RelOp::Aggregate,
            }
            .into(),
            any,
        );
        let final_aggregate = SExpr::create_unary(
            PatternPlan {
                plan_type: RelOp::Aggregate,
            }
            .into(),
            partial_aggregate,
        );
        let pattern = SExpr::create_unary(
            PatternPlan {
                plan_type: RelOp::Filter,
            }
            .into(),
            final_aggregate,
        );
        Self {
            id: RuleID::PushDownFilterAggregate,
            pattern,
        }
    }
}
impl Rule for RulePushDownFilterAggregate {
    fn id(&self) -> RuleID {
        self.id
    }
    /// Splits a HAVING filter's predicates into a part that only touches
    /// group-by columns (pushed below the Final/Partial aggregate pair) and a
    /// remainder that stays on top as a HAVING filter.
    fn apply(&self, s_expr: &SExpr, state: &mut TransformResult) -> common_exception::Result<()> {
        let filter: Filter = s_expr.plan().clone().try_into()?;
        if filter.is_having {
            let agg_parent = s_expr.child(0)?;
            let agg_parent_plan: Aggregate = agg_parent.plan().clone().try_into()?;
            let agg_child = agg_parent.child(0)?;
            let agg_child_plan: Aggregate = agg_child.plan().clone().try_into()?;
            if agg_parent_plan.mode == AggregateMode::Final
                && agg_child_plan.mode == AggregateMode::Partial
            {
                // Hoisted: `group_columns()` is invariant across predicates, so
                // compute it once instead of once per referenced column.
                let group_columns = agg_parent_plan.group_columns()?;
                let mut push_predicates = vec![];
                let mut remaining_predicates = vec![];
                for predicate in filter.predicates {
                    // A predicate may move below the aggregation only when
                    // every column it references is a group-by column.
                    let pushable = predicate
                        .used_columns()
                        .into_iter()
                        .all(|col| group_columns.contains(&col));
                    if pushable {
                        push_predicates.push(predicate);
                    } else {
                        remaining_predicates.push(predicate);
                    }
                }
                let mut result = if push_predicates.is_empty() {
                    // No change since nothing can be pushed down.
                    s_expr.clone()
                } else {
                    let filter_push_down_expr = SExpr::create_unary(
                        RelOperator::Filter(Filter {
                            predicates: push_predicates,
                            is_having: false,
                        }),
                        agg_child.child(0)?.clone(),
                    );
                    let agg_with_filter_push_down_expr = SExpr::create_unary(
                        RelOperator::Aggregate(agg_parent_plan),
                        SExpr::create_unary(
                            RelOperator::Aggregate(agg_child_plan),
                            filter_push_down_expr,
                        ),
                    );
                    if remaining_predicates.is_empty() {
                        // All filters are pushed down.
                        agg_with_filter_push_down_expr
                    } else {
                        // Partial filter can be pushed down; the rest stays as HAVING.
                        SExpr::create_unary(
                            RelOperator::Filter(Filter {
                                predicates: remaining_predicates,
                                is_having: true,
                            }),
                            agg_with_filter_push_down_expr,
                        )
                    }
                };
                result.set_applied_rule(&self.id);
                state.add_result(result);
            }
        }
        Ok(())
    }
    fn pattern(&self) -> &SExpr {
        &self.pattern
    }
}
|
use std::time::{Duration, Instant};
use actix::{
fut, Actor, ActorContext, ActorFutureExt, Addr, AsyncContext, Context, ContextFutureSpawner,
Handler, StreamHandler, WrapFuture,
};
use actix_web::{web, HttpRequest, HttpResponse};
use actix_web_actors::ws;
use serde::Deserialize;
use serde_json;
use uuid::Uuid;
use errors::Error;
pub mod client_messages;
mod server;
pub use self::server::*;
const HEARTBEAT_INTERVAL: Duration = Duration::from_secs(5);
const CLIENT_TIMEOUT: Duration = Duration::from_secs(30);
#[derive(Deserialize)]
// JSON payload expected after the "/auth" command, e.g. {"token": "..."}.
struct AuthReq {
    token: String,
}
pub struct WebSocketSession {
    // Unique session id (UUID v4 string) used when registering with the server.
    id: String,
    // Time of the last heartbeat (ping/pong) received from the client.
    hb: Instant,
    // Address of the central `Server` actor this session reports to.
    server_addr: Addr<Server>,
}
impl WebSocketSession {
    /// Builds a fresh session with a random id and a heartbeat stamp of "now".
    fn new(server_addr: Addr<Server>) -> Self {
        let id = Uuid::new_v4().to_string();
        Self {
            id,
            hb: Instant::now(),
            server_addr,
        }
    }
    /// Schedules the recurring heartbeat: ping while the client is alive,
    /// otherwise tell the server we are gone and stop this actor.
    fn send_heartbeat(&self, ctx: &mut <Self as Actor>::Context) {
        ctx.run_interval(HEARTBEAT_INTERVAL, |session, ctx| {
            let alive = Instant::now().duration_since(session.hb) <= CLIENT_TIMEOUT;
            if alive {
                ctx.ping(b"");
            } else {
                info!("Websocket Client heartbeat failed, disconnecting!");
                session.server_addr.do_send(Disconnect {
                    id: session.id.clone(),
                });
                // stop actor; no ping is sent on this path
                ctx.stop();
            }
        });
    }
}
impl Actor for WebSocketSession {
    type Context = ws::WebsocketContext<Self>;
    /// On start: begin heartbeats, then register this session with the chat
    /// server; if registration fails the session is stopped.
    fn started(&mut self, ctx: &mut Self::Context) {
        self.send_heartbeat(ctx);
        let connect = Connect {
            addr: ctx.address().recipient(),
            id: self.id.clone(),
        };
        self.server_addr
            .send(connect)
            .into_actor(self)
            .then(|res, _act, ctx| {
                if res.is_err() {
                    ctx.stop();
                }
                fut::ready(())
            })
            .wait(ctx);
    }
}
impl Handler<Message> for WebSocketSession {
    type Result = ();
    // Relays a server-originated `Message` to the websocket peer as a text frame.
    fn handle(&mut self, msg: Message, ctx: &mut Self::Context) {
        ctx.text(msg.0);
    }
}
impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for WebSocketSession {
    /// Dispatches incoming websocket frames. Text frames starting with `/` are
    /// treated as commands; currently only `/auth <json>` is supported.
    fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
        match msg {
            Ok(ws::Message::Ping(msg)) => {
                self.hb = Instant::now();
                ctx.pong(&msg);
            }
            Ok(ws::Message::Pong(_)) => {
                self.hb = Instant::now();
            }
            Ok(ws::Message::Text(text)) => {
                let message = text.trim();
                // has routing pattern
                if message.starts_with('/') {
                    let args: Vec<&str> = message.splitn(2, ' ').collect();
                    match args[0] {
                        "/auth" => {
                            // BUG FIX: `args[1]` panicked when "/auth" arrived
                            // without a payload; `get(1)` handles that case and
                            // falls through to the error reply.
                            let params = args
                                .get(1)
                                .and_then(|raw| serde_json::from_str::<AuthReq>(raw).ok());
                            if let Some(params) = params {
                                self.server_addr.do_send(Auth {
                                    id: self.id.clone(),
                                    token: params.token,
                                });
                            } else {
                                ctx.text("Invalid request params");
                            }
                        }
                        _ => ctx.text(format!("unknown command {:?}", message)),
                    }
                }
            }
            Ok(ws::Message::Binary(bin)) => ctx.binary(bin),
            Ok(ws::Message::Close(reason)) => {
                info!("closed ws session");
                self.server_addr.do_send(Disconnect {
                    id: self.id.clone(),
                });
                ctx.close(reason);
                ctx.stop();
            }
            Err(err) => {
                warn!("Error handling msg: {:?}", err);
                ctx.stop()
            }
            _ => ctx.stop(),
        }
    }
}
/// HTTP handler that upgrades the request to a websocket session actor.
pub async fn ws_index(
    req: HttpRequest,
    stream: web::Payload,
    server_addr: web::Data<Addr<Server>>,
) -> Result<HttpResponse, Error> {
    let session = WebSocketSession::new(server_addr.get_ref().clone());
    let res = ws::start(session, &req, stream)?;
    Ok(res)
}
|
use std::io;
use std::io::BufRead;
#[macro_use]
mod macros;
mod config;
mod formatter;
fn main() {
    // parse cl args and inspect execution context (are we writing to a tty?)
    let config = config::Config::new();
    // acquire std{in,out} locks
    let stdin = io::stdin();
    let stdout = io::stdout();
    let in_handle = stdin.lock();
    let _out_handle = stdout.lock();
    // create iterator over stdin lines (lines that fail to decode are skipped)
    let mut lines_iter = io::BufReader::new(in_handle).lines().filter_map(|x| x.ok());
    // collect and analyze first n lines to derive the column layout
    let first_lines: Vec<String> = (&mut lines_iter).take(config.n).collect();
    let col_sizes = formatter::analyze(&first_lines, &config.input_sep);
    let split_info = formatter::split_available_width(
        &col_sizes,
        config.width,
        config.output_sep.chars().count(),
        config.expand,
    );
    // Emit the buffered sample first, then stream the remaining lines.
    // Chaining both sources removes the previously duplicated formatting loop.
    for l in first_lines.into_iter().chain(lines_iter) {
        println!(
            "{}",
            formatter::format_line(
                l,
                &split_info,
                &config.input_sep,
                &config.output_sep,
                config.padding,
            )
        );
    }
}
|
// Copyright 2019-2020 PolkaX. Licensed under MIT or Apache-2.0.
use bytes::Bytes;
use cid::{Cid, ExtMultihashRef, IntoExt};
use multihash::Sha2_256;
use crate::error::Result;
/// The trait for getting raw data and cid of block.
pub trait Block: AsRef<Cid> {
    /// Get the raw data of block.
    fn raw_data(&self) -> &Bytes;
    /// Get the cid.
    // Default implementation delegates to the required `AsRef<Cid>` impl.
    fn cid(&self) -> &Cid {
        self.as_ref()
    }
}
impl Block for BasicBlock {
    // Expose the stored payload bytes directly; no copying.
    fn raw_data(&self) -> &Bytes {
        &self.data
    }
}
impl AsRef<Cid> for BasicBlock {
    // Borrow the block's CID; this also powers the default `Block::cid`.
    fn as_ref(&self) -> &Cid {
        &self.cid
    }
}
/// The basic block.
#[derive(Clone, Debug)]
pub struct BasicBlock {
    // Content identifier derived from (or verified against) `data`.
    cid: Cid,
    // The block's raw payload bytes.
    data: Bytes,
}
impl BasicBlock {
    /// Creates a new `BasicBlock` with given bytes, and its CID is version 0.
    pub fn new(data: Bytes) -> BasicBlock {
        let hash = Sha2_256::digest(data.as_ref()).into_ext();
        let cid = Cid::new_v0(hash).expect("invalid hash for CIDv0");
        BasicBlock { data, cid }
    }
    /// Creates a new `BasicBlock` with given bytes and CID, verifying that the
    /// CID's multihash matches a freshly computed digest of the bytes.
    pub fn new_with_cid(data: Bytes, cid: Cid) -> Result<BasicBlock> {
        use crate::error::BlockFormatError;
        let expected = cid.hash();
        let actual = expected.algorithm().digest(data.as_ref());
        if expected == actual {
            Ok(BasicBlock { data, cid })
        } else {
            Err(BlockFormatError::WrongHash(
                expected.as_bytes().to_vec(),
                actual.as_bytes().to_vec(),
            ))
        }
    }
    /// Get the multihash of cid of the basic block.
    pub fn multihash(&self) -> ExtMultihashRef {
        self.cid.hash()
    }
}
impl std::fmt::Display for BasicBlock {
    // Human-readable form reuses the derived `Debug` representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "[Block {:?}]", self)
    }
}
|
use std::collections::HashMap;
use std::io::Write;
use std::net::SocketAddr;
use std::net::TcpListener;
use std::net::ToSocketAddrs;
use std::net::{IpAddr, Ipv4Addr};
use std::sync::mpsc;
use std::thread;
use std::time::Duration;
use super::client::Client;
use super::client::ClientPayload;
use super::client::PayloadSignal;
pub struct Server {
    // Address the TCP listener binds to.
    server_address: SocketAddr,
    // Maximum number of simultaneously connected clients (default 1).
    max_clients: usize,
    // Maximum acceptable message buffer size in bytes (default 32).
    max_buffer: usize,
    // Connected clients, keyed by their socket address.
    clients: HashMap<SocketAddr, Client>,
}
impl Server {
    /// Resolves `a` to a socket address. Falls back to 127.0.0.1:2424 when the
    /// resolver yields no address; panics when the input cannot be resolved.
    pub fn new<A: ToSocketAddrs>(a: A) -> Server {
        let server_address = match a.to_socket_addrs() {
            Ok(mut addrs) => {
                if let Some(addr) = addrs.next() {
                    addr
                } else {
                    println!("Could not run the server on that address, use fallback instead: 127.0.0.1:2424");
                    SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 2424)
                }
            }
            Err(_) => panic!("Invalid address given"),
        };
        Server {
            server_address,
            max_clients: 1,
            max_buffer: 32,
            clients: HashMap::with_capacity(1),
        }
    }
    /// Builder-style setter for the client limit; re-sizes the client map.
    pub fn max_clients(mut self, max_clients: usize) -> Server {
        if max_clients == self.max_clients {
            return self;
        }
        self.max_clients = max_clients;
        self.clients = HashMap::with_capacity(max_clients);
        self
    }
    /// Builder-style setter for the buffer cap; values at or below the current
    /// threshold are ignored.
    pub fn max_acceptable_buffer(mut self, max_acceptable_buffer: usize) -> Server {
        /* Default value as a threshold */
        if max_acceptable_buffer > self.max_buffer {
            self.max_buffer = max_acceptable_buffer;
        }
        self
    }
    /// Accept loop: non-blocking accept of new connections plus relaying of
    /// client payloads to every other connected client.
    pub fn run(mut self) {
        let listener = TcpListener::bind(self.server_address)
            .expect("Could not run the server, maybe the address and port already reserved?");
        listener
            .set_nonblocking(true)
            .expect("Could not run the server as non-blocking");
        println!("Server running on tcp://{}", self.server_address);
        println!("Setting maximum client to {}", self.max_clients);
        println!(
            "Setting maximum acceptable buffer to {} bytes",
            self.max_buffer
        );
        let (tx, rx) = mpsc::channel::<ClientPayload>();
        loop {
            if let Ok((stream, socket_addr)) = listener.accept() {
                /* Dropping new incoming socket if the server full already */
                if self.clients.len() == self.max_clients
                    && !self.clients.contains_key(&socket_addr)
                {
                    continue;
                }
                /* Creating new session */
                let sender = tx.clone();
                let client = Client::new(stream, socket_addr, self.max_buffer, sender);
                self.clients.insert(socket_addr, client);
            }
            if let Ok((socket_addr, payload_signal, message)) = rx.try_recv() {
                match payload_signal {
                    PayloadSignal::InterruptSignal => {
                        self.clients.remove(&socket_addr);
                    }
                    _ => {
                        if let Some(message) = message {
                            let fmt = format!("{} -> {}\r\n", socket_addr, message);
                            print!("{}", fmt);
                            // FIX: broadcast in place with `iter_mut` instead of
                            // tearing down and rebuilding the whole HashMap per
                            // message; `write_all` also covers partial writes.
                            for (addr, client) in self.clients.iter_mut() {
                                if *addr != socket_addr {
                                    client.stream.write_all(fmt.as_bytes()).ok();
                                }
                            }
                        }
                    }
                }
            }
            thread::sleep(Duration::from_micros(1));
        }
    }
}
|
use std::fmt::Debug;
/// if you want an owned trait object, then a Box is the way to go
// `Draw` requires `Debug` so every component can also be debug-printed.
pub trait Draw: Debug {
    fn draw(&self);
}
#[derive(Debug)]
pub struct Button {
    id: usize
}
impl Draw for Button {
    fn draw(&self) {
        println!("drawing button id={}", self.id);
    }
}
#[derive(Debug)]
pub struct DropDown {
    id: usize
}
impl Draw for DropDown {
    fn draw(&self) {
        println!("drawing DropDown id={}", self.id);
    }
}
/// A heterogeneous list of drawable widgets stored as boxed trait objects.
#[derive(Debug)]
pub struct Screen {
    pub components: Vec<Box<dyn Draw>>,
}
impl Screen {
    /// Creates an empty screen with no components.
    pub fn new() -> Screen {
        let components = Vec::new();
        Screen { components }
    }
    /// Boxes `drawable` and appends it to the component list.
    pub fn push(&mut self, drawable: impl Draw + 'static) {
        let boxed: Box<dyn Draw> = Box::new(drawable);
        self.components.push(boxed);
    }
    /// Draws every component in insertion order.
    pub fn run(&self) {
        self.components.iter().for_each(|component| component.draw());
    }
}
pub fn main() {
    let button = Button { id: 343 };
    let drop_down = DropDown { id: 6743 };
    let mut screen = Screen::new();
    screen.push(drop_down);
    screen.push(button);
    screen.run();
    dbg!(screen);
}
#![cfg(feature = "v3")]
//! Conversion traits and helper functions for converting openapi v2 types to openapi v3.
//! For the OpenAPI v3 types the crate `openapiv3` is used.
mod contact;
mod external_documentation;
mod header;
mod info;
mod license;
mod openapi;
mod operation;
mod parameter;
mod paths;
mod reference;
mod request_body;
mod response;
mod schema;
mod security_scheme;
mod tag;
use super::v2::{models as v2, models::Either};
use parameter::non_body_parameter_to_v3_parameter;
use reference::invalid_referenceor;
use response::OperationEitherResponse;
/// Convert this crate's openapi v2 (`DefaultApiRaw`) to `openapiv3::OpenAPI`.
pub fn openapiv2_to_v3(v2: v2::DefaultApiRaw) -> openapiv3::OpenAPI {
    // `From<DefaultApiRaw>` is implemented for `openapiv3::OpenAPI`, so the
    // conversion is a plain `into`.
    v2.into()
}
|
use crate::parse::{Instruction, ParseResult, RollbackableTokenStream};
/// Parses an octave-shift token: `<` shifts by +1, `>` by -1; anything else
/// (including a read failure) yields no instruction.
pub fn octave(stream: &mut RollbackableTokenStream) -> ParseResult {
    let instruction = match stream.take_character() {
        Ok((_, '<')) => Some(Instruction::Octave(1)),
        Ok((_, '>')) => Some(Instruction::Octave(-1)),
        _ => None,
    };
    Ok(instruction)
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::collections::BTreeMap;
use common_exception::Result;
use crate::optimizer::SExpr;
use crate::plans::Join;
use crate::plans::JoinType;
use crate::plans::Operator;
use crate::plans::RelOp;
use crate::plans::RelOperator;
use crate::plans::RuntimeFilterId;
use crate::plans::RuntimeFilterSource;
use crate::ScalarExpr;
pub struct RuntimeFilterResult {
    // Filters keyed by id, built from the join's left-side condition exprs.
    pub left_runtime_filters: BTreeMap<RuntimeFilterId, ScalarExpr>,
    // Filters keyed by id, built from the join's right-side condition exprs.
    pub right_runtime_filters: BTreeMap<RuntimeFilterId, ScalarExpr>,
}
/// Pairs up the join's right/left condition expressions by position and keys
/// each pair under the same `RuntimeFilterId`.
fn create_runtime_filters(join: &Join) -> Result<RuntimeFilterResult> {
    let mut left_runtime_filters = BTreeMap::new();
    let mut right_runtime_filters = BTreeMap::new();
    let condition_pairs = join
        .right_conditions
        .iter()
        .zip(join.left_conditions.iter());
    for (idx, (right_expr, left_expr)) in condition_pairs.enumerate() {
        right_runtime_filters.insert(RuntimeFilterId::new(idx), right_expr.clone());
        left_runtime_filters.insert(RuntimeFilterId::new(idx), left_expr.clone());
    }
    Ok(RuntimeFilterResult {
        left_runtime_filters,
        right_runtime_filters,
    })
}
fn wrap_runtime_filter_source(
s_expr: &SExpr,
runtime_filter_result: RuntimeFilterResult,
) -> Result<SExpr> {
let source_node = RuntimeFilterSource {
left_runtime_filters: runtime_filter_result.left_runtime_filters,
right_runtime_filters: runtime_filter_result.right_runtime_filters,
};
let build_side = s_expr.child(1)?.clone();
let mut probe_side = s_expr.child(0)?.clone();
probe_side = SExpr::create_binary(source_node.into(), probe_side, build_side.clone());
let mut join: Join = s_expr.plan().clone().try_into()?;
join.contain_runtime_filter = true;
let s_expr = s_expr.replace_plan(RelOperator::Join(join));
Ok(s_expr.replace_children(vec![probe_side, build_side]))
}
// Traverse plan tree and check if exists join
// Currently, only support inner join.
pub fn try_add_runtime_filter_nodes(expr: &SExpr) -> Result<SExpr> {
    if expr.children().len() == 1 && expr.children()[0].is_pattern() {
        return Ok(expr.clone());
    }
    let new_expr = if expr.plan.rel_op() == RelOp::Join {
        // Todo(xudong): develop a strategy to decide whether to add runtime filter node
        add_runtime_filter_nodes(expr)?
    } else {
        expr.clone()
    };
    // Recurse into each child, propagating the first error if any.
    let children = new_expr
        .children
        .iter()
        .map(try_add_runtime_filter_nodes)
        .collect::<Result<Vec<_>>>()?;
    Ok(new_expr.replace_children(children))
}
/// Wraps a join expression with runtime-filter nodes; non-inner joins are
/// returned unchanged. Panics if `expr` is not a join.
fn add_runtime_filter_nodes(expr: &SExpr) -> Result<SExpr> {
    assert_eq!(expr.plan.rel_op(), RelOp::Join);
    let join: Join = expr.plan().clone().try_into()?;
    match join.join_type {
        JoinType::Inner => {
            let filters = create_runtime_filters(&join)?;
            wrap_runtime_filter_source(expr, filters)
        }
        _ => Ok(expr.clone()),
    }
}
|
use day07;
/// Runs both parts of AoC 2018 day 7 on the bundled puzzle input.
fn main() {
    let raw = include_str!("../../input/2018/day7.txt");
    let input: Vec<&str> = raw.lines().collect();
    println!("Part 1: {}", day07::part1(&input));
    println!("Part 2: {}", day07::part2(&input, 5, 60));
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[allow(clippy::module_inception)]
mod share;
pub use share::AddShareAccountsReply;
pub use share::AddShareAccountsReq;
pub use share::CreateShareReply;
pub use share::CreateShareReq;
pub use share::DropShareReply;
pub use share::DropShareReq;
pub use share::GetObjectGrantPrivilegesReply;
pub use share::GetObjectGrantPrivilegesReq;
pub use share::GetShareGrantObjectReply;
pub use share::GetShareGrantObjectReq;
pub use share::GetShareGrantTenants;
pub use share::GetShareGrantTenantsReply;
pub use share::GetShareGrantTenantsReq;
pub use share::GrantShareObjectReply;
pub use share::GrantShareObjectReq;
pub use share::ObjectGrantPrivilege;
pub use share::ObjectSharedByShareIds;
pub use share::RemoveShareAccountsReply;
pub use share::RemoveShareAccountsReq;
pub use share::RevokeShareObjectReply;
pub use share::RevokeShareObjectReq;
pub use share::ShareAccountMeta;
pub use share::ShareAccountNameIdent;
pub use share::ShareAccountReply;
pub use share::ShareDatabaseSpec;
pub use share::ShareGrantEntry;
pub use share::ShareGrantObject;
pub use share::ShareGrantObjectName;
pub use share::ShareGrantObjectPrivilege;
pub use share::ShareGrantObjectSeqAndId;
pub use share::ShareGrantReplyObject;
pub use share::ShareId;
pub use share::ShareIdToName;
pub use share::ShareIdent;
pub use share::ShareInfo;
pub use share::ShareMeta;
pub use share::ShareNameIdent;
pub use share::ShareSpec;
pub use share::ShareTableInfoMap;
pub use share::ShareTableSpec;
pub use share::ShowSharesReply;
pub use share::ShowSharesReq;
pub use share::TableInfoMap;
|
// Copyright 2018 Benjamin Fry <benjaminfry@me.com>
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
extern crate pg_extern_attr;
extern crate pg_extend;
use pg_extern_attr::pg_extern;
use pg_extend::pg_magic;
// This tells Postgres this library is a Postgres extension
pg_magic!(version: pg_sys::PG_VERSION_NUM);
/// The pg_extern attribute wraps the function in the proper functions syntax for C extensions
#[pg_extern]
// Concatenates `b` onto the end of `a` and returns the combined string.
fn concat_rs(mut a: String, b: String) -> String {
    a += &b;
    a
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_concat_rs() {
        // Basic smoke test of the exported concatenation function.
        let joined = concat_rs("a".to_string(), "b".to_string());
        assert_eq!(joined, "ab");
    }
}
|
use std::collections::hash_map::RandomState;
use std::collections::HashMap;
use crate::cli;
use crate::fs;
use crate::geom2d::{Dir, Point};
/// Entry point for AoC 2019 day 3: parses both wires, indexes the first one's
/// visited points, then scans the second for intersections.
pub fn run() {
    let wires: Vec<Vec<Step>> =
        fs::read_lines(cli::aoc_filename("aoc_2019_03.txt"), parse_line).unwrap();
    let first_wire_points = points_on_wire(&wires[0]);
    let stats = cross_stats(&first_wire_points, &wires[1]);
    println!("closest intersection: {} steps", stats.closest);
    println!("fastest intersection: {} steps", stats.fastest);
}
// Aggregated intersection metrics for a pair of wires.
struct CrossStats {
    // Smallest Manhattan distance from the origin to any intersection.
    closest: i32,
    // Smallest combined step count (first wire's count + second wire's count)
    // to reach any intersection.
    fastest: i32,
}
/// Walks the second wire point by point and, at every point the first wire
/// also visited, updates the closest/fastest intersection metrics.
fn cross_stats(points: &HashMap<Point, i32, RandomState>, wire: &Vec<Step>) -> CrossStats {
    let mut closest = i32::max_value();
    let mut fastest = i32::max_value();
    let mut steps_taken = 0;
    walk_wire_by_point(wire, |p| {
        steps_taken += 1;
        if let Some(&first_count) = points.get(&p) {
            closest = closest.min(p.manhattan_distance());
            fastest = fastest.min(first_count + steps_taken);
        }
    });
    CrossStats { closest, fastest }
}
/// Maps every point the wire visits to the step count of its FIRST visit.
fn points_on_wire(wire: &Vec<Step>) -> HashMap<Point, i32, RandomState> {
    let mut first_visit = HashMap::with_capacity(wire_length(wire));
    let mut step_no = 0;
    walk_wire_by_point(wire, |p| {
        step_no += 1;
        // later revisits must not overwrite the first arrival count
        first_visit.entry(p).or_insert(step_no);
    });
    first_visit
}
/// Total number of unit steps in the wire, plus one
/// (the extra slot presumably accounts for the origin — TODO confirm).
fn wire_length(wire: &Vec<Step>) -> usize {
    let mut total = 1;
    walk_wire_by_step(wire, |s| total += s.count);
    total
}
/// Invokes `f` once for every unit-step point along the wire, starting one
/// step away from the origin (the origin itself is not reported).
fn walk_wire_by_point<F>(wire: &Vec<Step>, mut f: F)
where
    F: FnMut(Point),
{
    let mut pos = Point::origin();
    walk_wire_by_step(wire, |step| {
        for _ in 0..step.count {
            pos = pos.step(&step.dir);
            f(pos);
        }
    });
}
/// Invokes `f` once per segment of the wire, in order.
fn walk_wire_by_step<F>(wire: &Vec<Step>, f: F)
where
    F: FnMut(&Step),
{
    wire.iter().for_each(f);
}
/// Parses a comma-separated line of step tokens (e.g. "R75,D30") into steps.
fn parse_line(l: &str) -> Vec<Step> {
    l.split(',').map(|token| Step::parse(token)).collect()
}
impl Dir {
    /// Maps a direction character (either case) to a `Dir`; panics otherwise.
    fn parse(c: char) -> Dir {
        match c.to_ascii_lowercase() {
            'u' => Dir::Up,
            'd' => Dir::Down,
            'r' => Dir::Right,
            'l' => Dir::Left,
            _ => panic!(),
        }
    }
}
#[derive(Debug)]
// One segment of a wire path: a direction plus how many grid cells to move.
struct Step {
    dir: Dir,
    count: usize,
}
impl Step {
    /// Parses a step token like `"U12"`: one direction letter then a count.
    ///
    /// # Panics
    /// Panics when the token is empty, the direction letter is unknown, or
    /// the count is not a valid `usize`. Messages added so a bad input line
    /// is diagnosable instead of a bare `panic!()`.
    fn parse(s: &str) -> Step {
        let dir = match s.chars().next() {
            Some(c) => Dir::parse(c),
            _ => panic!("empty step token"),
        };
        Step {
            dir,
            count: s[1..].parse().expect("invalid step count"),
        }
    }
}
|
#[doc = r"Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register contents.
    bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
    // Raw 32-bit value that will be stored into the register.
    bits: u32,
}
impl super::MODE {
    #[doc = r"Modifies the contents of the register"]
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: both the reader and the writer are seeded with
        // the current register value before the closure runs.
        let bits = self.register.get();
        self.register.set(f(&R { bits }, &mut W { bits }).bits);
    }
    #[doc = r"Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r"Writes to the register"]
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value rather than
        // the current register contents.
        self.register.set(
            f(&mut W {
                bits: Self::reset_value(),
            })
            .bits,
        );
    }
    #[doc = r"Reset value of the register"]
    #[inline(always)]
    pub const fn reset_value() -> u32 {
        0
    }
    #[doc = r"Writes the reset value to the register"]
    #[inline(always)]
    pub fn reset(&self) {
        self.register.set(Self::reset_value())
    }
}
#[doc = "Possible values of the field `SHAMD5_MODE_ALGO`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SHAMD5_MODE_ALGOR {
    #[doc = "MD5"]
    SHAMD5_MODE_ALGO_MD5,
    #[doc = "SHA-1"]
    SHAMD5_MODE_ALGO_SHA1,
    #[doc = "SHA-224"]
    SHAMD5_MODE_ALGO_SHA224,
    #[doc = "SHA-256"]
    SHAMD5_MODE_ALGO_SHA256,
    #[doc = r"Reserved"]
    _Reserved(u8),
}
impl SHAMD5_MODE_ALGOR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        // Known variants map to their hardware encoding; reserved values
        // carry their raw bits through unchanged.
        use SHAMD5_MODE_ALGOR::*;
        match *self {
            SHAMD5_MODE_ALGO_MD5 => 0,
            SHAMD5_MODE_ALGO_SHA1 => 2,
            SHAMD5_MODE_ALGO_SHA224 => 4,
            SHAMD5_MODE_ALGO_SHA256 => 6,
            _Reserved(bits) => bits,
        }
    }
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _from(value: u8) -> SHAMD5_MODE_ALGOR {
        // Inverse of `bits`: unknown encodings become `_Reserved`.
        use SHAMD5_MODE_ALGOR::*;
        match value {
            0 => SHAMD5_MODE_ALGO_MD5,
            2 => SHAMD5_MODE_ALGO_SHA1,
            4 => SHAMD5_MODE_ALGO_SHA224,
            6 => SHAMD5_MODE_ALGO_SHA256,
            i => _Reserved(i),
        }
    }
    #[doc = "Checks if the value of the field is `SHAMD5_MODE_ALGO_MD5`"]
    #[inline(always)]
    pub fn is_shamd5_mode_algo_md5(&self) -> bool {
        matches!(self, SHAMD5_MODE_ALGOR::SHAMD5_MODE_ALGO_MD5)
    }
    #[doc = "Checks if the value of the field is `SHAMD5_MODE_ALGO_SHA1`"]
    #[inline(always)]
    pub fn is_shamd5_mode_algo_sha1(&self) -> bool {
        matches!(self, SHAMD5_MODE_ALGOR::SHAMD5_MODE_ALGO_SHA1)
    }
    #[doc = "Checks if the value of the field is `SHAMD5_MODE_ALGO_SHA224`"]
    #[inline(always)]
    pub fn is_shamd5_mode_algo_sha224(&self) -> bool {
        matches!(self, SHAMD5_MODE_ALGOR::SHAMD5_MODE_ALGO_SHA224)
    }
    #[doc = "Checks if the value of the field is `SHAMD5_MODE_ALGO_SHA256`"]
    #[inline(always)]
    pub fn is_shamd5_mode_algo_sha256(&self) -> bool {
        matches!(self, SHAMD5_MODE_ALGOR::SHAMD5_MODE_ALGO_SHA256)
    }
}
#[doc = "Values that can be written to the field `SHAMD5_MODE_ALGO`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SHAMD5_MODE_ALGOW {
    #[doc = "MD5"]
    SHAMD5_MODE_ALGO_MD5,
    #[doc = "SHA-1"]
    SHAMD5_MODE_ALGO_SHA1,
    #[doc = "SHA-224"]
    SHAMD5_MODE_ALGO_SHA224,
    #[doc = "SHA-256"]
    SHAMD5_MODE_ALGO_SHA256,
}
impl SHAMD5_MODE_ALGOW {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline(always)]
    pub fn _bits(&self) -> u8 {
        // Hardware encoding of each selectable algorithm; unlike the read-side
        // enum there is no reserved arm, so the match is exhaustive.
        use SHAMD5_MODE_ALGOW::*;
        match self {
            SHAMD5_MODE_ALGO_MD5 => 0,
            SHAMD5_MODE_ALGO_SHA1 => 2,
            SHAMD5_MODE_ALGO_SHA224 => 4,
            SHAMD5_MODE_ALGO_SHA256 => 6,
        }
    }
}
#[doc = r"Proxy"]
pub struct _SHAMD5_MODE_ALGOW<'a> {
    // Borrowed register writer whose `bits` this proxy mutates.
    w: &'a mut W,
}
impl<'a> _SHAMD5_MODE_ALGOW<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SHAMD5_MODE_ALGOW) -> &'a mut W {
        // SAFETY: every `SHAMD5_MODE_ALGOW` variant encodes a valid field value.
        unsafe { self.bits(variant._bits()) }
    }
    #[doc = "MD5"]
    #[inline(always)]
    pub fn shamd5_mode_algo_md5(self) -> &'a mut W {
        self.variant(SHAMD5_MODE_ALGOW::SHAMD5_MODE_ALGO_MD5)
    }
    #[doc = "SHA-1"]
    #[inline(always)]
    pub fn shamd5_mode_algo_sha1(self) -> &'a mut W {
        self.variant(SHAMD5_MODE_ALGOW::SHAMD5_MODE_ALGO_SHA1)
    }
    #[doc = "SHA-224"]
    #[inline(always)]
    pub fn shamd5_mode_algo_sha224(self) -> &'a mut W {
        self.variant(SHAMD5_MODE_ALGOW::SHAMD5_MODE_ALGO_SHA224)
    }
    #[doc = "SHA-256"]
    #[inline(always)]
    pub fn shamd5_mode_algo_sha256(self) -> &'a mut W {
        self.variant(SHAMD5_MODE_ALGOW::SHAMD5_MODE_ALGO_SHA256)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at offset 0: clear the field, then OR in the new value.
        self.w.bits &= !(7 << 0);
        self.w.bits |= ((value as u32) & 7) << 0;
        self.w
    }
}
#[doc = r"Value of the field"]
pub struct SHAMD5_MODE_ALGO_CONSTANTR {
    bits: bool,
}
impl SHAMD5_MODE_ALGO_CONSTANTR {
    #[doc = r"Value of the field as raw bits"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"Returns `true` if the bit is clear (0)"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"Returns `true` if the bit is set (1)"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Write proxy for the single-bit `SHAMD5_MODE_ALGO_CONSTANT` field"]
pub struct _SHAMD5_MODE_ALGO_CONSTANTW<'a> {
    w: &'a mut W,
}
impl<'a> _SHAMD5_MODE_ALGO_CONSTANTW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes the raw bit value (bit 3 of the register)"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 3;
        // Clear the bit, then merge in the new value with a single store.
        self.w.bits = (self.w.bits & !(1 << OFFSET)) | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = r"Reader for the single-bit `SHAMD5_MODE_CLOSE_HASH` field"]
pub struct SHAMD5_MODE_CLOSE_HASHR {
    bits: bool,
}
impl SHAMD5_MODE_CLOSE_HASHR {
    #[doc = r"Raw bit value of the field"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"`true` when the bit reads as 0"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"`true` when the bit reads as 1"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Write proxy for the single-bit `SHAMD5_MODE_CLOSE_HASH` field"]
pub struct _SHAMD5_MODE_CLOSE_HASHW<'a> {
    w: &'a mut W,
}
impl<'a> _SHAMD5_MODE_CLOSE_HASHW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes the raw bit value (bit 4 of the register)"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 4;
        // Clear the bit, then merge in the new value with a single store.
        self.w.bits = (self.w.bits & !(1 << OFFSET)) | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = r"Reader for the single-bit `SHAMD5_MODE_HMAC_KEY_PROC` field"]
pub struct SHAMD5_MODE_HMAC_KEY_PROCR {
    bits: bool,
}
impl SHAMD5_MODE_HMAC_KEY_PROCR {
    #[doc = r"Raw bit value of the field"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"`true` when the bit reads as 0"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"`true` when the bit reads as 1"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Write proxy for the single-bit `SHAMD5_MODE_HMAC_KEY_PROC` field"]
pub struct _SHAMD5_MODE_HMAC_KEY_PROCW<'a> {
    w: &'a mut W,
}
impl<'a> _SHAMD5_MODE_HMAC_KEY_PROCW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes the raw bit value (bit 5 of the register)"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 5;
        // Clear the bit, then merge in the new value with a single store.
        self.w.bits = (self.w.bits & !(1 << OFFSET)) | (u32::from(value) << OFFSET);
        self.w
    }
}
#[doc = r"Reader for the single-bit `SHAMD5_MODE_HMAC_OUTER_HASH` field"]
pub struct SHAMD5_MODE_HMAC_OUTER_HASHR {
    bits: bool,
}
impl SHAMD5_MODE_HMAC_OUTER_HASHR {
    #[doc = r"Raw bit value of the field"]
    #[inline(always)]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r"`true` when the bit reads as 0"]
    #[inline(always)]
    pub fn bit_is_clear(&self) -> bool {
        !self.bits
    }
    #[doc = r"`true` when the bit reads as 1"]
    #[inline(always)]
    pub fn bit_is_set(&self) -> bool {
        self.bits
    }
}
#[doc = r"Write proxy for the single-bit `SHAMD5_MODE_HMAC_OUTER_HASH` field"]
pub struct _SHAMD5_MODE_HMAC_OUTER_HASHW<'a> {
    w: &'a mut W,
}
impl<'a> _SHAMD5_MODE_HMAC_OUTER_HASHW<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes the raw bit value (bit 7 of the register)"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        const OFFSET: u32 = 7;
        // Clear the bit, then merge in the new value with a single store.
        self.w.bits = (self.w.bits & !(1 << OFFSET)) | (u32::from(value) << OFFSET);
        self.w
    }
}
impl R {
    #[doc = r"Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:2 - Hash Algorithm"]
    #[inline(always)]
    pub fn shamd5_mode_algo(&self) -> SHAMD5_MODE_ALGOR {
        // Low three bits select the algorithm.
        let raw = (self.bits & 7) as u8;
        SHAMD5_MODE_ALGOR::_from(raw)
    }
    #[doc = "Bit 3 - The initial digest register will be overwritten with the algorithm constants for the selected algorithm when hashing and the initial digest count register will be reset to 0"]
    #[inline(always)]
    pub fn shamd5_mode_algo_constant(&self) -> SHAMD5_MODE_ALGO_CONSTANTR {
        SHAMD5_MODE_ALGO_CONSTANTR {
            bits: self.bits & (1 << 3) != 0,
        }
    }
    #[doc = "Bit 4 - Performs the padding, the Hash/HMAC will be 'closed' at the end of the block, as per MD5/SHA-1/SHA-2 specification"]
    #[inline(always)]
    pub fn shamd5_mode_close_hash(&self) -> SHAMD5_MODE_CLOSE_HASHR {
        SHAMD5_MODE_CLOSE_HASHR {
            bits: self.bits & (1 << 4) != 0,
        }
    }
    #[doc = "Bit 5 - HMAC Key Processing Enable"]
    #[inline(always)]
    pub fn shamd5_mode_hmac_key_proc(&self) -> SHAMD5_MODE_HMAC_KEY_PROCR {
        SHAMD5_MODE_HMAC_KEY_PROCR {
            bits: self.bits & (1 << 5) != 0,
        }
    }
    #[doc = "Bit 7 - HMAC Outer Hash Processing Enable"]
    #[inline(always)]
    pub fn shamd5_mode_hmac_outer_hash(&self) -> SHAMD5_MODE_HMAC_OUTER_HASHR {
        SHAMD5_MODE_HMAC_OUTER_HASHR {
            bits: self.bits & (1 << 7) != 0,
        }
    }
}
impl W {
    #[doc = r"Writes raw bits to the whole register; unsafe because arbitrary values may include reserved or invalid bit patterns for the hardware"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:2 - Hash Algorithm; returns a write proxy for the field"]
    #[inline(always)]
    pub fn shamd5_mode_algo(&mut self) -> _SHAMD5_MODE_ALGOW {
        _SHAMD5_MODE_ALGOW { w: self }
    }
    #[doc = "Bit 3 - The initial digest register will be overwritten with the algorithm constants for the selected algorithm when hashing and the initial digest count register will be reset to 0; returns a write proxy for the bit"]
    #[inline(always)]
    pub fn shamd5_mode_algo_constant(&mut self) -> _SHAMD5_MODE_ALGO_CONSTANTW {
        _SHAMD5_MODE_ALGO_CONSTANTW { w: self }
    }
    #[doc = "Bit 4 - Performs the padding, the Hash/HMAC will be 'closed' at the end of the block, as per MD5/SHA-1/SHA-2 specification; returns a write proxy for the bit"]
    #[inline(always)]
    pub fn shamd5_mode_close_hash(&mut self) -> _SHAMD5_MODE_CLOSE_HASHW {
        _SHAMD5_MODE_CLOSE_HASHW { w: self }
    }
    #[doc = "Bit 5 - HMAC Key Processing Enable; returns a write proxy for the bit"]
    #[inline(always)]
    pub fn shamd5_mode_hmac_key_proc(&mut self) -> _SHAMD5_MODE_HMAC_KEY_PROCW {
        _SHAMD5_MODE_HMAC_KEY_PROCW { w: self }
    }
    #[doc = "Bit 7 - HMAC Outer Hash Processing Enable; returns a write proxy for the bit"]
    #[inline(always)]
    pub fn shamd5_mode_hmac_outer_hash(&mut self) -> _SHAMD5_MODE_HMAC_OUTER_HASHW {
        _SHAMD5_MODE_HMAC_OUTER_HASHW { w: self }
    }
}
|
//! Streaming bodies for Requests and Responses
//!
//! For both [Clients](::client) and [Servers](::server), requests and
//! responses use streaming bodies, instead of complete buffering. This
//! allows applications to not use memory they don't need, and allows exerting
//! back-pressure on connections by only reading when asked.
//!
//! There are two pieces to this in hyper:
//!
//! - The [`Payload`](body::Payload) trait that describes all possible bodies. hyper
//! allows any body type that implements `Payload`, allowing applications to
//! have fine-grained control over their streaming.
//! - The [`Body`](Body) concrete type, which is an implementation of `Payload`,
//! and returned by hyper as a "receive stream" (so, for server requests and
//! client responses). It is also a decent default implementation if you don't
//! have very custom needs of your send streams.
pub use self::body::{Body, Sender};
pub use self::chunk::Chunk;
pub use self::payload::Payload;
mod body;
mod chunk;
mod payload;
// The full_data API is not stable, so these types are to try to prevent
// users from being able to:
//
// - Implement `__hyper_full_data` on their own Payloads.
// - Call `__hyper_full_data` on any Payload.
//
// That's because to implement it, they need to name these types, and
// they can't because they aren't exported. And to call it, they would
// need to create one of these values, which they also can't.
pub(crate) mod internal {
    // Crate-private argument token: outside code cannot construct one, which
    // prevents calling the unstable full-data API from other crates.
    #[allow(missing_debug_implementations)]
    pub struct FullDataArg(pub(crate) ());
    // Crate-private return wrapper for the same reason; carries an optional
    // value (presumably the buffered full body — confirm at the call sites).
    #[allow(missing_debug_implementations)]
    pub struct FullDataRet<B>(pub(crate) Option<B>);
}
// Compile-time checks that the public body types satisfy the auto traits
// callers rely on; this function is never invoked at runtime.
fn _assert_send_sync() {
    fn require_send<T: Send>() {}
    fn require_sync<T: Sync>() {}
    require_send::<Body>();
    require_send::<Chunk>();
    require_sync::<Chunk>();
}
|
#![feature(seek_stream_len)]
mod archive;
use archive::{Archive, VersionNumber};
/// Smoke-test driver: creates an archive, appends two file snapshots as one
/// version, then reads the first file back and prints its contents.
fn main() {
    let mut archive = Archive::new("E:\\Software Projects\\IntelliJ\\gud_archive\\test");
    archive.create();
    let mut appender = archive.appender(VersionNumber { number: 133 }, String::from("Initial things"));
    use std::env::{current_dir, set_current_dir};
    // Snapshot paths below are relative, so temporarily switch into the
    // project directory and restore the old working directory afterwards.
    let current = current_dir().unwrap();
    set_current_dir("E:\\Software Projects\\IntelliJ\\gud_archive").unwrap();
    appender.append_snapshot("a.txt");
    appender.append_snapshot("b.txt");
    appender.finish();
    set_current_dir(current).unwrap();
    let mut reader = archive.reader();
    let mut s = Vec::new();
    // The return value is not used here; the underscore prefix documents that
    // (the binding was previously a needlessly mutable `taken`).
    let _taken = reader.file(0, "a.txt", &mut s).unwrap();
    println!("{}", String::from_utf8(s).unwrap());
}
|
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - SYSCFG configuration register 1"]
    pub cfgr1: CFGR1,
    // 20 bytes of padding so that CFGR2 lands at offset 0x18.
    _reserved1: [u8; 20usize],
    #[doc = "0x18 - SYSCFG configuration register 2"]
    pub cfgr2: CFGR2,
}
#[doc = "SYSCFG configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cfgr1](cfgr1) module"]
pub type CFGR1 = crate::Reg<u32, _CFGR1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CFGR1;
#[doc = "`read()` method returns [cfgr1::R](cfgr1::R) reader structure"]
impl crate::Readable for CFGR1 {}
#[doc = "`write(|w| ..)` method takes [cfgr1::W](cfgr1::W) writer structure"]
impl crate::Writable for CFGR1 {}
#[doc = "SYSCFG configuration register 1"]
pub mod cfgr1;
#[doc = "SYSCFG configuration register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cfgr2](cfgr2) module"]
pub type CFGR2 = crate::Reg<u32, _CFGR2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CFGR2;
#[doc = "`read()` method returns [cfgr2::R](cfgr2::R) reader structure"]
impl crate::Readable for CFGR2 {}
#[doc = "`write(|w| ..)` method takes [cfgr2::W](cfgr2::W) writer structure"]
impl crate::Writable for CFGR2 {}
#[doc = "SYSCFG configuration register 2"]
pub mod cfgr2;
|
use wasm_bindgen::prelude::*;
/// Colors: models the rgba color type.
///
/// Channel fields are `u16` (not `u8`, as previously documented), but `new`
/// wraps each channel into 0-255 with `rem_euclid(256)`:
/// red: u16 (0-255 after construction)
/// green: u16 (0-255 after construction)
/// blue: u16 (0-255 after construction)
/// alpha: f64 (clamped to 0.0-1.0 by `new`)
#[wasm_bindgen]
#[derive(Clone, Copy, Debug)]
pub struct Colors{
    red: u16,
    green: u16,
    blue: u16,
    alpha: f64,
}
#[wasm_bindgen]
impl Colors{
pub fn new(red: u16, green: u16, blue: u16, alpha: f64) -> Self{
Colors{
red: red.rem_euclid(256),
green: green.rem_euclid(256),
blue: blue.rem_euclid(256),
alpha : if alpha>1.0{
1.0
} else if alpha <0.0{
0.0
} else{
alpha
}
}
}
pub fn rgba(&self) -> String{
format!("rgba({}, {}, {}, {})", self.red, self.green, self.blue, self.alpha)
}
} |
use std::io;
/// Reads `N M` and the per-product vote counts from stdin, then prints `Yes`
/// when every one of the `M` most-voted products has at least 1/(4M) of the
/// total votes, otherwise `No`.
fn main() {
    // First line: N and M (only M is needed below).
    let mut one_line_buffer = String::new();
    io::stdin().read_line(&mut one_line_buffer).unwrap();
    let one_line_vec: Vec<i64> = one_line_buffer
        .trim()
        .split_whitespace()
        .map(|c| c.parse().unwrap())
        .collect();
    let popular_product_num = one_line_vec[1]; // M
    // Second line: the vote count of each product.
    let mut two_line_buffer = String::new();
    io::stdin().read_line(&mut two_line_buffer).unwrap();
    let vote_nums: Vec<i64> = two_line_buffer
        .trim()
        .split_whitespace()
        .map(|c| c.parse().unwrap())
        .collect();
    if meets_quota(popular_product_num, &vote_nums) {
        println!("Yes");
    } else {
        println!("No");
    }
}
/// True when each of the `m` most-voted products has at least a 1/(4m) share
/// of the total votes. Uses the exact integer inequality `votes * 4 * m >=
/// total` instead of the previous floating-point threshold `total / (4m)`,
/// which could lose precision on large vote counts.
fn meets_quota(m: i64, votes: &[i64]) -> bool {
    let total: i64 = votes.iter().sum();
    let mut sorted = votes.to_vec();
    // Descending order so the first `m` entries are the candidates.
    sorted.sort_unstable_by(|a, b| b.cmp(a));
    sorted.iter().take(m as usize).all(|&v| v * 4 * m >= total)
}
|
use std::sync::Arc;
use libloading;
use serde::{Serialize, Deserialize};
use std::fmt;
use std::collections::HashMap;
use crate::{ ReporterConfig, PublisherConfig };
/// List of plugin function types.
///
/// Each variant boxes a trait object so heterogeneous plugin functions can be
/// stored together; the `Send` bound allows the boxed functions to be moved
/// across threads.
pub enum FunctionType {
    /// A publisher function is called every time a report has been
    /// generated
    Publisher(Box<dyn PublisherFunction + Send>),
    /// A publisher init function is called when psistats service starts
    PublisherInit(Box<dyn PublisherInitFunction + Send>),
    /// A reporter function is called on demand and/or on an interval
    /// It generates reports that can then be published
    Reporter(Box<dyn ReporterFunction + Send>),
    /// A reporter init function is called when psistats service starts
    ReporterInit(Box<dyn ReporterInitFunction + Send>)
}
/// A reporter function is called on demand and/or on an interval.
/// It generates reports that can then be published.
pub trait ReporterFunction {
    /// Produce a report for the given reporter configuration.
    fn call(&self, config: &ReporterConfig) -> Result<PsistatsReport, PluginError>;
}
/// A reporter init function is called when the psistats service starts.
pub trait ReporterInitFunction {
    /// One-time initialisation for the reporter plugin.
    fn call(&self, config: &ReporterConfig) -> Result<(), PluginError>;
}
/// A publisher function is called every time a report has been
/// generated.
pub trait PublisherFunction {
    /// Publish the given report using the publisher configuration.
    fn call(&self, report: &PsistatsReport, config: &PublisherConfig) -> Result<(), PluginError>;
}
/// A publisher init function is called when the psistats service starts.
pub trait PublisherInitFunction {
    /// One-time initialisation for the publisher plugin.
    fn call(&self, config: &PublisherConfig) -> Result<(), PluginError>;
}
/// Errors that can occur while loading or executing plugins.
#[derive(Debug, Clone)]
pub enum PluginError {
    /// Plugin `p` does not provide a function named `fname`.
    FunctionNotFound { p: String, fname: String },
    /// The library file for plugin `p` could not be found.
    PluginFileNotFound { p: String },
    /// The plugin declaration was not found for plugin `p`.
    PluginDeclNotFound { p: String },
    /// Any other plugin-related error, with a free-form message.
    Other { p: String, msg: String },
    /// Plugin `p` failed while executing.
    Runtime { p: String, msg: String },
}
impl fmt::Display for PluginError {
    /// Human-readable form of each error, used by loggers and `to_string()`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            PluginError::FunctionNotFound { p, fname } => { write!(f, "Plugin {} is lacking function {}", p, fname) },
            PluginError::PluginFileNotFound { p } => { write!(f, "Could not find plugin file for plugin {}", p) },
            // Fixed typo in the user-facing message: "declration" -> "declaration".
            PluginError::PluginDeclNotFound { p } => { write!(f, "Plugin declaration not found for plugin {}", p) },
            PluginError::Other { p, msg } => { write!(f, "Error with plugin {}: {}", p, msg) },
            PluginError::Runtime { p, msg } => { write!(f, "Plugin {} failed to execute: {}", p, msg) }
        }
    }
}
impl std::error::Error for PluginError {
    /// These are leaf errors: they wrap no underlying cause.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        None
    }
}
/// A plugin registrar is used to register plugins.
pub trait PluginRegistrar {
    /// Each plugin will need to call this method to register
    /// each function the plugin makes available.
    fn register_plugin(&mut self, name: &str, func: FunctionType);
    /// Register a plugin library. It's necessary to keep a reference
    /// to the plugin library active; otherwise the library could be
    /// unloaded while its registered functions are still in use.
    fn register_lib(
        &mut self, lib: Arc<libloading::Library>
    );
    /// Get a reporter init function by plugin name.
    fn get_reporter_init(&self, name: &str) -> Result<&Box<dyn ReporterInitFunction + Send>, PluginError>;
    /// Get a reporter function by plugin name.
    fn get_reporter(&self, name: &str) -> Result<&Box<dyn ReporterFunction + Send>, PluginError>;
    /// Get a publisher init function by plugin name.
    fn get_publisher_init(&self, name: &str) -> Result<&Box<dyn PublisherInitFunction + Send>, PluginError>;
    /// Get a publisher function by plugin name.
    fn get_publisher(&self, name: &str) -> Result<&Box<dyn PublisherFunction + Send>, PluginError>;
}
/// Every plugin must expose (usually with the export_plugin! macro) a static
/// of this type that defines a register method. The method is given a
/// PluginRegistrar so that the plugin can register its functions with the
/// given registrar.
#[derive(Copy, Clone)]
pub struct PsistatsPlugin {
    // `extern "C"` entry point; exported under a fixed symbol name by
    // `export_plugin!` so the loader can look it up in the shared library.
    pub register: unsafe extern "C" fn(&mut Box<dyn PluginRegistrar + 'static + Send>),
}
/// Exports a plugin's register function as the well-known, unmangled
/// `PSISTATS_PLUGIN` static so the host can find it by symbol name.
#[macro_export]
macro_rules! export_plugin {
    ($register:expr) => {
        #[doc(hidden)]
        #[no_mangle]
        pub static PSISTATS_PLUGIN: $crate::plugins::api::PsistatsPlugin =
            $crate::plugins::api::PsistatsPlugin {
                register: $register
            };
    };
}
/// A possible report value.
///
/// A JSON-like recursive value type: scalars, arrays and string-keyed objects.
#[derive(Deserialize, Serialize, Clone, Debug)]
pub enum ReportValue {
    Integer(u64),
    Float(f64),
    String(String),
    Array(Vec<ReportValue>),
    Object(HashMap<String, ReportValue>)
}
/// A PsistatsReport is what a reporter function should return.
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct PsistatsReport {
    /// Identifier of the reporter that produced the report.
    pub id: String,
    /// The reported data.
    pub value: ReportValue
}
impl PsistatsReport {
    /// Builds a report from a reporter id and the value it produced.
    pub fn new(id: &str, value: ReportValue) -> Self {
        PsistatsReport {
            id: id.to_string(),
            // Field-init shorthand instead of the redundant `value: value`.
            value,
        }
    }
    /// The id of the reporter that produced this report.
    pub fn get_id(&self) -> &String {
        &self.id
    }
    /// The reported value payload.
    pub fn get_value(&self) -> &ReportValue {
        &self.value
    }
}
|
use std::ops::Deref;
use crate::{InputType, InputValueError};
/// Validator ensuring a list-like input has at least `len` items.
///
/// Returns `Ok(())` when the slice behind `value` contains `len` or more
/// elements, otherwise an `InputValueError` describing the violation.
pub fn min_items<T: Deref<Target = [E]> + InputType, E>(
    value: &T,
    len: usize,
) -> Result<(), InputValueError<T>> {
    let actual = value.deref().len();
    if actual < len {
        return Err(format!(
            "the value length is {}, must be greater than or equal to {}",
            actual, len
        )
        .into());
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_min_items() {
        // Boundary behaviour: below the minimum fails, at or above succeeds.
        assert!(min_items(&vec![1, 2], 3).is_err());
        assert!(min_items(&vec![1, 2, 3], 3).is_ok());
        assert!(min_items(&vec![1, 2, 3, 4], 3).is_ok());
    }
}
|
use std::io::Write;
use std::path::PathBuf;
use std::slice;
use std::thread;
use std::fs::{self, File};
use std::process::Command;
use std::rc::Rc;
use std::cell::RefCell;
use futures::{IntoFuture, Future, Stream};
use futures::sync::mpsc::{channel, Sender};
use tokio_core::reactor::Handle;
use crate::error::{Result, Error};
use hex_music_container::{Container, Configuration, error::Error as MusicError};
use hex_database::Track;
use hex_server_protocol::objects::DownloadProgress;
use hex_server_protocol::PacketId;
fn worker(mut sender: Sender<DownloadProgress>, id: PacketId, format: String, tracks: Vec<Track>, num_channel: u32, data_path: PathBuf) -> Result<()> {
let mut out_files = Vec::new();
let download_path = data_path.join("download");
println!("start working!");
for i in 0..tracks.len() {
println!("processing {}", i);
sender.try_send(DownloadProgress {
id: id.clone(),
format: format.clone(),
progress: i as f32 / tracks.len() as f32,
download: None
}).map_err(|_| Error::ChannelFailed)?;
let file_path = data_path.join(&tracks[i].key.to_string());
let file_path_out = download_path
.join(&tracks[i].interpret.clone().unwrap_or("unknown".into()))
.join(&tracks[i].album.clone().unwrap_or("unknown".into()));
fs::create_dir_all(&file_path_out).unwrap();
let file_path_out = file_path_out.join(tracks[i].title.clone().unwrap_or(tracks[i].key.to_string()));
let file = File::open(&file_path)
.map_err(|err| Error::Io(err))?;
let mut container = Container::load(file)
.map_err(|err| Error::MusicContainer(err))?;
let mut out = File::create(&file_path_out)
.map_err(|err| Error::Io(err))?;
println!("convert start");
loop {
match container.next_packet(Configuration::Stereo) {
Ok(buf) => {
let buf: &[u8] = unsafe {
slice::from_raw_parts(
buf.as_ptr() as *const u8,
buf.len() * 2
)
};
out.write(&buf).unwrap();
},
Err(MusicError::ReachedEnd) => break,
Err(err) => { return Err(Error::MusicContainer(err)); }
}
}
println!("convert end");
let converted_file = file_path_out.with_extension(format.clone());
Command::new("ffmpeg")
.arg("-y")
.arg("-ar").arg("48k")
.arg("-ac").arg("2")
.arg("-f").arg("s16le")
.arg("-i").arg(file_path_out.to_str().unwrap())
.arg(converted_file.to_str().unwrap())
.spawn().expect("Could not start ffmpeg!").wait().unwrap();
println!("ffmpeg end");
out_files.push(converted_file);
}
Command::new("tar")
.arg("cvzf")
.arg(download_path.join(format!("{}.tar.gz", id[0])))
.args(out_files)
.spawn().expect("Could not start tar!").wait().unwrap();
sender.try_send(DownloadProgress {
id: id.clone(),
format: format,
progress: 1.0,
download: Some(format!("/data/download/{}.tar.gz", id[0]))
}).map_err(|_| Error::ChannelFailed)?;
Ok(())
}
/// Tracks an in-flight download conversion and exposes its latest progress.
pub struct DownloadState {
    pub handle: Handle,
    // Latest progress message from the worker; shared with the reactor task
    // via `Rc<RefCell<..>>` (single-threaded interior mutability).
    progress: Rc<RefCell<DownloadProgress>>
}
impl DownloadState {
    /// Spawns a worker thread converting `tracks` and a reactor task that
    /// copies each progress message from the worker into `self.progress`.
    pub fn new(handle: Handle, id: PacketId, format: String, tracks: Vec<Track>, num_channel: u32, data_path: PathBuf) -> DownloadState {
        // Bounded channel: the worker's progress messages buffer up to 10 items.
        let (sender, recv) = channel(10);
        // Conversion runs on a plain OS thread; a worker error panics the thread.
        thread::spawn(move || {
            worker(sender, id, format, tracks, num_channel, data_path).unwrap();
        });
        let progress = Rc::new(RefCell::new(DownloadProgress::empty()));
        let progress2 = progress.clone();
        // Stream future that stores every received update into the shared cell;
        // errors are ignored (the stream simply ends when the worker drops it).
        let hnd = recv.map(move |x| {
            *((*progress2).borrow_mut()) = x;
            ()
        }).for_each(|_| Ok(())).into_future().map(|_| ()).map_err(|_| ());
        handle.spawn(hnd);
        DownloadState {
            handle: handle,
            progress: progress
        }
    }
    /// Returns a snapshot (clone) of the most recent progress report.
    pub fn progress(&self) -> DownloadProgress {
        let tmp: DownloadProgress = (*self.progress).borrow().clone();
        tmp
    }
}
|
use core::f32;
use std::time::{Instant, SystemTime};
/// Scope-based stopwatch: records its creation time and (via the `Drop`
/// impl) reports the elapsed wall-clock time in milliseconds when dropped.
pub struct Timer {
    // Monotonic start instant; `Instant` cannot go backwards, unlike the
    // previously used `SystemTime`.
    timer: Instant,
}
impl Timer {
    /// Starts the timer at the current instant.
    pub fn new() -> Timer {
        Timer {
            timer: Instant::now(),
        }
    }
    /// Prints the elapsed time since construction, in milliseconds.
    ///
    /// `Instant::elapsed` is infallible, so this fixes the previous
    /// `SystemTime::elapsed().unwrap()`, which could panic whenever the
    /// system clock was set backwards while the timer was running.
    fn print(&self) {
        let elapsed = self.timer.elapsed().as_millis() as f32;
        println!("Elapsed: {} [ms]", elapsed)
    }
}
impl Drop for Timer {
    /// Reports the elapsed time when the timer goes out of scope.
    fn drop(&mut self) {
        self.print();
    }
}
use std::iter::FromIterator;
use proc_macro::{TokenTree, TokenStream};
// punct_eq builds a closure that tests whether a TokenTree is the given
// punctuation character; handy as an iter.filter() predicate.
pub fn punct_eq(c: char) -> impl FnMut(&TokenTree) -> bool {
    move |tt: &TokenTree| matches!(tt, TokenTree::Punct(p) if p.as_char() == c)
}
// Splits the given TokenStream into a Vec<TokenStream> by a separator. This
// is usually used to split generic parameters, because Rust does not treat
// angle brackets as a valid group delimiter. Prefer split_tokens_by over
// slice.split when the input is elegantly split by the separator and every
// scope has a valid element. This function stops collecting when the last
// token is the separator, so the result will not contain an empty
// TokenStream at the end.
pub fn split_tokens_by(input: TokenStream, sep: &str) -> Vec<TokenStream> {
    // manually split generics by punct sep
    // HINT: because there are nested generics parameters, we
    // can't split by sep directly on the stream; we must make
    // sure we only split on sep in the outermost scope.
    // looks like: "T: SomeTrait<R, P>, U"
    let mut tokens: Vec<TokenStream> = Vec::new();
    // let's process; there are two loop scopes
    let mut iter = input
        .into_iter()
        .peekable();
    // `n` tracks the current angle-bracket nesting depth.
    let mut n = 0;
    // the outer loop is for collecting all the tokens in the generic,
    // split by sep; it looks the same as slice.split, but it handles
    // sep inside a generic ("<...>"), which is not treated as a
    // separator.
    loop {
        let mut inner = Vec::new();
        // the inner loop is for collecting tokens before a sep separator;
        // when it hits the separator, it breaks. Only when n == 0 is a sep
        // treated as a separator.
        loop {
            match iter.next() {
                Some(next) => {
                    // case #1: '<' -> n++
                    // case #2: '>' -> n--
                    match &next {
                        // HINT: '>' after '-' (i.e. an arrow `->`) is not case #2
                        TokenTree::Punct(punct) if punct.as_char() == '-' => {
                            match iter.peek() {
                                Some(TokenTree::Punct(peek)) if peek.as_char() == '>' => {
                                    inner.push(next);
                                    inner.push(iter.next().unwrap());
                                    continue;
                                }
                                _ => ()
                            }
                        }
                        // case #1
                        TokenTree::Punct(punct) if punct.as_char() == '<' => n += 1,
                        // case #2
                        TokenTree::Punct(punct) if punct.as_char() == '>' => n -= 1,
                        // hit single sep
                        TokenTree::Punct(punct) if n == 0 && sep.len() == 1 => {
                            if punct.as_char() == sep.chars().next().unwrap() {
                                break;
                            }
                        }
                        // hit multi-sep
                        // HINT: multiple separator characters are checked one by one
                        TokenTree::Punct(punct) if n == 0 => {
                            let mut chars = sep.chars();
                            // check if the current punct matches the first sep char
                            if punct.as_char() == chars.next().unwrap() {
                                // hit represents whether the rest of sep matched
                                let hit = chars
                                    .zip(iter.clone())
                                    .all(|(c, x)| {
                                        match x {
                                            TokenTree::Punct(ref punct) if punct.as_char() == c => true,
                                            _ => false
                                        }
                                    });
                                if hit {
                                    // because the first punct is already consumed, we
                                    // should only skip length of sep - 1 steps. That
                                    // is sep.len() - 2 when using iter.nth()
                                    iter.nth(sep.len() - 2);
                                    break;
                                }
                            }
                        }
                        _ => ()
                    }
                    inner.push(next);
                }
                // end of tokens, break
                None => break
            }
        }
        tokens.push(TokenStream::from_iter(inner));
        // break when no elements remain in iter
        if iter.peek().is_none() {
            break;
        }
    }
    // return the collected token streams
    tokens
}
use crate::geom::{about_equal, Vector};
use std::cmp::{Eq, PartialEq};
#[derive(Clone, Copy, Default, Debug)]
///A rectangle with a top-left position and a size
///
///Equality is approximate (see the `PartialEq` impl), since the components
///are floating-point.
pub struct Rectangle {
    ///The top-left coordinate of the rectangle
    pub pos: Vector,
    ///The width and height of the rectangle
    pub size: Vector,
}
impl Rectangle {
///Create a rectangle from a top-left vector and a size vector
pub fn new(pos: impl Into<Vector>, size: impl Into<Vector>) -> Rectangle {
Rectangle {
pos: pos.into(),
size: size.into(),
}
}
///Create a rectangle at the origin with the given size
pub fn new_sized(size: impl Into<Vector>) -> Rectangle {
Rectangle {
pos: Vector::ZERO,
size: size.into(),
}
}
///Get the top left coordinate of the Rectangle
pub fn top_left(&self) -> Vector {
self.pos
}
///Get the x-coordinate of the Rectangle
///(The origin of a Rectangle is at the top left)
pub fn x(&self) -> f32 {
self.pos.x
}
///Get the y-coordinate of the Rectangle
///(The origin of a Rectangle is at the top left)
pub fn y(&self) -> f32 {
self.pos.y
}
///Get the size of the Rectangle
pub fn size(&self) -> Vector {
self.size
}
///Get the height of the Rectangle
pub fn height(&self) -> f32 {
self.size.y
}
///Get the width of the Rectangle
pub fn width(&self) -> f32 {
self.size.x
}
}
impl PartialEq for Rectangle {
    ///Two rectangles are equal when all four of their components match
    ///within the `about_equal` tolerance.
    fn eq(&self, other: &Rectangle) -> bool {
        about_equal(self.pos.x, other.pos.x)
            && about_equal(self.pos.y, other.pos.y)
            && about_equal(self.size.x, other.size.x)
            && about_equal(self.size.y, other.size.y)
    }
}
impl Eq for Rectangle {}
#[cfg(test)]
mod tests {
    use crate::geom::*;
    #[test]
    fn overlap() {
        // b intersects a; c is fully outside both.
        let a = &Rectangle::new_sized((32, 32));
        let b = &Rectangle::new((16, 16), (32, 32));
        let c = &Rectangle::new((50, 50), (5, 5));
        assert!(a.overlaps(b));
        assert!(!a.overlaps(c));
    }
    #[test]
    fn contains() {
        // vec2 lies just past the right edge, so it is outside.
        let rect = Rectangle::new_sized((32, 32));
        let vec1 = Vector::new(5, 5);
        let vec2 = Vector::new(33, 1);
        assert!(rect.contains(vec1));
        assert!(!rect.contains(vec2));
    }
    #[test]
    fn constraint() {
        // Rectangles partially outside the constraint get shifted back inside.
        let constraint = &Rectangle::new_sized((10, 10));
        let a = Rectangle::new((-1, 3), (5, 5));
        let b = Rectangle::new((4, 4), (8, 3));
        let a = a.constrain(constraint);
        assert_eq!(a.top_left(), Vector::new(0, 3));
        let b = b.constrain(constraint);
        assert_eq!(b.top_left(), Vector::new(2, 4));
    }
    #[test]
    fn translate() {
        let a = Rectangle::new((10, 10), (5, 5));
        let v = Vector::new(1, -1);
        let translated = a.translate(v);
        assert_eq!(a.top_left() + v, translated.top_left());
    }
}
|
use elasticsearch::{
Elasticsearch,
IndexParts,
SearchParts,
params::Refresh,
};
use serde_json::{json, Value};
#[tokio::main]
/// Demo: indexes one document into the `sample` index, then queries the
/// index back with `match_all` and prints both raw responses.
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Elasticsearch::default();
    // Index a sample document; `Refresh::True` makes it searchable immediately.
    let index_response = client
        .index(IndexParts::Index("sample"))
        .body(json!({
            "name": "test1",
            "value": 123
        }))
        .refresh(Refresh::True)
        .send()
        .await?;
    println!("{:?}", index_response);
    // Fetch everything back from the same index.
    let search_response = client
        .search(SearchParts::Index(&["sample"]))
        .body(json!({
            "query": {
                "match_all": {}
            }
        }))
        .send()
        .await?;
    let body = search_response.json::<Value>().await?;
    println!("{:?}", body);
    Ok(())
}
|
use super::{DirBuilder, Entry, Leaf, PostOrderIterator, TreeBuildingFailed, TreeOptions};
use crate::Metadata;
use alloc::collections::btree_map::Entry::*;
use cid::Cid;
/// UnixFs directory tree builder which buffers entries until `build()` is called.
#[derive(Debug)]
pub struct BufferingTreeBuilder {
    /// At the root there can be only one element, unless an option was given to create a new
    /// directory surrounding the root elements.
    root_builder: DirBuilder,
    // Longest full path registered so far; handed to `PostOrderIterator::new`
    // in `build()`.
    longest_path: usize,
    // used to generate a unique id for each node; it is used when doing the post order traversal to
    // recover all children's rendered Cids
    counter: u64,
    // Tree-building options (e.g. whether to wrap root entries in a directory).
    opts: TreeOptions,
}
impl Default for BufferingTreeBuilder {
    /// Equivalent to `BufferingTreeBuilder::new` with default `TreeOptions`.
    fn default() -> Self {
        Self::new(TreeOptions::default())
    }
}
impl BufferingTreeBuilder {
/// Construct a new tree builder with the given configuration.
pub fn new(opts: TreeOptions) -> Self {
BufferingTreeBuilder {
root_builder: DirBuilder::root(0),
longest_path: 0,
counter: 1,
opts,
}
}
/// Registers the given path to be a link to the cid that follows. The target leaf should be
/// either a file, directory or symlink but could of course be anything. It will be treated as
/// an opaque link.
pub fn put_link(
&mut self,
full_path: &str,
target: Cid,
total_size: u64,
) -> Result<(), TreeBuildingFailed> {
let leaf = Leaf {
link: target,
total_size,
};
self.modify_with(full_path, |parent, basename, _| {
parent
.put_leaf(basename, leaf)
.map_err(|_| TreeBuildingFailed::DuplicatePath(full_path.to_string()))
})
}
/// Directories get "put" implicitly through the put files, and directories need to be adjusted
/// only when wanting them to have metadata.
pub fn set_metadata(
&mut self,
full_path: &str,
metadata: Metadata,
) -> Result<(), TreeBuildingFailed> {
// create all paths along the way
//
// set if not set, error otherwise? FIXME: doesn't error atm
self.modify_with(full_path, |parent, basename, id| {
parent
.add_or_get_node(basename, id)
.map_err(|_| TreeBuildingFailed::LeafAsDirectory(full_path.to_string()))?
.set_metadata(metadata);
Ok(())
})
}
fn modify_with<F>(&mut self, full_path: &str, f: F) -> Result<(), TreeBuildingFailed>
where
F: FnOnce(&mut DirBuilder, String, &mut Option<u64>) -> Result<(), TreeBuildingFailed>,
{
// create all paths along the way
//
// assuming it's ok to split at '/' since that cannot be escaped in linux at least
self.longest_path = full_path.len().max(self.longest_path);
let mut remaining = full_path.split('/').enumerate().peekable();
let mut dir_builder = &mut self.root_builder;
// check these before to avoid creation of bogus nodes in the tree or having to clean up.
if full_path.ends_with('/') {
return Err(TreeBuildingFailed::PathEndsInSlash(full_path.to_string()));
}
if full_path.contains("//") {
return Err(TreeBuildingFailed::RepeatSlashesInPath(
full_path.to_string(),
));
}
// needed to avoid borrowing into the DirBuilder::new calling closure
let counter = &mut self.counter;
while let Some((depth, next)) = remaining.next() {
let last = remaining.peek().is_none();
match (depth, next, last) {
// this might need to be accepted in case there is just a single file
(0, "", true) => {
// accepted: allows unconditional tree building in ipfs-http
// but the resulting tree will have at most single node, which doesn't prompt
// creation of new directories and should be fine.
}
(0, "", false) => {
// ok to keep this inside the loop; we are yet to create any nodes.
// note the ipfs-http (and for example js-ipfs) normalizes the path by
// removing the slash from the start.
return Err(TreeBuildingFailed::RootedPath(full_path.to_string()));
}
(_, "", false) => unreachable!("already validated: no repeat slashes"),
(_, "", true) => unreachable!("already validated: path does not end in slash"),
_ => {}
}
// our first level can be full, depending on the options given
let full = depth == 0 && !self.opts.wrap_with_directory && !dir_builder.is_empty();
if last {
let mut next_id = Some(*counter);
let ret = if full {
Err(TreeBuildingFailed::TooManyRootLevelEntries)
} else {
f(dir_builder, next.to_string(), &mut next_id)
};
if next_id.is_none() {
*counter += 1;
}
if ret.is_err() {
// FIXME: there might be a case where we have now stale nodes in our tree but
// cannot figure out an example for that.
}
return ret;
}
let parent_id = dir_builder.id;
dir_builder = match (full, dir_builder.nodes.entry(next.to_string())) {
(_, Occupied(oe)) => oe
.into_mut()
.as_dir_builder()
.map_err(|_| TreeBuildingFailed::LeafAsDirectory(full_path.to_string()))?,
(false, Vacant(ve)) => {
let next_id = *counter;
*counter += 1;
ve.insert(Entry::Directory(DirBuilder::new(parent_id, next_id)))
.as_dir_builder()
.expect("safe: we just inserted a DirBuilder")
}
(true, Vacant(_)) => return Err(TreeBuildingFailed::TooManyRootLevelEntries),
};
}
// as the str::split will always return a single element this should not ever be hit
unreachable!(
"walked the full_path but failed to add anything: {:?}",
full_path
);
}
/// Called to build the tree. The built tree will have the added files and their implied
/// directory structure, along with the directory entries which were created using
/// `set_metadata`. To build the whole hierarchy, one must iterate the returned iterator to
/// completion while storing the created blocks.
///
/// Returned `PostOrderIterator` will use the given `full_path` and `block_buffer` to store
/// its data during the walk. `PostOrderIterator` implements `Iterator` while also allowing
/// borrowed access via `next_borrowed`.
pub fn build(self) -> PostOrderIterator {
    // Consumes the builder; `longest_path` lets the iterator pre-size its
    // reusable path buffer so the walk avoids repeated reallocation.
    PostOrderIterator::new(self.root_builder, self.opts, self.longest_path)
}
}
#[cfg(test)]
mod tests {
    use super::{
        super::OwnedTreeNode, BufferingTreeBuilder, Metadata, TreeBuildingFailed, TreeOptions,
    };
    use cid::Cid;
    use core::convert::TryFrom;

    // Deeply nested leaves under one root: every implied directory must be
    // emitted in post-order (children before parents) with a stable CID.
    #[test]
    fn some_directories() {
        let mut builder = BufferingTreeBuilder::default();

        // foobar\n
        let five_block_foobar =
            Cid::try_from("QmRJHYTNvC3hmd9gJQARxLR1QMEincccBV53bBw524yyq6").unwrap();

        builder
            .put_link("a/b/c/d/e/f/g.txt", five_block_foobar.clone(), 221)
            .unwrap();
        builder
            .put_link("a/b/c/d/e/h.txt", five_block_foobar.clone(), 221)
            .unwrap();
        builder
            .put_link("a/b/c/d/e/i.txt", five_block_foobar, 221)
            .unwrap();

        let actual = builder
            .build()
            .map(|res| res.map(|n| (n.path, n.cid, n.block)))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();

        // Expected (path, CID) pairs, deepest directory first.
        let expected = vec![
            (
                "a/b/c/d/e/f",
                "Qmbgf44ztW9wLcGNRNYGinGQB6SQDQtbHVbkM5MrWms698",
            ),
            (
                "a/b/c/d/e",
                "Qma1hCr3CuPRAq2Gw4DCNMqsi42Bjs4Bt1MGSS57kNh144",
            ),
            ("a/b/c/d", "QmUqaYatcJqiSFdykHXGh4Nog1eMSfDJBeYzcG67KV5Ri4"),
            ("a/b/c", "QmYwaNBaGpDCNN9XpHmjxVPHmEXZMw9KDY3uikE2UU5fVB"),
            ("a/b", "QmeAzCPig4o4gBLh2LvP96Sr8MUBrsu2Scw9MTq1EvTDhY"),
            ("a", "QmSTUFaPwJW8xD4KNRLLQRqVTYtYC29xuhYTJoYPWdzvKp"),
        ];

        verify_results(expected, actual);
    }

    // A single leaf at the root with no wrapping directory yields no nodes.
    #[test]
    fn empty_path() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("", some_cid(0), 1).unwrap();

        let actual = builder
            .build()
            .map(|res| res.map(|OwnedTreeNode { path, .. }| path))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();

        assert!(
            actual.is_empty(),
            "wrapping in directory was not asked, single element"
        );
    }

    // Leading slash is rejected (callers are expected to normalize paths).
    #[test]
    #[should_panic]
    fn rooted_path() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("/a", some_cid(0), 1).unwrap();
    }

    // Repeated slashes are rejected up-front.
    #[test]
    #[should_panic]
    fn successive_slashes() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("a//b", some_cid(0), 1).unwrap();
    }

    // Multiple root-level entries are allowed when wrapped in a directory.
    #[test]
    fn multiple_roots() {
        // foobar\n
        let five_block_foobar =
            Cid::try_from("QmRJHYTNvC3hmd9gJQARxLR1QMEincccBV53bBw524yyq6").unwrap();

        let mut opts = TreeOptions::default();
        opts.wrap_with_directory();
        let mut builder = BufferingTreeBuilder::new(opts);
        builder
            .put_link("a", five_block_foobar.clone(), 221)
            .unwrap();
        builder.put_link("b", five_block_foobar, 221).unwrap();

        let actual = builder
            .build()
            .map(|res| res.map(|OwnedTreeNode { path, cid, .. }| (path, cid.to_string())))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();

        // Only the synthetic wrapping root ("" path) is emitted.
        assert_eq!(
            actual,
            &[(
                "".to_string(),
                "QmdbWuhpVCX9weVMMqvVTMeGwKMqCNJDbx7ZK1zG36sea7".to_string()
            )]
        );
    }

    // A single root entry still gets the wrapping directory when asked for.
    #[test]
    fn single_wrapped_root() {
        // foobar\n
        let five_block_foobar =
            Cid::try_from("QmRJHYTNvC3hmd9gJQARxLR1QMEincccBV53bBw524yyq6").unwrap();

        let mut opts = TreeOptions::default();
        opts.wrap_with_directory();
        let mut builder = BufferingTreeBuilder::new(opts);
        builder.put_link("a", five_block_foobar, 221).unwrap();

        let actual = builder
            .build()
            .map(|res| res.map(|OwnedTreeNode { path, cid, .. }| (path, cid.to_string())))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();

        assert_eq!(
            actual,
            &[(
                "".to_string(),
                "QmQBseoi3b2FBrYhjM2E4mCF4Q7C8MgCUbzAbGNfyVwgNk".to_string()
            )]
        );
    }

    // Without wrapping, a second root-level directory is an error.
    #[test]
    #[should_panic]
    fn denied_multiple_root_dirs() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("a/c.txt", some_cid(0), 1).unwrap();
        builder.put_link("b/d.txt", some_cid(1), 1).unwrap();
    }

    // Without wrapping, a second root-level file is an error.
    #[test]
    #[should_panic]
    fn denied_multiple_root_files() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("a.txt", some_cid(0), 1).unwrap();
        builder.put_link("b.txt", some_cid(1), 1).unwrap();
    }

    // A path component that is already a leaf cannot be used as a directory.
    #[test]
    #[should_panic]
    fn using_leaf_as_node() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("a.txt", some_cid(0), 1).unwrap();
        builder.put_link("a.txt/b.txt", some_cid(1), 1).unwrap();
    }

    // Metadata may be set on a directory before any file under it exists.
    #[test]
    fn set_metadata_before_files() {
        let mut builder = BufferingTreeBuilder::default();
        builder
            .set_metadata("a/b/c/d", Metadata::default())
            .unwrap();
        builder.put_link("a/b/c/d/e.txt", some_cid(1), 1).unwrap();
        builder.put_link("a/b/c/d/f.txt", some_cid(2), 1).unwrap();

        let actual = builder
            .build()
            .map(|res| res.map(|OwnedTreeNode { path, .. }| path))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();

        assert_eq!(actual, &["a/b/c/d", "a/b/c", "a/b", "a",])
    }

    // Metadata on an existing leaf is rejected with LeafAsDirectory.
    #[test]
    fn set_metadata_on_file() {
        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("a/a.txt", some_cid(0), 1).unwrap();

        let err = builder
            .set_metadata("a/a.txt", Metadata::default())
            .unwrap_err();

        assert!(
            matches!(err, TreeBuildingFailed::LeafAsDirectory(_)),
            "{:?}",
            err
        );
    }

    // Linking a CIDv1 target still produces a (CIDv0) directory node.
    #[test]
    fn dir_with_cidv1_link() {
        // this is `echo '{ "name": "hello" }` | ./ipfs dag put`
        let target =
            Cid::try_from("bafyreihakpd7te5nbmlhdk5ntvcvhf2hmfgrvcwna2sddq5zz5342mcbli").unwrap();

        let mut builder = BufferingTreeBuilder::default();
        builder.put_link("a/b", target, 12).unwrap();

        let actual = builder
            .build()
            .map(|res| res.map(|n| (n.path, n.cid, n.block)))
            .collect::<Result<Vec<_>, _>>()
            .unwrap();

        let expected = vec![("a", "QmPMDMPG8dbHDC9GuvqWr9pfruLnp4GZCAWrskwCmenVQa")];

        verify_results(expected, actual);
    }

    // Compares (path, CID) expectations against actual nodes; on CID mismatch
    // the raw block bytes are dumped as hex to ease debugging.
    fn verify_results(
        mut expected: Vec<(
            impl AsRef<str> + core::fmt::Debug,
            impl AsRef<str> + core::fmt::Debug,
        )>,
        mut actual: Vec<(String, Cid, Box<[u8]>)>,
    ) {
        use core::fmt;

        // Small helper so assertion failures print the block as hex.
        struct Hex<'a>(&'a [u8]);

        impl<'a> fmt::Debug for Hex<'a> {
            fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
                for b in self.0 {
                    write!(fmt, "{:02x}", b)?;
                }
                Ok(())
            }
        }

        // hopefully this way the errors will be easier to hunt down
        actual.reverse();
        expected.reverse();

        while let Some(actual) = actual.pop() {
            let expected = expected.pop().expect("size mismatch");
            assert_eq!(actual.0, expected.0.as_ref());
            assert_eq!(
                actual.1.to_string(),
                expected.1.as_ref(),
                "{:?}: {:?}",
                actual.0,
                Hex(&actual.2)
            );
        }

        assert_eq!(expected.len(), 0, "size mismatch: {:?}", actual);
    }

    /// Returns a quick and dirty sha2-256 of the given number as a Cidv0
    fn some_cid(number: usize) -> Cid {
        use multihash::Sha2_256;
        let mh = Sha2_256::digest(&number.to_le_bytes());
        Cid::new_v0(mh).unwrap()
    }
}
|
use crow::Context;
use crate::{
config::{Config, PlayerAnimationsConfig},
data::{
Camera, Collider, ColliderType, Components, Depth, Gravity, PlayerAnimations, PlayerState,
Position, Velocity,
},
ressources::Ressources,
};
/// Spawns the player entity at the last save position and attaches all of
/// its components: collider, velocity, gravity, state, depth, and animations.
///
/// Animations are loaded from `ressources/player/animations.ron`; the entity
/// starts on its idle animation.
///
/// Returns any `crow::Error` raised while loading the animation textures.
pub fn player(
    ctx: &mut Context,
    c: &mut Components,
    r: &mut Ressources,
) -> Result<(), crow::Error> {
    #[cfg(feature = "profiler")]
    profile_scope!("player");

    let player = c.new_entity();
    // Respawn where the player last saved.
    c.positions.insert(player, r.last_save.position);
    c.colliders.insert(
        player,
        Collider {
            // Hard-coded player hitbox size in world units.
            w: 7.0,
            h: 15.0,
            ty: ColliderType::Player,
        },
    );
    c.velocities.insert(player, Velocity { x: 0.0, y: 0.0 });
    c.gravity.insert(player, Gravity);
    c.player_state.insert(player, PlayerState::Grounded);
    c.depths.insert(player, Depth::Player);
    // NOTE(review): a missing/malformed RON file panics here via `unwrap()` —
    // presumably acceptable for a game asset, but confirm intent.
    let player_animations = PlayerAnimations::from_config(
        ctx,
        &mut r.animation_storage,
        PlayerAnimationsConfig::load("ressources/player/animations.ron").unwrap(),
    )?;
    c.animations
        .insert(player, r.animation_storage.start(player_animations.idle));
    c.player_animations.insert(player, player_animations);
    Ok(())
}
/// Spawns the camera entity at the origin with a collider sized to the
/// configured window, so it can take part in collision/visibility checks.
pub fn camera(c: &mut Components, r: &mut Ressources) {
    let entity = c.new_entity();
    let (width, height) = r.config.window.size;

    c.cameras.insert(entity, Camera);
    c.positions.insert(entity, Position { x: 0.0, y: 0.0 });
    c.velocities.insert(entity, Velocity { x: 0.0, y: 0.0 });
    c.colliders.insert(
        entity,
        Collider {
            w: width as f32,
            h: height as f32,
            ty: ColliderType::Camera,
        },
    );
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Generated Windows API bindings: opaque WinRT object handles are represented
// as raw `c_void` pointers; "enum" types are transparent i32 newtypes with
// associated constants, with `Copy`/`Clone` implemented manually to match the
// generator's output style.
pub type PrintTicketCapabilities = *mut ::core::ffi::c_void;
pub type PrintTicketFeature = *mut ::core::ffi::c_void;
/// Selection mode of a print-ticket feature: one option or many.
#[repr(transparent)]
pub struct PrintTicketFeatureSelectionType(pub i32);
impl PrintTicketFeatureSelectionType {
    pub const PickOne: Self = Self(0i32);
    pub const PickMany: Self = Self(1i32);
}
impl ::core::marker::Copy for PrintTicketFeatureSelectionType {}
impl ::core::clone::Clone for PrintTicketFeatureSelectionType {
    fn clone(&self) -> Self {
        *self
    }
}
pub type PrintTicketOption = *mut ::core::ffi::c_void;
/// Data type of a print-ticket parameter value.
#[repr(transparent)]
pub struct PrintTicketParameterDataType(pub i32);
impl PrintTicketParameterDataType {
    pub const Integer: Self = Self(0i32);
    pub const NumericString: Self = Self(1i32);
    pub const String: Self = Self(2i32);
}
impl ::core::marker::Copy for PrintTicketParameterDataType {}
impl ::core::clone::Clone for PrintTicketParameterDataType {
    fn clone(&self) -> Self {
        *self
    }
}
pub type PrintTicketParameterDefinition = *mut ::core::ffi::c_void;
pub type PrintTicketParameterInitializer = *mut ::core::ffi::c_void;
pub type PrintTicketValue = *mut ::core::ffi::c_void;
/// Runtime type of a print-ticket value.
#[repr(transparent)]
pub struct PrintTicketValueType(pub i32);
impl PrintTicketValueType {
    pub const Integer: Self = Self(0i32);
    pub const String: Self = Self(1i32);
    pub const Unknown: Self = Self(2i32);
}
impl ::core::marker::Copy for PrintTicketValueType {}
impl ::core::clone::Clone for PrintTicketValueType {
    fn clone(&self) -> Self {
        *self
    }
}
pub type WorkflowPrintTicket = *mut ::core::ffi::c_void;
pub type WorkflowPrintTicketValidationResult = *mut ::core::ffi::c_void;
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::any::Any;
use std::io::BufRead;
use std::io::Cursor;
use common_exception::Result;
use common_expression::types::array::ArrayColumnBuilder;
use common_expression::types::string::StringColumnBuilder;
use common_expression::types::AnyType;
use common_expression::ColumnBuilder;
use common_io::constants::FALSE_BYTES_LOWER;
use common_io::constants::INF_BYTES_LOWER;
use common_io::constants::NULL_BYTES_ESCAPE;
use common_io::constants::TRUE_BYTES_LOWER;
use common_io::cursor_ext::ReadBytesExt;
use crate::field_decoder::row_based::FieldDecoderRowBased;
use crate::field_decoder::values::FieldDecoderValues;
use crate::CommonSettings;
use crate::FieldDecoder;
use crate::FileFormatOptionsExt;
/// Field decoder for CSV input: turns the bytes of a single, already-split
/// CSV field into a value appended to a column builder.
#[derive(Clone)]
pub struct FieldDecoderCSV {
    // Decoder used for values nested inside arrays/maps/tuples.
    pub nested: FieldDecoderValues,
    // Byte spellings for true/false/null/nan/inf plus the session timezone.
    pub common_settings: CommonSettings,
}
impl FieldDecoderCSV {
    /// Builds a CSV field decoder from the file-format options.
    ///
    /// Scalar keywords use their lowercase spellings, `null` uses the escape
    /// spelling, and the NaN representation comes from the stage settings;
    /// nested values are delegated to a `FieldDecoderValues`.
    pub fn create(options: &FileFormatOptionsExt) -> Self {
        let common_settings = CommonSettings {
            true_bytes: TRUE_BYTES_LOWER.as_bytes().to_vec(),
            false_bytes: FALSE_BYTES_LOWER.as_bytes().to_vec(),
            null_bytes: NULL_BYTES_ESCAPE.as_bytes().to_vec(),
            nan_bytes: options.stage.nan_display.as_bytes().to_vec(),
            inf_bytes: INF_BYTES_LOWER.as_bytes().to_vec(),
            timezone: options.timezone,
        };
        FieldDecoderCSV {
            nested: FieldDecoderValues::create(options),
            common_settings,
        }
    }
}
impl FieldDecoder for FieldDecoderCSV {
    // Enables downcasting from `dyn FieldDecoder` back to the concrete type.
    fn as_any(&self) -> &dyn Any {
        self
    }
}
impl FieldDecoderRowBased for FieldDecoderCSV {
    fn common_settings(&self) -> &CommonSettings {
        &self.common_settings
    }

    // The CSV reader has already split the record into fields, so a field
    // simply ends at the end of the buffer.
    fn ignore_field_end<R: AsRef<[u8]>>(&self, reader: &mut Cursor<R>) -> bool {
        reader.eof()
    }

    // Strings arrive unquoted/unescaped from the CSV reader: the whole
    // remaining buffer is the value.
    fn read_string_inner<R: AsRef<[u8]>>(
        &self,
        reader: &mut Cursor<R>,
        out_buf: &mut Vec<u8>,
        _raw: bool,
    ) -> Result<()> {
        let buf = reader.remaining_slice();
        out_buf.extend_from_slice(buf);
        reader.consume(buf.len());
        Ok(())
    }

    // Appends the remaining field bytes as one string row.
    fn read_string<R: AsRef<[u8]>>(
        &self,
        column: &mut StringColumnBuilder,
        reader: &mut Cursor<R>,
        _raw: bool,
    ) -> Result<()> {
        let buf = reader.remaining_slice();
        column.put_slice(buf);
        column.commit_row();
        reader.consume(buf.len());
        Ok(())
    }

    // Variants are stored as raw bytes here, same as strings.
    fn read_variant<R: AsRef<[u8]>>(
        &self,
        column: &mut StringColumnBuilder,
        reader: &mut Cursor<R>,
        _raw: bool,
    ) -> Result<()> {
        let buf = reader.remaining_slice();
        column.put_slice(buf);
        column.commit_row();
        reader.consume(buf.len());
        Ok(())
    }

    // Nested types reuse the `Values`-style decoder; `raw` is forced to
    // false because nested elements use quoted-value syntax.
    fn read_array<R: AsRef<[u8]>>(
        &self,
        column: &mut ArrayColumnBuilder<AnyType>,
        reader: &mut Cursor<R>,
        _raw: bool,
    ) -> Result<()> {
        self.nested.read_array(column, reader, false)?;
        Ok(())
    }

    fn read_map<R: AsRef<[u8]>>(
        &self,
        column: &mut ArrayColumnBuilder<AnyType>,
        reader: &mut Cursor<R>,
        _raw: bool,
    ) -> Result<()> {
        self.nested.read_map(column, reader, false)?;
        Ok(())
    }

    fn read_tuple<R: AsRef<[u8]>>(
        &self,
        fields: &mut Vec<ColumnBuilder>,
        reader: &mut Cursor<R>,
        _raw: bool,
    ) -> Result<()> {
        self.nested.read_tuple(fields, reader, false)?;
        Ok(())
    }
}
|
use yew::prelude::*;
use yew_functional::*;
use crate::components::icons::{
feather_twitter::FeatherTwitter, ri_bookmark::RiBookmark, ri_sticky_notes::RiStickyNotes,
util_github_alt::UtilGithubAlt,
};
use crate::components::toggle_theme::ToggleTheme;
/// Top navigation bar: theme-aware logo, external links, and a theme toggle.
///
/// The initial dark/light state follows the OS `prefers-color-scheme` media
/// query, defaulting to light when the query is unsupported or errors out.
#[function_component(Navbar)]
pub fn navbar() -> Html {
    let (is_dark, set_is_dark) = {
        let window = web_sys::window().expect("no global `window` exists");
        // Collapse Result<Option<MediaQueryList>, _> down to a plain bool:
        // any error or missing media-query support means "not dark".
        let prefers_dark = window
            .match_media("(prefers-color-scheme: dark)")
            .ok()
            .flatten()
            .map(|media_query_list| media_query_list.matches())
            .unwrap_or(false);
        use_state(move || prefers_dark)
    };
    let set_is_dark_cb = Callback::from(move |is_dark: bool| set_is_dark(is_dark));
    html! {
        <header class="header">
            <nav class="nav">
                <a href="/" class="w-10 h-10 absolute lg:fixed m-6 select-none outline-none" focusable="false">
                    {if *is_dark {
                        html!{<img src="/rust-moon.svg" alt="logo" />}
                    } else {
                        html!{<img src="/rust.svg" alt="logo" />}
                    }}
                </a>
                <div class="spacer"></div>
                <div class="right">
                    <a href="https://yuchanns.xyz">{"Blog"}</a>
                    <a href="https://github.com/yuchanns?tab=repositories&type=source">{"Projects"}</a>
                    <a title="Bookmarks"><RiBookmark /></a>
                    // NOTE(review): title duplicates the bookmark link's; this is
                    // the sticky-notes icon — presumably meant to be "Notes".
                    // Left unchanged pending confirmation.
                    <a title="Bookmarks"><RiStickyNotes /></a>
                    <a href="https://twitter.com/airamusume" title="twitter" class="hidden md:block"><FeatherTwitter /></a>
                    <a href="https://github.com/yuchanns" title="github" class="hidden md:block"><UtilGithubAlt /></a>
                    <ToggleTheme is_dark=is_dark.clone() set_is_dark=set_is_dark_cb />
                </div>
            </nav>
        </header>
    }
}
|
use super::ast::*;
use super::scan::*;
/// Entry point: parses a complete expression from `scanner`.
///
/// Minimum binding power 0 means every operator is accepted at this level.
pub fn parse<'a>(scanner: &mut Scanner<'a>) -> Result<Expr<'a>, &'static str> {
    parse_expr(scanner, 0)
}
/// Pratt (precedence-climbing) expression parser.
///
/// `min_bp` is the minimum binding power: the operator loop only consumes
/// operators whose binding power is at least `min_bp`, which is how
/// precedence and associativity are enforced across recursive calls.
///
/// Two forms of implicit multiplication are recognised:
/// * a variable directly following an expression (`2x`), and
/// * a parenthesised group following an expression (`2(3+4)`),
/// both at multiplicative precedence.
pub fn parse_expr<'a>(scanner: &mut Scanner<'a>, min_bp: u8) -> Result<Expr<'a>, &'static str> {
    let lhs_token = scanner.next_token()?;
    // Prefix / primary position.
    let mut lhs = match lhs_token.kind {
        TokenKind::Variable => Expr::Variable { name: lhs_token },
        TokenKind::SpecialVariable => Expr::SpecialVariable { name: lhs_token },
        TokenKind::BuiltinFunction => {
            // name(arg, arg, ...): comma-separated full expressions.
            consume(scanner, TokenKind::LeftParen)?;
            let mut args = Vec::new();
            loop {
                args.push(parse(scanner)?);
                if scanner.peek_token(0)?.kind == TokenKind::RightParen {
                    break;
                }
                consume(scanner, TokenKind::Comma)?;
            }
            consume(scanner, TokenKind::RightParen)?;
            Expr::Call {
                name: lhs_token,
                args,
            }
        }
        TokenKind::Integer(_) | TokenKind::Float(_) => Expr::Literal { literal: lhs_token },
        TokenKind::LeftParen => {
            // Parenthesised sub-expression resets the binding power.
            let lhs = parse(scanner)?;
            consume(scanner, TokenKind::RightParen)?;
            lhs
        }
        TokenKind::Minus => {
            // Unary minus.
            let bp = prefix_bp(lhs_token.kind)?;
            let rhs = Box::new(parse_expr(scanner, bp)?);
            Expr::Unary { op: lhs_token, rhs }
        }
        // Was `panic!`: malformed input should surface as an Err like every
        // other syntax problem in this parser, not abort the process.
        _ => return Err("syntax err: unexpected token"),
    };
    // Postfix / infix operator loop.
    loop {
        let op_token = scanner.peek_token(0)?;
        if op_token.kind == TokenKind::End {
            break;
        }
        // Postfix operators (factorial) bind to the current lhs.
        if let Some(lhs_bp) = postfix_bp(op_token.kind) {
            if lhs_bp < min_bp {
                break;
            }
            lhs = Expr::Unary {
                op: scanner.next_token()?,
                rhs: Box::new(lhs),
            };
            continue;
        }
        // Implicit multiplication: `2x` — the variable is NOT consumed as an
        // operator; a synthetic `*` token is inserted instead.
        if op_token.kind == TokenKind::Variable {
            if MULTIPLY_DIVIDE_MOD < min_bp {
                break;
            }
            lhs = Expr::Binary {
                lhs: Box::new(lhs),
                op: Token::new(TokenKind::Multiply, "*"),
                rhs: Box::new(parse_expr(scanner, MULTIPLY_DIVIDE_MOD + 1)?),
            };
            continue;
        }
        if let Some(lbp) = infix_bp(op_token.kind) {
            if lbp < min_bp {
                break;
            }
            if scanner.peek_token(0)?.kind != TokenKind::LeftParen {
                // Ordinary left-associative infix operator (+1 makes it so).
                let op = scanner.next_token()?;
                lhs = Expr::Binary {
                    lhs: Box::new(lhs),
                    op,
                    rhs: Box::new(parse_expr(scanner, lbp + 1)?),
                };
            } else {
                // Implicit multiplication: `2(3+4)` — the group is parsed as
                // a whole and joined with a synthetic `*`.
                consume(scanner, TokenKind::LeftParen)?;
                let rhs = parse_expr(scanner, 0)?;
                consume(scanner, TokenKind::RightParen)?;
                lhs = Expr::Binary {
                    lhs: Box::new(lhs),
                    op: Token::new(TokenKind::Multiply, "*"),
                    rhs: Box::new(rhs),
                };
            }
            continue;
        }
        break;
    }
    Ok(lhs)
}
// Binding powers for the Pratt parser above; higher binds tighter.
const COMPARE: u8 = 1;
const PLUS_MINUS: u8 = 2;
const MULTIPLY_DIVIDE_MOD: u8 = 3;
const POWER: u8 = 4;
const MINUS_PREFIX: u8 = 5;
const FACTORIAL: u8 = 6;
/// Binding power of a prefix operator, or a syntax error if `kind` cannot
/// appear in prefix position. Unary minus is the only prefix operator.
fn prefix_bp(kind: TokenKind) -> Result<u8, &'static str> {
    if let TokenKind::Minus = kind {
        Ok(MINUS_PREFIX)
    } else {
        Err("syntax err: prefix op")
    }
}
/// Binding power of a postfix operator, or `None` if `kind` is not one.
/// Factorial (`!`) is the only postfix operator.
fn postfix_bp(kind: TokenKind) -> Option<u8> {
    if kind == TokenKind::Factorial {
        Some(FACTORIAL)
    } else {
        None
    }
}
/// Left binding power of an infix operator, or `None` if `kind` is not
/// usable in infix position.
fn infix_bp(kind: TokenKind) -> Option<u8> {
    match kind {
        TokenKind::Equal
        | TokenKind::Less
        | TokenKind::LessEqual
        | TokenKind::Greater
        | TokenKind::GreaterEqual
        | TokenKind::NotEqual => Some(COMPARE),
        TokenKind::Plus | TokenKind::Minus => Some(PLUS_MINUS),
        // `LeftParen` counts as implicit multiplication (e.g. `2(3+4)`), so
        // it shares the multiplicative binding power.
        TokenKind::Multiply | TokenKind::Divide | TokenKind::Modulo | TokenKind::LeftParen => {
            Some(MULTIPLY_DIVIDE_MOD)
        }
        TokenKind::Power => Some(POWER),
        _ => None,
    }
}
/// Consumes the next token, requiring it to be of `kind`.
///
/// On mismatch the expectation is reported on stderr — diagnostics must not
/// pollute stdout, which carries program output — and a syntax error is
/// returned. (The `&'static str` error type cannot carry the token itself.)
fn consume(scanner: &mut Scanner<'_>, kind: TokenKind) -> Result<(), &'static str> {
    let t = scanner.next_token()?;
    if t.kind == kind {
        Ok(())
    } else {
        eprintln!("wanted {:?} got {:?}", kind, t.kind);
        Err("syntax error")
    }
}
|
use actix_web::{http::StatusCode, FromRequest, HttpResponse, Json, Path, Query};
use bigneon_api::controllers::ticket_types;
use bigneon_api::controllers::ticket_types::*;
use bigneon_api::models::{
AdminDisplayTicketType, EventTicketPathParameters, PagingParameters, PathParameters, Payload,
};
use bigneon_db::models::*;
use chrono::prelude::*;
use serde_json;
use support;
use support::database::TestDatabase;
use support::test_request::TestRequest;
/// Role-based test for `ticket_types::create`: builds an event plus a
/// two-tier pricing request and posts it as a user holding `role`.
///
/// Expects 201 Created when `should_test_succeed`, otherwise 401 with the
/// standard unauthorized JSON body.
pub fn create(role: Roles, should_test_succeed: bool) {
    let database = TestDatabase::new();
    let user = database.create_user().finish();
    let organization = database.create_organization().finish();
    let auth_user =
        support::create_auth_user_from_user(&user, role, Some(&organization), &database);
    let event = database
        .create_event()
        .with_organization(&organization)
        .finish();

    //Construct Ticket creation and pricing request
    let test_request = TestRequest::create();
    let state = test_request.extract_state();
    let mut path = Path::<PathParameters>::extract(&test_request.request).unwrap();
    path.id = event.id;
    let mut ticket_pricing: Vec<CreateTicketPricingRequest> = Vec::new();
    let start_date = NaiveDate::from_ymd(2018, 5, 1).and_hms(6, 20, 21);
    let middle_date = NaiveDate::from_ymd(2018, 6, 2).and_hms(7, 45, 31);
    let end_date = NaiveDate::from_ymd(2018, 7, 3).and_hms(9, 23, 23);
    // Two back-to-back pricing windows: early bird, then base.
    ticket_pricing.push(CreateTicketPricingRequest {
        name: String::from("Early bird"),
        price_in_cents: 10000,
        start_date,
        end_date: middle_date,
    });
    ticket_pricing.push(CreateTicketPricingRequest {
        name: String::from("Base"),
        price_in_cents: 20000,
        start_date: middle_date,
        end_date,
    });
    let request_data = CreateTicketTypeRequest {
        name: "VIP".into(),
        capacity: 1000,
        start_date,
        end_date,
        ticket_pricing,
        increment: None,
    };
    let response: HttpResponse = ticket_types::create((
        database.connection.into(),
        path,
        Json(request_data),
        auth_user,
        state,
    )).into();

    let body = support::unwrap_body_to_string(&response).unwrap();
    if should_test_succeed {
        assert_eq!(response.status(), StatusCode::CREATED);
    } else {
        assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
        // The body must match the canonical unauthorized JSON payload.
        let temp_json = HttpResponse::Unauthorized().json(json!({"error": "Unauthorized"}));
        let updated_event = support::unwrap_body_to_string(&temp_json).unwrap();
        assert_eq!(body, updated_event);
    }
}
/// Role-based test for `ticket_types::update`: removes one pricing tier,
/// modifies another, and adds a new one, then verifies the persisted state
/// round-trips to exactly the request payload (compared as JSON).
///
/// Expects 200 OK when `should_test_succeed`, otherwise 401.
pub fn update(role: Roles, should_test_succeed: bool) {
    let database = TestDatabase::new();
    let request = TestRequest::create();
    let user = database.create_user().finish();
    let organization = database.create_organization().finish();
    let auth_user =
        support::create_auth_user_from_user(&user, role, Some(&organization), &database);
    let event = database
        .create_event()
        .with_organization(&organization)
        .with_tickets()
        .with_ticket_pricing()
        .finish();

    //Retrieve created ticket type and pricing
    let created_ticket_type = &event.ticket_types(&database.connection).unwrap()[0];
    let created_ticket_capacity = created_ticket_type
        .ticket_capacity(&database.connection)
        .unwrap();
    let created_ticket_pricing = created_ticket_type
        .ticket_pricing(&database.connection)
        .unwrap();

    //Construct update request
    let test_request = TestRequest::create_with_uri_event_ticket("/");
    let mut path = Path::<EventTicketPathParameters>::extract(&test_request.request).unwrap();
    path.event_id = event.id;
    path.ticket_type_id = created_ticket_type.id;

    let mut request_ticket_pricing: Vec<UpdateTicketPricingRequest> = Vec::new();
    let start_date = Some(NaiveDate::from_ymd(2018, 5, 1).and_hms(6, 20, 21));
    let middle_date = Some(NaiveDate::from_ymd(2018, 6, 2).and_hms(7, 45, 31));
    let end_date = Some(NaiveDate::from_ymd(2018, 7, 3).and_hms(9, 23, 23));
    let new_pricing_name = String::from("Online");
    //Remove 1st pricing, modify 2nd pricing and add new additional pricing
    request_ticket_pricing.push(UpdateTicketPricingRequest {
        id: Some(created_ticket_pricing[1].id),
        name: Some(String::from("Base")),
        start_date: middle_date,
        end_date,
        price_in_cents: Some(20000),
    });
    request_ticket_pricing.push(UpdateTicketPricingRequest {
        // `None` id marks this as a newly created pricing entry.
        id: None,
        name: Some(new_pricing_name.clone()),
        start_date,
        end_date: middle_date,
        price_in_cents: Some(15000),
    });
    let request_data = UpdateTicketTypeRequest {
        name: Some("Updated VIP".into()),
        capacity: Some(created_ticket_capacity),
        start_date,
        end_date,
        ticket_pricing: Some(request_ticket_pricing),
        increment: None,
    };
    // Serialized before the request consumes `request_data`; compared below.
    let request_json = serde_json::to_string(&request_data).unwrap();

    //Send update request
    let response: HttpResponse = ticket_types::update((
        database.connection.clone().into(),
        path,
        Json(request_data),
        auth_user,
        request.extract_state(),
    )).into();

    //Check if fields have been updated by retrieving the ticket type and pricing
    let updated_ticket_type = &event.ticket_types(&database.connection).unwrap()[0];
    let updated_ticket_capacity = updated_ticket_type
        .ticket_capacity(&database.connection)
        .unwrap();
    let updated_ticket_pricing = updated_ticket_type
        .ticket_pricing(&database.connection)
        .unwrap();
    let mut new_ticket_pricing: Vec<UpdateTicketPricingRequest> = Vec::new();
    new_ticket_pricing.reserve(updated_ticket_pricing.len());
    for current_ticket_pricing in &updated_ticket_pricing {
        //Replace the id of the new additional pricing with None so we can compare it with the request json
        let option_pricing_id = if current_ticket_pricing.name == new_pricing_name {
            None
        } else {
            Some(current_ticket_pricing.id)
        };
        new_ticket_pricing.push(UpdateTicketPricingRequest {
            id: option_pricing_id,
            name: Some(current_ticket_pricing.name.clone()),
            start_date: Some(current_ticket_pricing.start_date),
            end_date: Some(current_ticket_pricing.end_date),
            price_in_cents: Some(current_ticket_pricing.price_in_cents),
        });
    }
    let updated_data = UpdateTicketTypeRequest {
        name: Some(updated_ticket_type.name.clone()),
        capacity: Some(updated_ticket_capacity),
        start_date: Some(updated_ticket_type.start_date),
        end_date: Some(updated_ticket_type.end_date),
        ticket_pricing: Some(new_ticket_pricing),
        increment: None,
    };
    let updated_json = serde_json::to_string(&updated_data).unwrap();
    if should_test_succeed {
        // Persisted state must round-trip to exactly what was requested.
        assert_eq!(request_json, updated_json);
        assert_eq!(response.status(), StatusCode::OK);
    } else {
        assert_eq!(response.status(), StatusCode::UNAUTHORIZED);
    }
}
/// Role-based test for `ticket_types::index`: lists the ticket types of an
/// event and compares them against the expected admin display projection.
///
/// Expects 200 OK with the full payload when `should_test_succeed`,
/// otherwise the standard unauthorized response.
pub fn index(role: Roles, should_test_succeed: bool) {
    let database = TestDatabase::new();
    let user = database.create_user().finish();
    let request = TestRequest::create();
    let organization = database.create_organization().finish();
    let fee_schedule =
        FeeSchedule::find(organization.fee_schedule_id, &database.connection).unwrap();
    let auth_user =
        support::create_auth_user_from_user(&user, role, Some(&organization), &database);
    let event = database
        .create_event()
        .with_organization(&organization)
        .with_ticket_pricing()
        .finish();
    let mut path = Path::<PathParameters>::extract(&request.request).unwrap();
    path.id = event.id;
    // Was `&format!("/limits?")`: a literal needs no `format!` allocation.
    let test_request = TestRequest::create_with_uri("/limits?");
    let query_parameters =
        Query::<PagingParameters>::from_request(&test_request.request, &()).unwrap();
    let response = ticket_types::index((
        database.connection.clone().into(),
        path,
        query_parameters,
        auth_user,
    )).unwrap();

    if should_test_succeed {
        let body = support::unwrap_body_to_string(&response).unwrap();
        assert_eq!(response.status(), StatusCode::OK);
        // The single ticket type must appear in its admin display form.
        let ticket_type = &event.ticket_types(&database.connection).unwrap()[0];
        let expected_ticket_types = vec![
            AdminDisplayTicketType::from_ticket_type(
                ticket_type,
                &fee_schedule,
                &database.connection,
            ).unwrap(),
        ];
        let ticket_types_response: Payload<AdminDisplayTicketType> =
            serde_json::from_str(&body).unwrap();
        assert_eq!(ticket_types_response.data, expected_ticket_types);
    } else {
        support::expects_unauthorized(&response);
    }
}
|
//! Tests auto-converted from "sass-spec/spec/selector-functions/extend"
#[allow(unused)]
use super::rsass;
#[allow(unused)]
use rsass::precision;
// Ignoring "nested", not expected to work yet.
// Ignoring "simple", not expected to work yet.
|
mod uri;
use self::uri::absolute_url;
use crate::http::{bad_request, redirect_to, Request, Result};
use hyper::Uri;
use serde_derive::Deserialize;
use std::cmp::min;
/// Query parameters accepted by [`to`]: `?url=<target>`.
#[derive(Deserialize)]
pub struct RedirectUrlParams {
    // Redirect target; parsed into a `Uri` by the handler.
    url: String,
}
/// `/redirect-to?url=<target>` — redirects to the given URL.
///
/// Responds 400 Bad Request when the query string is missing/invalid or the
/// target does not parse as a URI.
pub async fn to(req: Request) -> Result {
    let params = req.query::<RedirectUrlParams>().map_err(|_| bad_request())?;
    let target: Uri = params.url.parse().map_err(|_| bad_request())?;
    redirect_to(target)
}
/// `/redirect/:n` — alias for [`relative`].
pub async fn redirect(req: Request) -> Result {
    relative(req).await
}
/// `/relative-redirect/:n` — issues a chain of relative redirects, counting
/// down from `n` (capped at 100 remaining hops); the final hop goes to `/`.
///
/// Responds 400 Bad Request if `n` is missing or not a `u16`.
pub async fn relative(req: Request) -> Result {
    let n = req.param::<u16>("n").ok_or_else(bad_request)?;
    // `saturating_sub` guards against `n == 0`, where the plain `n - 1`
    // would underflow the u16: panic in debug, wrap to 65535 in release.
    let n = min(n.saturating_sub(1), 100);
    let url = if n > 0 {
        format!("/relative-redirect/{}", n)
    } else {
        String::from("/")
    };
    let uri = url.parse::<Uri>().map_err(|_| bad_request())?;
    redirect_to(uri)
}
/// `/absolute-redirect/:n` — like [`relative`], but the Location header is an
/// absolute URL built from the incoming request's scheme/host.
///
/// Responds 400 Bad Request if `n` is invalid or the absolute URL cannot be
/// constructed.
pub async fn absolute(req: Request) -> Result {
    let n = req.param::<u16>("n").ok_or_else(bad_request)?;
    // `saturating_sub` guards against `n == 0`, where the plain `n - 1`
    // would underflow the u16: panic in debug, wrap to 65535 in release.
    let n = min(n.saturating_sub(1), 100);
    let url = if n > 0 {
        format!("/absolute-redirect/{}", n)
    } else {
        String::from("/")
    };
    let request_uri = req.uri();
    // Resolve the relative target against the request's own absolute base.
    let response_uri = absolute_url(&req, request_uri)
        .and_then(|base| Ok(base.join(&url)?))
        .and_then(|url| Ok(url.to_string().parse::<Uri>()?))
        .map_err(|_| bad_request())?;
    redirect_to(response_uri)
}
#[cfg(test)]
mod test {
use super::*;
use crate::headers::HeaderMapExt;
use crate::headers::Host;
use crate::headers::Location;
use crate::test::*;
use hyper::http::StatusCode;
use hyper::http::{uri::Authority, Uri};
#[tokio::test]
async fn test_redirect_to() {
let res = request()
.path("/?url=http://example.com")
.handle(to)
.await
.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("http://example.com/")
)
}
#[tokio::test]
async fn test_redirect() {
let res = request().param("n", "5").handle(redirect).await.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("/relative-redirect/4")
)
}
#[tokio::test]
async fn test_redirect_last() {
let res = request().param("n", "1").handle(redirect).await.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("/")
)
}
#[tokio::test]
async fn test_relative_redirect() {
let res = request().param("n", "5").handle(relative).await.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("/relative-redirect/4")
)
}
#[tokio::test]
async fn test_relative_redirect_last() {
let res = request().param("n", "1").handle(relative).await.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("/")
)
}
#[tokio::test]
async fn test_absolute_redirect() {
let res = request()
.typed_header(Host::from(Authority::from_static("example.com")))
.param("n", "5")
.handle(absolute)
.await
.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("http://example.com/absolute-redirect/4")
)
}
#[tokio::test]
async fn test_absolute_redirect_last() {
let res = request()
.typed_header(Host::from(Authority::from_static("example.com")))
.param("n", "1")
.handle(absolute)
.await
.unwrap();
assert_eq!(res.status(), StatusCode::FOUND);
assert_eq!(
res.headers().typed_get::<Location>().unwrap().uri(),
&Uri::from_static("http://example.com/")
)
}
}
|
use std::fs;
// Number of preceding values each entry must be a pair-sum of (part 1).
const PREAMBLE_LENGTH: usize = 25;

/// Part 1: returns the first number that is NOT the sum of two distinct
/// entries among the `preamble` values immediately before it, or `None` if
/// every number validates (or the input is shorter than the preamble).
fn find_first_invalid(numbers: &[i64], preamble: usize) -> Option<i64> {
    numbers.windows(preamble + 1).find_map(|w| {
        let (prev, target) = (&w[..preamble], w[preamble]);
        let has_pair = prev.iter().enumerate().any(|(j, a)| {
            prev.iter()
                .enumerate()
                .any(|(k, b)| j != k && a + b == target)
        });
        if has_pair { None } else { Some(target) }
    })
}

/// Part 2: finds a contiguous run of at least two numbers summing to
/// `target` and returns min + max of that run.
///
/// Uses a sliding window with a running sum — O(n) instead of recomputing
/// every range sum from scratch (the original was worst-case O(n^3)).
/// Assumes non-negative inputs, as the original early-break did.
fn find_weakness(numbers: &[i64], target: i64) -> Option<i64> {
    let mut lo = 0;
    let mut sum = 0i64;
    for hi in 0..numbers.len() {
        sum += numbers[hi];
        // Shrink from the left while the window overshoots the target.
        while sum > target && lo < hi {
            sum -= numbers[lo];
            lo += 1;
        }
        if sum == target && hi > lo {
            let range = &numbers[lo..=hi];
            return Some(range.iter().min().unwrap() + range.iter().max().unwrap());
        }
    }
    None
}

fn main() {
    let contents = fs::read_to_string("input.txt").expect("Failed to read file");
    let numbers: Vec<i64> = contents
        .trim()
        .lines()
        .map(|n| n.trim().parse().expect("input lines must be integers"))
        .collect();

    // part 1
    let invalid_number =
        find_first_invalid(&numbers, PREAMBLE_LENGTH).expect("no invalid number found");
    println!("{} is not valid!", invalid_number);

    // part 2
    if let Some(weakness) = find_weakness(&numbers, invalid_number) {
        println!("Weakness is {}", weakness);
    }
}
|
/// Music playback settings, deserialized from configuration.
#[derive(Clone, Deserialize)]
pub struct MusicSettings {
    // Normal playback volume.
    pub volume: f32,
    // Volume used when the music is ducked/attenuated.
    pub decreased_volume: f32,
}
|
use {Async, Poll};
use stream::{Stream, Fuse};
/// An adapter for zipping the output of two streams together.
///
/// The zipped stream waits until both underlying streams have produced an
/// item, then yields them as a pair; it ends as soon as either stream ends.
/// Errors are not paired: you get at most one error at a time.
//
// Fix: the previous doc comment was copy-pasted from a merge adapter (it
// described items arriving from "one or both" streams) and one doc line was
// stranded below the `#[derive]` attribute.
#[derive(Debug)]
#[must_use = "streams do nothing unless polled"]
pub struct Zip<S1: Stream, S2: Stream> {
    stream1: Fuse<S1>,
    stream2: Fuse<S2>,
    // One-item buffers: hold an item from one side while awaiting the other.
    queued1: Option<S1::Item>,
    queued2: Option<S2::Item>,
}
/// Creates a `Zip` over two streams sharing an error type.
///
/// Both sides are fused so that polling past completion stays safe.
pub fn new<S1, S2>(stream1: S1, stream2: S2) -> Zip<S1, S2>
    where S1: Stream, S2: Stream<Error = S1::Error>
{
    Zip {
        stream1: stream1.fuse(),
        stream2: stream2.fuse(),
        queued1: None,
        queued2: None,
    }
}
impl<S1, S2> Stream for Zip<S1, S2>
    where S1: Stream, S2: Stream<Error = S1::Error>
{
    type Item = (S1::Item, S2::Item);
    type Error = S1::Error;

    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        // Top up each one-item buffer from its stream if empty; an error
        // from either side propagates immediately via `?`.
        if self.queued1.is_none() {
            match self.stream1.poll()? {
                Async::Ready(Some(item1)) => self.queued1 = Some(item1),
                Async::Ready(None) | Async::NotReady => {}
            }
        }
        if self.queued2.is_none() {
            match self.stream2.poll()? {
                Async::Ready(Some(item2)) => self.queued2 = Some(item2),
                Async::Ready(None) | Async::NotReady => {}
            }
        }

        if self.queued1.is_some() && self.queued2.is_some() {
            // Both buffers filled: emit the pair and clear the buffers.
            let pair = (self.queued1.take().unwrap(),
                        self.queued2.take().unwrap());
            Ok(Async::Ready(Some(pair)))
        } else if self.stream1.is_done() || self.stream2.is_done() {
            // One side is exhausted (streams are fused), so no further pair
            // can ever be completed: the zipped stream is finished.
            Ok(Async::Ready(None))
        } else {
            Ok(Async::NotReady)
        }
    }
}
|
extern crate futures;
extern crate hyper;
extern crate url;
use std::env;
use std::io::Error;
use futures::Stream;
use futures::future::{Future, ok};
use hyper::{Method, Uri};
use hyper::server::{Http, Request, Response, Service};
/// Snapshot of an incoming HTTP request, captured for logging.
struct DebugRequest {
    // Request URI as received.
    url: Uri,
    // HTTP method of the request.
    method: Method,
    // Request body decoded as UTF-8 (empty string if decoding failed upstream).
    body: String,
}
use std::fmt;
use std::fmt::Display;
impl Display for DebugRequest {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "url = {}\nmethod = {}\nbody = \"{}\"", self.url, self.method, self.body)
}
}
/// Prints the request details to stdout and returns an already-completed future.
///
/// The printing happens eagerly when `log` is called; the returned future
/// merely resolves to `()`.
fn log(uri: Uri, method: Method, body: String) -> Box<Future<Item = (), Error = Error>> {
    let entry = DebugRequest {
        url: uri,
        method,
        body,
    };
    println!("{}", entry);
    Box::new(ok(()))
}
/// Hyper service that logs every request and replies `200 OK`.
struct Debug;
impl Service for Debug {
    type Request = Request;
    type Response = Response;
    type Error = hyper::Error;
    type Future = Box<Future<Item = Self::Response, Error = Self::Error>>;

    /// Collects the whole request body, logs the request, and answers 200 OK.
    fn call(&self, req: Request) -> Self::Future {
        // Clone the metadata up front; the body consumes `req`.
        let method = req.method().clone();
        let uri = req.uri().clone();

        let future = req
            .body()
            .concat2()
            .map(|chunks| {
                let bytes: Vec<u8> = chunks.iter().cloned().collect();
                // Invalid UTF-8 degrades to an empty body string.
                String::from_utf8(bytes).unwrap_or_else(|_| "".to_owned())
            })
            .and_then(move |body| log(uri, method, body).map_err(hyper::Error::from))
            .map(|_| Response::new().with_status(hyper::StatusCode::Ok));

        Box::new(future)
    }
}
/// Binds the debug service on `0.0.0.0:$DEBUG_PORT` (default 8080) and runs it.
fn main() {
    let port = env::var("DEBUG_PORT").unwrap_or_else(|_| "8080".to_owned());
    let addr = format!("0.0.0.0:{}", port).parse().unwrap();
    println!("listening on {}", addr);
    let server = Http::new().bind(&addr, || Ok(Debug)).and_then(|s| s.run());
    server.unwrap();
}
|
// This file is part of linux-epoll. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT. No part of linux-epoll, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2019 The developers of linux-epoll. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/linux-epoll/master/COPYRIGHT.
/// Register of DNS labels already parsed from a message, so that later
/// compressed (pointer) labels can be resolved back to them.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct ParsedLabels
{
    // Address of the first byte of the DNS message; label offsets are relative to it.
    pub(crate) start_of_message_pointer: usize,
    // Maps a message-relative offset to information about the label chain starting there.
    parsed_labels: HashMap<u16, ParsedLabelInformation>,
}
impl ParsedLabels
{
    /// Creates an empty register rooted at `start_of_message_pointer` (the
    /// address of the first byte of the DNS message).
    #[inline(always)]
    pub(crate) fn new(start_of_message_pointer: usize) -> Self
    {
        Self
        {
            start_of_message_pointer,
            // Capacity guess; a message rarely registers more label offsets than this.
            parsed_labels: HashMap::with_capacity(128),
        }
    }

    /// Parses a name that must occupy the whole of `slice`.
    ///
    /// Errors with `NameWasNotLongEnough` if the name did not consume every
    /// byte of `slice`.
    #[inline(always)]
    pub(crate) fn parse_name_in_slice_with_nothing_left<'message>(&mut self, slice: &'message mut [u8]) -> Result<WithCompressionParsedName<'message>, DnsProtocolError>
    {
        match self.parse_name_in_slice(slice)
        {
            Err(error) => Err(error),

            // The name consumed the whole slice iff its end pointer equals
            // slice start + slice length (rearranged to avoid overflow).
            Ok((parsed_name_iterator, end_of_name_pointer)) => if unlikely!(end_of_name_pointer - slice.len() != slice.as_ptr() as usize)
            {
                Err(NameWasNotLongEnough)
            }
            else
            {
                Ok(parsed_name_iterator)
            }
        }
    }

    /// Parses a (possibly compressed) name starting at the beginning of `slice`.
    ///
    /// Returns the parsed name and the pointer one-past its end.
    #[inline(always)]
    pub(crate) fn parse_name_in_slice<'message>(&mut self, slice: &'message mut [u8]) -> Result<(WithCompressionParsedName<'message>, usize), DnsProtocolError>
    {
        let length = slice.len();
        if unlikely!(length == 0)
        {
            return Err(NameIsEmpty)
        }
        let start_of_name_pointer = slice.as_ptr() as usize;
        self.parse_name(start_of_name_pointer, start_of_name_pointer + length)
    }

    /// Parses a name that may not use compression pointers, but registers its
    /// labels so later names can point at them.
    #[inline(always)]
    pub(crate) fn parse_without_compression_but_register_labels_for_compression<'message>(&mut self, start_of_name_pointer: usize, end_of_data_section_containing_name_pointer: usize) -> Result<(WithoutCompressionParsedName<'message>, usize), DnsProtocolError>
    {
        WithoutCompressionParsedName::parse_without_compression_but_register_labels_for_compression(self, start_of_name_pointer, end_of_data_section_containing_name_pointer)
    }

    /// Parses a name that may use compression pointers.
    #[inline(always)]
    pub(crate) fn parse_name<'message>(&mut self, start_of_name_pointer: usize, end_of_data_section_containing_name_pointer: usize) -> Result<(WithCompressionParsedName<'message>, usize), DnsProtocolError>
    {
        WithCompressionParsedName::parse_with_compression(self, start_of_name_pointer, end_of_data_section_containing_name_pointer)
    }

    /// Validates a compression pointer with message-relative `offset`, and
    /// registers the labels accumulated in `labels_register_reference`.
    ///
    /// Returns `(pointer to the pointed-at label, total number of labels,
    /// total byte length of all labels including trailing periods)`.
    ///
    /// NOTE: takes `&mut self` (not `&self` as previously written) because it
    /// inserts into `parsed_labels` below.
    #[inline(always)]
    pub(crate) fn guard(&mut self, offset: usize, start_of_name_pointer: usize, labels_register_reference: &mut LabelsRegister) -> Result<(usize, u8, u8), DnsProtocolError>
    {
        debug_assert!(offset <= ::std::u16::MAX as usize, "offset is larger than ::std::u16::MAX");

        let points_to_label_at = self.start_of_message_pointer + offset;

        // Compression pointers may only point backwards, at data before the
        // name currently being parsed.
        let pointer_points_at_or_after_start_of_name = points_to_label_at >= start_of_name_pointer;
        if unlikely!(pointer_points_at_or_after_start_of_name)
        {
            return Err(LabelPointerPointsToDataAfterTheStartOfTheCurrentlyBeingParsedName)
        }

        // BUG FIX: entries are inserted keyed by the message-relative offset
        // (see the loop below), so they must be looked up the same way; the
        // previous code truncated the absolute pointer (`points_to_label_at
        // as u16`), which only matched when the message base was a multiple
        // of 2^16.
        let compressed_offset = offset as u16;
        let &ParsedLabelInformation { mut number_of_uncompressed_labels_with_all_pointers_resolved, mut length_of_all_labels_including_period } = self.parsed_labels.get(&compressed_offset).ok_or(LabelPointerPointsToALabelThatWasNotPreviouslyParsed(offset))?;

        let number_of_labels = number_of_uncompressed_labels_with_all_pointers_resolved + labels_register_reference.len() as u8;
        if unlikely!(number_of_labels > WithCompressionParsedName::MaximumNumberOfLabels as u8)
        {
            return Err(LabelPointerCreatesADnsNameLongerThan127Labels)
        }

        // Register every label gathered so far, innermost first, accumulating
        // running label counts and byte lengths for each registered offset.
        for (label_starts_at_pointer, label_bytes_length_including_trailing_period) in labels_register_reference.iter().rev()
        {
            number_of_uncompressed_labels_with_all_pointers_resolved += 1;

            let label_starts_at_pointer = *label_starts_at_pointer;
            let label_bytes_length_including_trailing_period = *label_bytes_length_including_trailing_period;
            debug_assert_ne!(label_bytes_length_including_trailing_period, 0, "label_bytes_length_including_trailing_period was zero");

            // DNS names may not exceed 255 bytes; checked_add enforces the cap.
            length_of_all_labels_including_period = length_of_all_labels_including_period.checked_add(label_bytes_length_including_trailing_period).ok_or(LabelPointerCreatesADnsNameLongerThan255Bytes)?;

            debug_assert!(label_starts_at_pointer >= self.start_of_message_pointer, "offset occurs before start_of_message_pointer");
            let offset = label_starts_at_pointer - self.start_of_message_pointer;
            debug_assert!(offset <= ::std::u16::MAX as usize, "offset `{}` exceeds ::std::u16::MAX", offset);

            let previous = self.parsed_labels.insert(offset as u16, ParsedLabelInformation { number_of_uncompressed_labels_with_all_pointers_resolved, length_of_all_labels_including_period });
            debug_assert_eq!(previous, None, "duplicate uncompressed label");
        }

        Ok((points_to_label_at, number_of_labels, length_of_all_labels_including_period))
    }
}
|
#[macro_use]
extern crate yew;
use yew::prelude::*;
use yew::services::console::ConsoleService;
mod map;
use map::{Cell, CellColors, Coordinate, Map};
/// Shared services available to components via yew's `Env`.
struct Context {
    // Browser-console logging service.
    console: ConsoleService,
}
/// Application state: the board and whose turn it is.
struct Model {
    // The 8x8 game board.
    map: Map,
    // Colour of the player whose turn it currently is.
    player: CellColors,
}
/// UI messages dispatched to `Model::update`.
#[derive(Debug)]
enum Msg {
    /// No-op.
    Nope,
    /// Player places a stone at `(row, column)`.
    Hand(usize, usize),
}
impl Component<Context> for Model {
    type Msg = Msg;
    type Properties = ();

    /// Builds the initial model: an 8x8 board with Black moving first.
    fn create(_: Self::Properties, _: &mut Env<Context, Self>) -> Self {
        Model {
            map: Map::new(8, 8),
            player: CellColors::Black,
        }
    }

    /// Applies a message: a `Hand` places the current player's stone and
    /// passes the turn; `Nope` does nothing. Always requests a re-render.
    fn update(&mut self, msg: Self::Msg, context: &mut Env<Context, Self>) -> ShouldRender {
        // NOTE(review): println! is a no-op in a browser build; presumably
        // context.console (unused here) was intended — confirm.
        println!("msg: {:?}", msg);
        match msg {
            Msg::Hand(row, column) => {
                self.map.put_hand(row, column, self.player);
                println!("in hand row:{} column:{}", row, column);
                self.switch_player();
            }
            Msg::Nope => (),
        }
        true
    }
}
impl Renderable<Context, Model> for Model {
    /// Renders the whole app: the current-player indicator above the board.
    fn view(&self) -> Html<Context, Self> {
        html! {
            <div>
                { self.render_player_indicator() }
                { self.render_map() }
            </div>
        }
    }
}
impl Model {
    /// Renders the board as a `<table>`; empty cells that are legal moves for
    /// the current player get a click handler producing `Msg::Hand`.
    fn render_map(&self) -> Html<Context, Self> {
        let render_map_elem = |cell: &Cell| {
            // Clone so the onclick closure can own the coordinates.
            let c = cell.clone();
            match cell.color {
                // Empty AND a legal (reversing) move: clickable cell.
                CellColors::Empty
                    if self.map
                        .is_reversible(Coordinate(cell.row, cell.column), self.player) =>
                {
                    html!{
                        <td class=("gray-cell", "clickable"), onclick=move |_: MouseData| Msg::Hand(c.row, c.column), />
                    }
                }
                CellColors::Empty => html!{ <td class="gray-cell", ></td> },
                CellColors::Black => html!{ <td class="black-cell" ,></td> },
                CellColors::White => html!{ <td class="white-cell" ,></td> },
            }
        };
        html!{
            <table>{ for self.map.inner_map.iter().map(|column| {
                html!{
                    <tr>
                        { for column.iter().map(|cell| render_map_elem(cell)) }
                    </tr>
                }
            })}</table>
        }
    }

    /// Renders a small badge showing which colour moves next.
    fn render_player_indicator(&self) -> Html<Context, Self> {
        html! {
            <div class="player-indicator-container", >
                <span>{ "player:" }</span>
                <span class=("player-indicator", {
                    match self.player {
                        CellColors::Black => "player-black",
                        CellColors::White => "player-white",
                        _ => "",
                    }
                }),></span>
            </div>
        }
        // match self.player {
        //     CellColors::Black => html!{
        //         <p>player: <div class=("player-indicator", "player-black"),></div></p>
        //     },
        //     CellColors::White => html!{
        //         <p>player: <div class=("player-indicator", "player-white"),></div></p>
        //     },
        //     _ => unreachable!(),
        // }
    }

    /// Passes the turn to the other colour; the current player is never Empty.
    fn switch_player(&mut self) {
        self.player = match self.player {
            CellColors::White => CellColors::Black,
            CellColors::Black => CellColors::White,
            CellColors::Empty => unreachable!(),
        }
    }
}
/// Entry point: builds the yew context, mounts the app on the document body
/// and starts the browser event loop.
fn main() {
    yew::initialize();
    let context = Context {
        console: ConsoleService,
    };
    let app: App<_, Model> = App::new(context);
    app.mount_to_body();
    yew::run_loop();
}
|
#![allow(clippy::upper_case_acronyms)]
/*
TODO: This isn't all that efficient. We could gain some efficiencies using a generated lexer.
TODO: Proper lookahead.
*/
use regex::Regex;
use rust_decimal::Decimal;
use std::fmt;
use std::iter::FromIterator;
use self::context::*;
// TODO: Add in some sort of message or reason.
/// An error produced by the lexer, pointing at the offending span of a line.
#[derive(Debug)]
pub struct LexicalError<'input> {
    /// The full text of the line on which the error occurred.
    pub line: &'input str,
    /// Zero-based number of `line` within the input.
    pub line_number: usize,
    /// Start of the offending span within the line.
    pub start_pos: usize,
    /// End (exclusive) of the offending span within the line.
    pub end_pos: usize,
    /// Human-readable dump of the lexer's state stack, e.g. "Normal(Any) -> String".
    pub lexer_state: String,
    /// Explanation of what went wrong.
    pub reason: String,
}

mod context {
    use super::LexicalError;

    /// The lexer's current mode; kept on a stack so nested constructs can
    /// restore the previous mode when they end.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub enum LexerState {
        Normal(NormalVariant),
        /// Line comment (`--`).
        Comment1,
        /// Block comment (`/* ... */`).
        Comment2,
        /// Single-quoted string value.
        String,
        /// Double-quoted identifier.
        QuotedIdentifier,
        /// Inside the opening tag of a dollar-quoted literal (`$tag$`).
        LiteralStart,
        /// Inside the closing tag of a dollar-quoted literal.
        LiteralEnd,
        /// Inside the body of a dollar-quoted literal.
        LiteralBody,
    }

    /// Which keyword set the `Normal` state recognises.
    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub enum NormalVariant {
        /// Statement position: statement-introducing keywords are live.
        Any,
        /// Inside a statement body: only the body keyword set is live.
        Body,
    }

    /// Mutable lexer bookkeeping: position tracking, pending character
    /// buffers and the state stack.
    pub struct Context {
        current_line: usize,
        current_position: usize,
        pub last_char: char,
        pub buffer: Vec<char>,
        pub literal: Vec<char>,
        state: Vec<LexerState>,
    }

    impl Context {
        /// Creates a context at the start of input with `Normal(start)` as
        /// the sole entry on the state stack.
        pub fn new(start: NormalVariant) -> Self {
            Context {
                current_line: 0,
                current_position: 0,
                last_char: '\0',
                buffer: Vec::new(),
                literal: Vec::new(),
                state: vec![LexerState::Normal(start)],
            }
        }

        /// Advances to the next line and resets per-line position tracking.
        pub fn new_line(&mut self) {
            self.current_line += 1;
            self.current_position = 0;
            self.last_char = '\0'; // Start fresh
        }

        /// Records that `c` has been consumed.
        pub fn next_char(&mut self, c: char) {
            self.current_position += 1;
            self.last_char = c;
        }

        /// Builds a `LexicalError` at the current position; the offending
        /// span is taken to be whatever is sitting in `buffer`.
        pub fn create_error<'input, T: Into<String>>(&self, line: &'input str, reason: T) -> LexicalError<'input> {
            LexicalError {
                line,
                line_number: self.current_line,
                // saturating_sub: the buffer can outlive a line boundary
                // (e.g. a multi-line dollar-quoted literal), in which case it
                // may be longer than the position within the current line;
                // clamp to the line start instead of underflowing.
                start_pos: self.current_position.saturating_sub(self.buffer.len()),
                end_pos: self.current_position,
                lexer_state: self
                    .state
                    .iter()
                    .map(|s| match s {
                        LexerState::Normal(variant) => match variant {
                            NormalVariant::Any => "Normal(Any)",
                            NormalVariant::Body => "Normal(Body)",
                        },
                        LexerState::Comment1 => "CommentLine",
                        LexerState::Comment2 => "CommentBlock",
                        LexerState::String => "String",
                        LexerState::QuotedIdentifier => "QuotedIdentifier",
                        LexerState::LiteralStart => "LiteralBegin",
                        LexerState::LiteralBody => "Literal",
                        LexerState::LiteralEnd => "LiteralEnd",
                    })
                    .collect::<Vec<_>>()
                    .join(" -> "),
                reason: reason.into(),
            }
        }

        /// Enters a new state, remembering the current one.
        pub fn push_state(&mut self, state: LexerState) {
            self.state.push(state);
        }

        /// Returns to the previously-pushed state.
        pub fn pop_state(&mut self) {
            self.state.pop();
        }

        /// Swaps the current state for `state` without growing the stack.
        pub fn replace_state(&mut self, state: LexerState) {
            self.state.pop();
            self.state.push(state);
        }

        /// Returns the current state.
        ///
        /// # Panics
        /// Panics if the state stack is empty (a lexer bug).
        pub fn peek_state(&self) -> LexerState {
            *self.state.last().expect("Nothing left in the stack")
        }
    }
}
/// A single lexical token produced by the SQL tokenizer.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Token {
    // Keywords (each displays as its upper-case name).
    ACTION,
    ARRAY,
    AS,
    ASC,
    BIGINT,
    BIGSERIAL,
    BIT,
    BOOL,
    BOOLEAN,
    BTREE,
    CASCADE,
    CONSTRAINT,
    CHAR,
    CHARACTER,
    CREATE,
    DATE,
    DEFAULT,
    DELETE,
    DESC,
    DOUBLE,
    ENUM,
    EXTENSION,
    FILLFACTOR,
    FIRST,
    FOREIGN,
    FULL,
    FUNCTION,
    GIN,
    GIST,
    HASH,
    IN,
    INDEX,
    INOUT,
    INT,
    INT2,
    INT4,
    INT8,
    INTEGER,
    JSON,
    JSONB,
    KEY,
    LANGUAGE,
    LAST,
    MATCH,
    MONEY,
    NO,
    NOT,
    NULL,
    NULLS,
    NUMERIC,
    ON,
    OR,
    OUT,
    PARTIAL,
    PRECISION,
    PRIMARY,
    REAL,
    REFERENCES,
    REPLACE,
    RESTRICT,
    RETURNS,
    SCHEMA,
    SERIAL,
    SERIAL2,
    SERIAL4,
    SERIAL8,
    SET,
    SETOF,
    SIMPLE,
    SMALLINT,
    SMALLSERIAL,
    TABLE,
    TEXT,
    TIME,
    TIMESTAMP,
    TIMESTAMPTZ,
    TIMETZ,
    TYPE,
    UNIQUE,
    UPDATE,
    USING,
    UUID,
    VARBIT,
    VARCHAR,
    VARIADIC,
    VARYING,
    WITH,
    WITHOUT,
    ZONE,
    // Value-carrying tokens.
    Identifier(String),
    Digit(i32),
    Decimal(Decimal),
    Boolean(bool),
    StringValue(String),
    Literal(String),
    // Punctuation.
    LeftBracket,
    RightBracket,
    LeftSquare,
    RightSquare,
    Colon,
    Comma,
    Period,
    Semicolon,
    Equals,
}
impl fmt::Display for Token {
    /// Renders keyword and punctuation tokens as fixed text, and
    /// value-carrying tokens with their bespoke formats.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = match *self {
            // Value-carrying tokens: formatted and returned immediately.
            Token::Identifier(ref ident) => return write!(f, "Ident({})", ident),
            Token::Digit(i) => return write!(f, "{}", i),
            Token::Decimal(d) => return write!(f, "{}", d),
            Token::Boolean(b) => return write!(f, "{}", if b { "TRUE" } else { "FALSE" }),
            Token::StringValue(ref s) => return write!(f, "'{}'", s),
            Token::Literal(ref s) => return write!(f, "$$ {} $$", s),
            // Keywords render as their upper-case names.
            Token::ACTION => "ACTION",
            Token::ARRAY => "ARRAY",
            Token::AS => "AS",
            Token::ASC => "ASC",
            Token::BIGINT => "BIGINT",
            Token::BIGSERIAL => "BIGSERIAL",
            Token::BIT => "BIT",
            Token::BOOL => "BOOL",
            Token::BOOLEAN => "BOOLEAN",
            Token::BTREE => "BTREE",
            Token::CASCADE => "CASCADE",
            Token::CONSTRAINT => "CONSTRAINT",
            Token::CHAR => "CHAR",
            Token::CHARACTER => "CHARACTER",
            Token::CREATE => "CREATE",
            Token::DATE => "DATE",
            Token::DEFAULT => "DEFAULT",
            Token::DELETE => "DELETE",
            Token::DESC => "DESC",
            Token::DOUBLE => "DOUBLE",
            Token::ENUM => "ENUM",
            Token::EXTENSION => "EXTENSION",
            Token::FILLFACTOR => "FILLFACTOR",
            Token::FIRST => "FIRST",
            Token::FOREIGN => "FOREIGN",
            Token::FULL => "FULL",
            Token::FUNCTION => "FUNCTION",
            Token::GIN => "GIN",
            Token::GIST => "GIST",
            Token::HASH => "HASH",
            Token::IN => "IN",
            Token::INDEX => "INDEX",
            Token::INOUT => "INOUT",
            Token::INT => "INT",
            Token::INT2 => "INT2",
            Token::INT4 => "INT4",
            Token::INT8 => "INT8",
            Token::INTEGER => "INTEGER",
            Token::JSON => "JSON",
            Token::JSONB => "JSONB",
            Token::KEY => "KEY",
            Token::LANGUAGE => "LANGUAGE",
            Token::LAST => "LAST",
            Token::MATCH => "MATCH",
            Token::MONEY => "MONEY",
            Token::NO => "NO",
            Token::NOT => "NOT",
            Token::NULL => "NULL",
            Token::NULLS => "NULLS",
            Token::NUMERIC => "NUMERIC",
            Token::ON => "ON",
            Token::OR => "OR",
            Token::OUT => "OUT",
            Token::PARTIAL => "PARTIAL",
            Token::PRECISION => "PRECISION",
            Token::PRIMARY => "PRIMARY",
            Token::REAL => "REAL",
            Token::REFERENCES => "REFERENCES",
            Token::REPLACE => "REPLACE",
            Token::RESTRICT => "RESTRICT",
            Token::RETURNS => "RETURNS",
            Token::SCHEMA => "SCHEMA",
            Token::SERIAL => "SERIAL",
            Token::SERIAL2 => "SERIAL2",
            Token::SERIAL4 => "SERIAL4",
            Token::SERIAL8 => "SERIAL8",
            Token::SET => "SET",
            Token::SETOF => "SETOF",
            Token::SIMPLE => "SIMPLE",
            Token::SMALLINT => "SMALLINT",
            Token::SMALLSERIAL => "SMALLSERIAL",
            Token::TABLE => "TABLE",
            Token::TEXT => "TEXT",
            Token::TIME => "TIME",
            Token::TIMESTAMP => "TIMESTAMP",
            Token::TIMESTAMPTZ => "TIMESTAMPTZ",
            Token::TIMETZ => "TIMETZ",
            Token::TYPE => "TYPE",
            Token::UNIQUE => "UNIQUE",
            Token::UPDATE => "UPDATE",
            Token::USING => "USING",
            Token::UUID => "UUID",
            Token::VARBIT => "VARBIT",
            Token::VARCHAR => "VARCHAR",
            Token::VARIADIC => "VARIADIC",
            Token::VARYING => "VARYING",
            Token::WITH => "WITH",
            Token::WITHOUT => "WITHOUT",
            Token::ZONE => "ZONE",
            // Punctuation.
            Token::LeftBracket => "(",
            Token::RightBracket => ")",
            Token::LeftSquare => "[",
            Token::RightSquare => "]",
            Token::Colon => ":",
            Token::Comma => ",",
            Token::Period => ".",
            Token::Semicolon => ";",
            Token::Equals => "=",
        };
        f.write_str(text)
    }
}
// Classification regexes, compiled once on first use.
lazy_static! {
    // Bare identifier: a letter followed by letters, digits or underscores.
    static ref IDENTIFIER: Regex = Regex::new("^[a-zA-Z][a-zA-Z0-9_]*$").unwrap();
    // Decimal number with a mandatory fractional part.
    static ref DECIMAL: Regex = Regex::new("^\\d+\\.\\d+$").unwrap();
    // Plain integer.
    static ref DIGIT: Regex = Regex::new("^\\d+$").unwrap();
}
// Flushes the context's pending buffer as a single token (if non-empty);
// early-returns a LexicalError from the enclosing function on failure.
macro_rules! tokenize_normal_buffer {
    ($context:ident, $line:ident, $tokens:ident) => {{
        if $context.buffer.len() > 0 {
            let token = match self::create_normal_token(&mut $context) {
                Some(t) => t,
                None => return Err($context.create_error($line, "unexpected token")),
            };
            push_token!($tokens, token);
            $context.buffer.clear();
        }
    }};
}

// Early-returns the matching Token variant when $value equals the variant's
// name, case-insensitively.
macro_rules! match_keyword {
    ($value:ident, $enum_value:ident) => {{
        let raw = stringify!($enum_value);
        if raw.eq_ignore_ascii_case(&$value[..]) {
            return Some(Token::$enum_value);
        }
    }};
}

// As match_keyword!, but also switches the lexer's Normal variant (used for
// statement-introducing keywords that change the live keyword set).
macro_rules! match_keyword_replace_state {
    ($context:ident, $variant:expr, $value:ident, $enum_value:ident) => {{
        let raw = stringify!($enum_value);
        if raw.eq_ignore_ascii_case(&$value[..]) {
            $context.replace_state(LexerState::Normal($variant));
            return Some(Token::$enum_value);
        }
    }};
}

// Appends a token to the output stream (kept as a macro so tracing can be
// re-enabled in one place).
macro_rules! push_token {
    ($tokens:ident, $symbol:expr) => {
        //println!("{}", $symbol);
        $tokens.push($symbol);
    };
}
/// Converts the characters accumulated in `context.buffer` into a single
/// token: boolean literal, keyword (set depends on the current `Normal`
/// variant), identifier, decimal or integer — or `None` if nothing matches.
fn create_normal_token(context: &mut Context) -> Option<Token> {
    let variant = if let LexerState::Normal(variant) = context.peek_state() {
        variant
    } else {
        return None;
    };
    let value = String::from_iter(context.buffer.clone());

    // Boolean literals are matched case-insensitively.
    if "true".eq_ignore_ascii_case(&value[..]) {
        return Some(Token::Boolean(true));
    }
    if "false".eq_ignore_ascii_case(&value[..]) {
        return Some(Token::Boolean(false));
    }

    // Keywords - this is very naive and should be generated.
    if let NormalVariant::Any = variant {
        // Statement-position-only keywords.
        match_keyword!(value, CREATE);
        match_keyword!(value, OR);
        match_keyword!(value, REPLACE);
        // Any of the below will switch state. This only gets reset on statement end.
        match_keyword_replace_state!(context, NormalVariant::Body, value, EXTENSION);
        match_keyword_replace_state!(context, NormalVariant::Body, value, FUNCTION);
        match_keyword_replace_state!(context, NormalVariant::Body, value, INDEX);
        match_keyword_replace_state!(context, NormalVariant::Body, value, SCHEMA);
        match_keyword_replace_state!(context, NormalVariant::Body, value, TABLE);
    }
    // Keywords recognised in either variant.
    match_keyword!(value, ACTION);
    match_keyword!(value, ARRAY);
    match_keyword!(value, AS);
    match_keyword!(value, ASC);
    match_keyword!(value, BIGINT);
    match_keyword!(value, BIGSERIAL);
    match_keyword!(value, BIT);
    match_keyword!(value, BOOL);
    match_keyword!(value, BOOLEAN);
    match_keyword!(value, BTREE);
    match_keyword!(value, CASCADE);
    match_keyword!(value, CONSTRAINT);
    match_keyword!(value, CHAR);
    match_keyword!(value, CHARACTER);
    match_keyword!(value, DATE);
    match_keyword!(value, DEFAULT);
    match_keyword!(value, DELETE);
    match_keyword!(value, DESC);
    match_keyword!(value, DOUBLE);
    match_keyword!(value, ENUM);
    match_keyword!(value, FILLFACTOR);
    match_keyword!(value, FIRST);
    match_keyword!(value, FOREIGN);
    match_keyword!(value, FULL);
    match_keyword!(value, GIN);
    match_keyword!(value, GIST);
    match_keyword!(value, HASH);
    match_keyword!(value, IN);
    match_keyword!(value, INOUT);
    match_keyword!(value, INT);
    match_keyword!(value, INT2);
    match_keyword!(value, INT4);
    match_keyword!(value, INT8);
    match_keyword!(value, INTEGER);
    match_keyword!(value, JSON);
    match_keyword!(value, JSONB);
    match_keyword!(value, KEY);
    match_keyword!(value, LANGUAGE);
    match_keyword!(value, LAST);
    match_keyword!(value, MATCH);
    match_keyword!(value, MONEY);
    match_keyword!(value, NO);
    match_keyword!(value, NOT);
    match_keyword!(value, NULL);
    match_keyword!(value, NULLS);
    match_keyword!(value, NUMERIC);
    match_keyword!(value, ON);
    match_keyword!(value, OR);
    match_keyword!(value, OUT);
    match_keyword!(value, PARTIAL);
    match_keyword!(value, PRECISION);
    match_keyword!(value, PRIMARY);
    match_keyword!(value, REAL);
    match_keyword!(value, REFERENCES);
    match_keyword!(value, RESTRICT);
    match_keyword!(value, RETURNS);
    match_keyword!(value, SERIAL);
    match_keyword!(value, SERIAL2);
    match_keyword!(value, SERIAL4);
    match_keyword!(value, SERIAL8);
    match_keyword!(value, SET);
    match_keyword!(value, SETOF);
    match_keyword!(value, SIMPLE);
    match_keyword!(value, SMALLINT);
    match_keyword!(value, SMALLSERIAL);
    match_keyword!(value, TABLE); // The one exception
    match_keyword!(value, TEXT);
    match_keyword!(value, TIME);
    match_keyword!(value, TIMESTAMP);
    match_keyword!(value, TIMESTAMPTZ);
    match_keyword!(value, TIMETZ);
    match_keyword!(value, TYPE);
    match_keyword!(value, UNIQUE);
    match_keyword!(value, UPDATE);
    match_keyword!(value, USING);
    match_keyword!(value, UUID);
    match_keyword!(value, VARBIT);
    match_keyword!(value, VARCHAR);
    match_keyword!(value, VARIADIC);
    match_keyword!(value, VARYING);
    match_keyword!(value, WITH);
    match_keyword!(value, WITHOUT);
    match_keyword!(value, ZONE);

    // Regex
    if IDENTIFIER.is_match(&value[..]) {
        return Some(Token::Identifier(value));
    }
    if DECIMAL.is_match(&value[..]) {
        return Some(Token::Decimal(value.parse::<Decimal>().unwrap()));
    }
    if DIGIT.is_match(&value[..]) {
        return Some(Token::Digit(value.parse::<i32>().unwrap()));
    }

    // Error
    None
}
/// Tokenizes `text` starting in statement-body mode (body keyword set live).
pub fn tokenize_body(text: &str) -> Result<Vec<Token>, LexicalError> {
    tokenize(text, NormalVariant::Body)
}

/// Tokenizes `text` starting at statement position (full keyword set live).
pub fn tokenize_stmt(text: &str) -> Result<Vec<Token>, LexicalError> {
    tokenize(text, NormalVariant::Any)
}
/// Tokenizes `text` with a character-at-a-time state machine, starting in
/// `Normal(start)`. Returns the token stream or the first lexical error.
fn tokenize(text: &str, start: NormalVariant) -> Result<Vec<Token>, LexicalError> {
    // This tokenizer is whitespace dependent by default, i.e. whitespace is relevant.
    let mut tokens = Vec::new();
    let mut context = Context::new(start);
    // Loop through each character, halting on whitespace
    // Our outer loop works by newline
    let lines: Vec<&str> = text.split('\n').collect();
    for line in lines {
        context.new_line();
        for c in line.chars() {
            match context.peek_state() {
                LexerState::Normal(_) => {
                    // Check if we should be entering the comment state
                    if context.last_char == '-' && c == '-' {
                        // take off the previous item as it was a comment character and push the buffer
                        context.buffer.pop();
                        tokenize_normal_buffer!(context, line, tokens);
                        context.push_state(LexerState::Comment1);
                    } else if context.last_char == '/' && c == '*' {
                        // take off the previous item as it was a comment character and push the buffer
                        context.buffer.pop();
                        tokenize_normal_buffer!(context, line, tokens);
                        context.push_state(LexerState::Comment2);
                    } else if c == '\'' {
                        if context.buffer.is_empty() {
                            context.push_state(LexerState::String);
                        } else {
                            // Invalid state - must be something like xx'dd
                            return Err(context.create_error(line, "' was unexpected"));
                        }
                    } else if c == '"' {
                        if context.buffer.is_empty() {
                            context.push_state(LexerState::QuotedIdentifier);
                        } else {
                            // Invalid state - Must be something like xx"dd
                            return Err(context.create_error(line, "\" was unexpected"));
                        }
                    } else if c == '$' {
                        if context.buffer.is_empty() {
                            context.push_state(LexerState::LiteralStart);
                        } else {
                            // Unsupported state in our lexer
                            return Err(context.create_error(line, "$ was unexpected"));
                        }
                    } else if c.is_whitespace() {
                        // Simple check for whitespace
                        tokenize_normal_buffer!(context, line, tokens);
                    } else {
                        // If it is a symbol then don't bother with the buffer
                        match c {
                            '(' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::LeftBracket);
                            }
                            ')' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::RightBracket);
                            }
                            ',' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::Comma);
                            }
                            ':' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::Colon);
                            }
                            ';' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::Semicolon);
                                // A statement has ended: restore the full keyword set.
                                context.replace_state(LexerState::Normal(NormalVariant::Any));
                            }
                            '=' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::Equals);
                            }
                            '.' => {
                                // If it is just a plain digit in the buffer, then allow it to continue.
                                // (This is what lets `12.5` lex as one Decimal rather than 12 . 5.)
                                if context.buffer.iter().all(|c: &char| c.is_digit(10)) {
                                    context.buffer.push(c);
                                } else {
                                    tokenize_normal_buffer!(context, line, tokens);
                                    push_token!(tokens, Token::Period);
                                }
                            }
                            '[' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::LeftSquare);
                            }
                            ']' => {
                                tokenize_normal_buffer!(context, line, tokens);
                                push_token!(tokens, Token::RightSquare);
                            }
                            _ => context.buffer.push(c),
                        }
                    }
                }
                LexerState::Comment1 => {
                    // Ignore comments
                }
                LexerState::Comment2 => {
                    if context.last_char == '*' && c == '/' {
                        context.pop_state();
                    }
                    // Ignore comments
                }
                LexerState::String => {
                    if c == '\'' {
                        push_token!(tokens, Token::StringValue(String::from_iter(context.buffer.clone())));
                        context.buffer.clear();
                        context.pop_state();
                    } else {
                        context.buffer.push(c);
                    }
                }
                LexerState::QuotedIdentifier => {
                    if c == '"' {
                        push_token!(tokens, Token::Identifier(String::from_iter(context.buffer.clone())));
                        context.buffer.clear();
                        context.pop_state();
                    } else {
                        context.buffer.push(c);
                    }
                }
                LexerState::LiteralStart => {
                    // Accumulate the tag between the opening `$...$`.
                    if c == '$' {
                        context.replace_state(LexerState::LiteralBody);
                    } else {
                        context.literal.push(c);
                    }
                }
                LexerState::LiteralEnd => {
                    // Match the closing tag character-by-character against
                    // the (reversed, partially-consumed) stored tag.
                    if c == '$' {
                        if context.literal.is_empty() {
                            context.pop_state();
                        } else {
                            // Error: literal name mismatch
                            return Err(context.create_error(line, "literal name mismatch - leftover characters"));
                        }
                    } else if context.literal.is_empty() {
                        // Error: literal name mismatch
                        return Err(context.create_error(line, "literal name mismatch - exhausted characters"));
                    } else {
                        let l = context.literal.pop().unwrap();
                        if l != c {
                            // Error: literal name mismatch
                            return Err(context.create_error(line, "literal name mismatch - unexpected character"));
                        }
                    }
                }
                LexerState::LiteralBody => {
                    // We only escape from a literal body if the next few characters are
                    // in fact part of the literal. For example, we may be using $1 as a positional
                    // argument.
                    // Since we don't have a lookahead system implemented (yet) we do some naive checks
                    // here.
                    if context.last_char == '$' {
                        if context.literal.is_empty() {
                            if c == '$' {
                                // We've parsed a complete literal
                                context.buffer.pop(); // Pop off the previous $
                                // Add the token
                                let data = String::from_iter(context.buffer.clone());
                                push_token!(tokens, Token::Literal(data.trim().into()));
                                context.buffer.clear();
                                context.pop_state();
                            } else {
                                // We're still in the buffer
                                context.buffer.push(c);
                            }
                        } else {
                            // This is a naive check only looking at the first character
                            if context.literal[0] == c {
                                context.buffer.pop(); // Pop off the previous $
                                let data = String::from_iter(context.buffer.clone());
                                push_token!(tokens, Token::Literal(data.trim().into()));
                                context.buffer.clear();
                                context.literal.reverse();
                                context.literal.pop(); // we've already confirmed the first char
                                context.replace_state(LexerState::LiteralEnd);
                            } else {
                                // We're still in the buffer
                                context.buffer.push(c);
                            }
                        }
                    } else {
                        context.buffer.push(c);
                    }
                }
            }
            // Move the current_position
            context.next_char(c);
        }
        // If we were a single line comment, we go back to a normal state on a new line
        match context.peek_state() {
            LexerState::Normal(_) => {
                // We may also have a full buffer
                tokenize_normal_buffer!(context, line, tokens);
            }
            LexerState::Comment1 => {
                // End of a line finishes the comment
                context.pop_state();
            }
            LexerState::Comment2 => {
                // Do nothing at the end of a line - it's a multi-line comment
            }
            LexerState::String | LexerState::QuotedIdentifier | LexerState::LiteralStart | LexerState::LiteralEnd => {
                // If we're in these states at the end of a line it's an error
                // (e.g. at the moment we don't support multi-line strings)
                return Err(context.create_error(line, "end of line was unexpected"));
            }
            LexerState::LiteralBody => {
                // Add a new line onto the buffer
                context.buffer.push('\n');
            }
        }
    }
    Ok(tokens)
}
|
use super::{Index, Length};
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::{cmp, fmt, hash, mem, ops, slice, vec};
/// A `[T]` whose indexing operations take the typed index `I` instead of
/// `usize`; `repr(transparent)` over `[T]` so the two can be pointer-cast.
#[repr(transparent)]
pub struct Slice<I: Index, T>(PhantomData<fn(I) -> I>, [T]);

impl<I: Index, T: fmt::Debug> fmt::Debug for Slice<I, T> {
    // Debug-formats exactly like the underlying slice.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.1, f)
    }
}
impl<I: Index, T> Slice<I, T> {
    /// Builds an index-typed slice from a raw pointer and a typed length.
    ///
    /// # Safety
    /// Same contract as `slice::from_raw_parts`: `p` must be valid for reads
    /// of `len` elements for the caller-chosen lifetime `'a`.
    #[inline(always)]
    pub unsafe fn from_raw_parts<'a>(p: *const T, len: Length<I>) -> &'a Slice<I, T> {
        // The cast is sound because Slice is repr(transparent) over [T].
        &*(slice::from_raw_parts(p, len.as_usize()) as *const [T] as *const Self)
    }

    /// Wraps an ordinary shared slice, first validating that its length is
    /// representable as a `Length<I>` (the discarded `Length::new` call is
    /// presumably the range check — confirm in `Length`).
    #[inline(always)]
    pub fn at(s: &[T]) -> &Slice<I, T> {
        let _ = Length::<I>::new(s.len());
        unsafe { Self::at_unchecked(s) }
    }

    /// Mutable counterpart of [`Slice::at`].
    #[inline(always)]
    pub fn at_mut(s: &mut [T]) -> &mut Slice<I, T> {
        let _ = Length::<I>::new(s.len());
        unsafe { Self::at_unchecked_mut(s) }
    }

    /// Wraps a slice without validating its length.
    ///
    /// # Safety
    /// The caller must guarantee `s.len()` is representable as `Length<I>`.
    #[inline(always)]
    pub unsafe fn at_unchecked(s: &[T]) -> &Slice<I, T> {
        // Sound: repr(transparent) over [T].
        &*(s as *const [T] as *const Self)
    }

    /// Mutable counterpart of [`Slice::at_unchecked`].
    ///
    /// # Safety
    /// The caller must guarantee `s.len()` is representable as `Length<I>`.
    #[inline(always)]
    pub unsafe fn at_unchecked_mut(s: &mut [T]) -> &mut Slice<I, T> {
        &mut *(s as *mut [T] as *mut Self)
    }

    /// Length as the typed `Length<I>`; in-range by construction invariant.
    #[inline(always)]
    pub fn len(&self) -> Length<I> {
        unsafe { Length::new_unchecked(self.1.len()) }
    }

    /// `true` if the slice has no elements.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.1.is_empty()
    }

    /// Borrows the underlying `usize`-indexed slice.
    #[inline(always)]
    pub fn as_ui(&self) -> &[T] {
        &self.1
    }

    /// Mutably borrows the underlying `usize`-indexed slice.
    #[inline(always)]
    pub fn as_mut_ui(&mut self) -> &mut [T] {
        &mut self.1
    }

    /// Unchecked element access.
    ///
    /// # Safety
    /// `i` must be in bounds.
    #[inline(always)]
    pub unsafe fn get_unchecked(&self, i: I) -> &T {
        self.1.get_unchecked(i.as_usize())
    }

    /// Unchecked mutable element access.
    ///
    /// # Safety
    /// `i` must be in bounds.
    #[inline(always)]
    pub unsafe fn get_unchecked_mut(&mut self, i: I) -> &mut T {
        self.1.get_unchecked_mut(i.as_usize())
    }

    /// Checked element access.
    #[inline(always)]
    pub fn get(&self, i: I) -> Option<&T> {
        self.1.get(i.as_usize())
    }

    /// Checked mutable element access.
    #[inline(always)]
    pub fn get_mut(&mut self, i: I) -> Option<&mut T> {
        self.1.get_mut(i.as_usize())
    }

    /// Splits into (elements before `i`, element at `i`, elements after `i`).
    ///
    /// Panics if `i` is out of bounds.
    #[inline]
    pub fn split_around(&self, i: I) -> (&[T], &T, &[T]) {
        let i = i.as_usize();
        let p = self.1.as_ptr();
        let n = self.1.len();
        assert!(i < n);
        // SAFETY: the assert guarantees i < n, so [0, i), {i} and (i, n) are
        // all in-bounds, non-overlapping sub-ranges of the slice.
        unsafe {
            (
                slice::from_raw_parts(p, i),
                &*p.add(i),
                slice::from_raw_parts(p.add(i + 1), n - i - 1),
            )
        }
    }

    /// Mutable counterpart of [`Slice::split_around`]; the three returned
    /// borrows are disjoint, which is why raw pointers are needed.
    #[inline]
    pub fn split_around_mut(&mut self, i: I) -> (&mut [T], &mut T, &mut [T]) {
        let i = i.as_usize();
        let p = self.1.as_mut_ptr();
        let n = self.1.len();
        assert!(i < n);
        // SAFETY: as in split_around, the three ranges are in-bounds and disjoint.
        unsafe {
            (
                slice::from_raw_parts_mut(p, i),
                &mut *p.add(i),
                slice::from_raw_parts_mut(p.add(i + 1), n - i - 1),
            )
        }
    }

    /// Iterates over shared references to the elements.
    #[inline(always)]
    pub fn iter(&self) -> slice::Iter<T> {
        self.as_ui().iter()
    }

    /// Like `iter().enumerate()` but yields the typed index `I`.
    #[inline(always)]
    pub fn enum_iter(&self) -> impl Iterator<Item = (I, &T)> {
        self.iter()
            .enumerate()
            // SAFETY-adjacent: every enumerated position is < len, which is
            // representable as I by the construction invariant.
            .map(|(i, v)| (unsafe { I::from_usize_unchecked(i) }, v))
    }

    /// Iterates over mutable references to the elements.
    #[inline(always)]
    pub fn iter_mut(&mut self) -> slice::IterMut<T> {
        self.as_mut_ui().iter_mut()
    }

    /// Mutable counterpart of [`Slice::enum_iter`].
    #[inline(always)]
    pub fn enum_iter_mut(&mut self) -> impl Iterator<Item = (I, &mut T)> {
        self.iter_mut()
            .enumerate()
            .map(|(i, v)| (unsafe { I::from_usize_unchecked(i) }, v))
    }

    /// Maps every `(index, element)` pair through `f` into a new `Array`.
    #[inline(always)]
    pub fn map<U>(&self, mut f: impl FnMut(I, &T) -> U) -> Array<I, U> {
        Array::new(self.len(), |i| f(i, unsafe { self.get_unchecked(i) }))
    }

    /// Copies the contents into an owned `Array`.
    #[inline(always)]
    pub fn as_array(&self) -> Array<I, T>
    where
        T: Copy,
    {
        Array(Box::<[T]>::from(self.as_ui()), PhantomData)
    }

    /// Like [`Slice::map`], but index `ix` is mapped through the `FnOnce`
    /// `fi` while every other index goes through `fr`.
    #[inline(always)]
    pub fn map_update<U>(
        &self,
        ix: I,
        fi: impl FnOnce(&T) -> U,
        mut fr: impl FnMut(I, &T) -> U,
    ) -> Array<I, U> {
        assert!(self.len() > ix);
        // The FnOnce is smuggled through the FnMut closure below inside a
        // MaybeUninit; it is read (and thus called) exactly once because
        // `map` visits each index exactly once and `ix` is in range
        // (asserted above).
        let fi = mem::MaybeUninit::new(fi);
        self.map(move |i, v| {
            if i == ix {
                (unsafe { fi.as_ptr().read() })(v)
            } else {
                fr(i, v)
            }
        })
    }
}
/// Element-wise equality; permits comparing slices of different element types.
impl<I: Index, T: PartialEq<U>, U> PartialEq<Slice<I, U>> for Slice<I, T> {
    #[inline(always)]
    fn eq(&self, other: &Slice<I, U>) -> bool {
        self.1 == other.1
    }
}
impl<I: Index, T: Eq> Eq for Slice<I, T> {}
/// Orders by length first, then element-wise. Requires `T: Ord` (not merely
/// `PartialOrd`) so the result is always `Some` and agrees with `Ord` below.
impl<I: Index, T: Ord> PartialOrd for Slice<I, T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        let n = self.len();
        // Shorter slices sort first; only equal lengths fall through to the
        // element-wise comparison.
        match n.cmp(&other.len()) {
            cmp::Ordering::Equal => {}
            r => {
                return Some(r);
            }
        }
        cmp::PartialOrd::partial_cmp(&self.1, &other.1)
    }
}
/// Total order: shorter slices sort first; equal-length slices are compared
/// element-wise (mirrors the `PartialOrd` implementation).
impl<I: Index, T: Ord> Ord for Slice<I, T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        // The closure runs only when the lengths tie, preserving the
        // short-circuit of the original length check.
        self.len()
            .cmp(&other.len())
            .then_with(|| cmp::Ord::cmp(&self.1, &other.1))
    }
}
/// Hashes exactly like the underlying `[T]` slice.
impl<I: Index, T: hash::Hash> hash::Hash for Slice<I, T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, h: &mut H) {
        hash::Hash::hash_slice(self.as_ui(), h)
    }
}
/// Panicking element access using the typed index `I`.
impl<I: Index, T> ops::Index<I> for Slice<I, T> {
    type Output = T;
    #[inline(always)]
    fn index(&self, i: I) -> &T {
        &self.as_ui()[i.as_usize()]
    }
}
/// Panicking mutable element access using the typed index `I`.
impl<I: Index, T> ops::IndexMut<I> for Slice<I, T> {
    #[inline(always)]
    fn index_mut(&mut self, i: I) -> &mut T {
        &mut self.as_mut_ui()[i.as_usize()]
    }
}
/// An owned, boxed slice addressed by the typed index `I`.
///
/// `PhantomData<fn(I) -> I>` makes `I` invariant without the array actually
/// storing any `I` values.
pub struct Array<I: Index, T>(Box<[T]>, PhantomData<fn(I) -> I>);
impl<I: Index, T: Clone> Clone for Array<I, T> {
    #[inline]
    fn clone(&self) -> Self {
        Self(self.0.clone(), self.1)
    }
    #[inline]
    fn clone_from(&mut self, other: &Self) {
        // Delegating lets the boxed slice reuse its existing allocation when
        // the lengths match.
        self.0.clone_from(&other.0)
    }
}
impl<I: Index, T: fmt::Debug> fmt::Debug for Array<I, T> {
    // Debug output is identical to that of the underlying boxed slice.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}
impl<I: Index, T> Array<I, T> {
    /// Allocates an uninitialised buffer of `n` elements.
    fn alloc(n: Length<I>) -> Box<[MaybeUninit<T>]> {
        let ni = n.as_usize();
        let mut v = Vec::with_capacity(ni);
        // SAFETY: elements are `MaybeUninit<T>`, which is valid even when
        // uninitialised, so extending the length is sound.
        unsafe {
            v.set_len(ni);
        }
        v.into_boxed_slice()
    }
    /// Reinterprets a fully-initialised `MaybeUninit` buffer as an `Array`.
    ///
    /// # Safety
    /// Every element of `v` must have been initialised.
    #[inline]
    unsafe fn new_init(v: Box<[MaybeUninit<T>]>) -> Self {
        Self(Box::from_raw(Box::into_raw(v) as *mut _), PhantomData)
    }
    /// Reinterprets the initialised storage back into a `MaybeUninit` buffer;
    /// the caller takes over responsibility for dropping the elements.
    #[inline]
    fn into_init(self) -> Box<[MaybeUninit<T>]> {
        // SAFETY: `T` and `MaybeUninit<T>` have identical layout.
        unsafe { Box::from_raw(Box::into_raw(self.0) as *mut _) }
    }
    /// Creates an empty array.
    #[inline]
    pub fn new_empty() -> Self {
        Self::new_box(Box::<[T]>::default())
    }
    /// Creates an array of length `n`, initialising element `i` to `f(i)`.
    #[inline]
    pub fn new(n: Length<I>, mut f: impl FnMut(I) -> T) -> Array<I, T> {
        let mut p = Self::alloc(n);
        // SAFETY: every slot is written exactly once before `new_init` treats
        // the buffer as initialised, and `i < n` so the index conversion is
        // valid.
        unsafe {
            for (i, v) in p.iter_mut().enumerate() {
                v.as_mut_ptr().write(f(I::from_usize_unchecked(i)));
            }
            Self::new_init(p)
        }
    }
    /// Creates an array from a `Vec`, validating its length against `I`.
    #[inline]
    pub fn new_vec(v: Vec<T>) -> Array<I, T> {
        Self::new_box(v.into_boxed_slice())
    }
    /// Creates an array from a boxed slice, validating its length against `I`.
    #[inline]
    pub fn new_box(v: Box<[T]>) -> Array<I, T> {
        // Constructing the Length checks the length is representable by `I`.
        let _ = Length::<I>::new(v.len());
        Self(v, PhantomData)
    }
    /// Consumes the array, returning the underlying boxed slice.
    #[inline]
    pub fn into_boxed_slice(self) -> Box<[T]> {
        self.0
    }
    /// Consumes the array, mapping every element through `f`.
    ///
    /// When `T` and `U` have the same size the buffer is reused in place;
    /// otherwise a fresh buffer is allocated.
    /// NOTE(review): the in-place branch assumes `U`'s alignment is
    /// compatible with `T`'s when their sizes match — TODO confirm.
    #[inline(always)]
    pub fn map_into<U>(self, mut f: impl FnMut(I, T) -> U) -> Array<I, U> {
        if mem::size_of::<T>() != mem::size_of::<U>() {
            // Different sizes: move each element out of the old buffer into a
            // newly allocated one.
            let p0 = self.into_init();
            let n = p0.len();
            // SAFETY: each source slot is read exactly once (ownership moves
            // into `f`) and each destination slot is written exactly once;
            // `p0` drops as `MaybeUninit`, so moved-out elements are not
            // dropped again.
            unsafe {
                let mut p = Array::<I, U>::alloc(Length::new_unchecked(n));
                for i in 0..n {
                    p.get_unchecked_mut(i).as_mut_ptr().write(f(
                        I::from_usize_unchecked(i),
                        p0.get_unchecked(i).as_ptr().read(),
                    ))
                }
                Array::new_init(p)
            }
        } else {
            // Same size: move each `T` out, overwrite its slot with the
            // produced `U`, then reinterpret the buffer's element type.
            let mut p = self.into_init();
            // SAFETY: each slot is read before being overwritten, and equal
            // sizes make the pointer cast and final reinterpretation valid.
            unsafe {
                for i in 0..p.len() {
                    (p.get_unchecked_mut(i).as_mut_ptr() as *mut U).write(f(
                        I::from_usize_unchecked(i),
                        p.get_unchecked(i).as_ptr().read(),
                    ))
                }
                Array::new_init(Box::from_raw(Box::into_raw(p) as *mut [MaybeUninit<U>]))
            }
        }
    }
}
/// `Array` dereferences to `Slice`, inheriting all of its read methods.
impl<I: Index, T> ops::Deref for Array<I, T> {
    type Target = Slice<I, T>;
    #[inline(always)]
    fn deref(&self) -> &Slice<I, T> {
        // SAFETY: the length was validated against `I` when the array was
        // constructed, which is presumably `at_unchecked`'s requirement.
        unsafe { Slice::at_unchecked(&*self.0) }
    }
}
impl<I: Index, T> ops::DerefMut for Array<I, T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Slice<I, T> {
        // SAFETY: as for `deref` above.
        unsafe { Slice::at_unchecked_mut(&mut *self.0) }
    }
}
/// Element-wise equality, delegating to the `Slice` comparison.
impl<I: Index, T: PartialEq<U>, U> PartialEq<Array<I, U>> for Array<I, T> {
    #[inline(always)]
    fn eq(&self, other: &Array<I, U>) -> bool {
        **self == **other
    }
    // `ne` is forwarded explicitly so the element type's own `ne` is used
    // rather than the default negation of `eq`.
    #[inline(always)]
    fn ne(&self, other: &Array<I, U>) -> bool {
        **self != **other
    }
}
impl<I: Index, T: Eq> Eq for Array<I, T> {}
impl<I: Index, T: Ord> PartialOrd for Array<I, T> {
#[inline(always)]
fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
cmp::PartialOrd::partial_cmp(&**self, &**other)
}
}
impl<I: Index, T: Ord> Ord for Array<I, T> {
#[inline(always)]
fn cmp(&self, other: &Self) -> cmp::Ordering {
cmp::Ord::cmp(&**self, &**other)
}
}
/// Hashes exactly like the underlying `[T]` slice.
impl<I: Index, T: hash::Hash> hash::Hash for Array<I, T> {
    #[inline(always)]
    fn hash<H: hash::Hasher>(&self, h: &mut H) {
        hash::Hash::hash_slice(&*self.0, h)
    }
}
/// Forwards any indexing supported by `Slice` (e.g. the typed index `I`).
impl<I: Index, T, X> ops::Index<X> for Array<I, T>
where
    Slice<I, T>: ops::Index<X>,
{
    type Output = <Slice<I, T> as ops::Index<X>>::Output;
    #[inline(always)]
    fn index(&self, i: X) -> &Self::Output {
        &(**self)[i]
    }
}
/// Forwards any mutable indexing supported by `Slice`.
impl<I: Index, T, X> ops::IndexMut<X> for Array<I, T>
where
    Slice<I, T>: ops::IndexMut<X>,
{
    #[inline(always)]
    fn index_mut(&mut self, i: X) -> &mut Self::Output {
        &mut (**self)[i]
    }
}
/// Consuming iteration: yields the elements in index order.
impl<I: Index, T> IntoIterator for Array<I, T> {
    type Item = T;
    type IntoIter = vec::IntoIter<T>;
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // `Box<[T]>::into_vec` converts without copying, exactly like the
        // original `Vec::from`.
        self.into_boxed_slice().into_vec().into_iter()
    }
}
|
use tari_wallet::util::emoji::EmojiId;
use tari_crypto::tari_utilities::hex::Hex;
/// Demonstrates Tari emoji IDs: converts a hex public key to an emoji ID,
/// converts an emoji ID back to a hex public key, and checks validity.
fn main() {
// A known-valid emoji ID.
const EMOJI: &str = "🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹🎒";
// The same ID with its final emoji removed; validation should fail.
const EMOJI_SHORT: &str = "🐎🍴🌷🌟💻🐖🐩🐾🌟🐬🎧🐌🏦🐳🐎🐝🐢🔋👕🎸👿🍒🐓🎉💔🌹🏆🐬💡🎳🚦🍹";
// Convert a public key into its emoji ID
let eid = EmojiId::from_hex("70350e09c474809209824c6e6888707b7dd09959aa227343b5106382b856f73a").unwrap();
println!("{}",eid);
// Convert an emoji to public key (in hex)
let pubkey = EmojiId::str_to_pubkey(EMOJI).unwrap().to_hex();
println!("{}", pubkey);
// Test that both constants declared at the top validate as expected.
assert!(EmojiId::is_valid(EMOJI));
assert_eq!(EmojiId::is_valid(EMOJI_SHORT), false, "Missing checksum");
// TODO - check that emoji ID protects against transcription errors
println!("It's all good!");
}
|
use config::Config;
use crate::stock::StockRS;
mod config;
mod stockplotter;
mod timeseries;
mod mysql_db;
mod alphavantage;
mod stock;
mod buy;
/// Entry point: reads the configuration, loads the configured stocks, then
/// updates the database, plots them, and runs the backtest.
fn main() {
    let config = Config::read_config();
    let mut stocks = StockRS::from_config(&config);
    stocks.update_db();
    stocks.plot();
    stocks.backtest();
}
|
use std::ffi::CStr;
use std::os::raw::c_char;
use std::ptr;
use libc::c_int;
/// Smoke-tests `sasl_version_info`, printing the implementation name and the
/// version components the library reports.
#[test]
fn test_version() {
    // Out-parameters filled in by the C library.
    let mut implementation: *const c_char = ptr::null();
    let mut version_string: *const c_char = ptr::null();
    let mut version_major: c_int = 0;
    let mut version_minor: c_int = 0;
    let mut version_step: c_int = 0;
    let mut version_patch: c_int = 0;
    unsafe {
        sasl2_sys::sasl::sasl_version_info(
            &mut implementation,
            &mut version_string,
            &mut version_major,
            &mut version_minor,
            &mut version_step,
            &mut version_patch,
        );
        // NOTE(review): assumes the library fills both pointers with valid,
        // NUL-terminated strings — `CStr::from_ptr` is UB otherwise.
        println!(
            "implementation={:?} version_string={:?} version={}.{}.{}-{}",
            CStr::from_ptr(implementation),
            CStr::from_ptr(version_string),
            version_major,
            version_minor,
            version_step,
            version_patch
        );
    }
}
/// Verifies that dependency versions quoted in README.md match Cargo.toml.
#[test]
fn test_readme_deps() {
    version_sync::assert_markdown_deps_updated!("../README.md");
}
/// Verifies that the `html_root_url` in src/lib.rs matches the crate version.
#[test]
fn test_html_root_url() {
    version_sync::assert_html_root_url_updated!("src/lib.rs");
}
|
// Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_catalog::table::Table;
use common_catalog::table_context::TableContext;
use common_exception::Result;
use common_expression::types::StringType;
use common_expression::utils::FromData;
use common_expression::DataBlock;
use common_expression::TableDataType;
use common_expression::TableField;
use common_expression::TableSchemaRefExt;
use common_meta_app::principal::UserSettingValue;
use common_meta_app::schema::TableIdent;
use common_meta_app::schema::TableInfo;
use common_meta_app::schema::TableMeta;
use snailquote::escape;
use crate::SyncOneBlockSystemTable;
use crate::SyncSystemTable;
/// The `system.settings` table, exposing the session's settings as rows.
pub struct SettingsTable {
    table_info: TableInfo,
}
impl SyncSystemTable for SettingsTable {
    const NAME: &'static str = "system.settings";

    fn get_table_info(&self) -> &TableInfo {
        &self.table_info
    }

    /// Materialises every session setting into one block with six string
    /// columns: name, value, default, level, description, type.
    fn get_full_data(&self, ctx: Arc<dyn TableContext>) -> Result<DataBlock> {
        let settings = ctx.get_settings().get_setting_values();
        // Build the byte columns directly. The previous version collected six
        // `Vec<String>`s and then re-collected every string into `Vec<u8>` in
        // a second pass, doubling the allocations for identical output.
        let mut names: Vec<Vec<u8>> = vec![];
        let mut values: Vec<Vec<u8>> = vec![];
        let mut defaults: Vec<Vec<u8>> = vec![];
        let mut levels: Vec<Vec<u8>> = vec![];
        let mut descs: Vec<Vec<u8>> = vec![];
        let mut types: Vec<Vec<u8>> = vec![];
        for vals in settings {
            // Name.
            names.push(vals.0.into_bytes());
            // Value (escaped debug formatting, as before).
            values.push(escape(format!("{:?}", vals.1).as_str()).to_string().into_bytes());
            // Default Value.
            defaults.push(escape(format!("{:?}", vals.2).as_str()).to_string().into_bytes());
            // Scope level.
            levels.push(vals.3.into_bytes());
            // Desc.
            descs.push(vals.4.into_bytes());
            // The type column is derived from the default value's variant.
            let typename = match vals.2 {
                UserSettingValue::UInt64(_) => "UInt64",
                UserSettingValue::String(_) => "String",
            };
            // Types.
            types.push(typename.as_bytes().to_vec());
        }
        Ok(DataBlock::new_from_columns(vec![
            StringType::from_data(names),
            StringType::from_data(values),
            StringType::from_data(defaults),
            StringType::from_data(levels),
            StringType::from_data(descs),
            StringType::from_data(types),
        ]))
    }
}
impl SettingsTable {
    /// Builds the `system.settings` table definition with its six string
    /// columns (name, value, default, level, description, type).
    pub fn create(table_id: u64) -> Arc<dyn Table> {
        let schema = TableSchemaRefExt::create(vec![
            TableField::new("name", TableDataType::String),
            TableField::new("value", TableDataType::String),
            TableField::new("default", TableDataType::String),
            TableField::new("level", TableDataType::String),
            TableField::new("description", TableDataType::String),
            TableField::new("type", TableDataType::String),
        ]);
        let table_info = TableInfo {
            desc: "'system'.'settings'".to_string(),
            name: "settings".to_string(),
            ident: TableIdent::new(table_id, 0),
            meta: TableMeta {
                schema,
                engine: "SystemSettings".to_string(),
                ..Default::default()
            },
            ..Default::default()
        };
        SyncOneBlockSystemTable::create(SettingsTable { table_info })
    }
}
|
use std::fs;
use std::sync::Arc;
use urlencoding::decode;
use crate::path_utils::get_path;
use crate::request::{Request, RequestHandler, ResponseResult};
use crate::{Opts, Response};
/// Handler for HTTP GET requests.
pub struct Get;

impl RequestHandler for Get {
    /// Serves a file, a directory listing, or a 404 page.
    ///
    /// - A directory containing `index.html` serves that file.
    /// - Otherwise a directory is rendered as an HTML listing of its entries.
    /// - A missing path yields a 404 response.
    fn get_response<'a>(req: &'a Request, opts: Arc<Opts>) -> ResponseResult<'a> {
        // NOTE(review): `decode(...).unwrap()` panics on malformed
        // percent-encoding in the URI — consider responding 400 instead.
        let path = get_path(
            opts.directory.as_str(),
            decode(req.status_line.uri).unwrap().as_str(),
        );
        // default to index.html for directories
        let file_path = if path.is_dir() {
            path.join("index.html")
        } else {
            path.to_path_buf()
        };
        if file_path.exists() {
            let content = fs::read(&file_path)?;
            Ok(Response::ok(200, file_path, content))
        } else if path.is_dir() {
            let dir_contents = path
                .read_dir()?
                .map(|file| file.unwrap().file_name())
                .collect::<Vec<_>>();
            let contents_html = dir_contents
                .iter()
                .map(|file| {
                    // Fixed: the anchor previously rendered as
                    // `<a href="..."</a>name` — the closing `>` of the start
                    // tag was missing and the link text fell outside the tag.
                    format!(
                        "<li><a href=\"{}\">{}</a></li>",
                        get_path(req.status_line.uri, file.to_str().unwrap())
                            .to_str()
                            .unwrap(),
                        file.to_str().unwrap()
                    )
                })
                .collect::<Vec<_>>()
                .join("");
            Ok(Response::ok(
                200,
                path,
                format!(
                    "<h1>Directory Listing</h1><ul><li><a href=\"{}\">..</a></li>{}</ul>",
                    get_path(req.status_line.uri, "..").to_str().unwrap(),
                    contents_html
                )
                .into_bytes(),
            ))
        } else {
            Ok(Response::error(
                404,
                Some(format!("File <code>{}</code> does not exist", req.status_line.uri).as_str()),
            ))
        }
    }
}
|
extern crate cc;
/// Build script: compiles the C source that exports the uinput constants
/// into a static library named `constants`.
fn main() {
    cc::Build::new().file("csrc/uinput.c").compile("constants")
}
|
use num_traits::FromPrimitive;
use solana_program::{
account_info::AccountInfo, decode_error::DecodeError, entrypoint, entrypoint::ProgramResult,
msg, program_error::PrintProgramError, pubkey::Pubkey,
};
use crate::{error::BonfidaBotError, processor::Processor};
entrypoint!(process_instruction);
/// Program entrypoint: delegates to `Processor::process_instruction` and
/// prints any returned error before propagating it.
pub fn process_instruction(
    program_id: &Pubkey,
    accounts: &[AccountInfo],
    instruction_data: &[u8],
) -> ProgramResult {
    msg!("Entrypoint");
    if let Err(error) = Processor::process_instruction(program_id, accounts, instruction_data) {
        // catch the error so we can print it before returning it to the runtime
        error.print::<BonfidaBotError>();
        return Err(error);
    }
    Ok(())
}
impl PrintProgramError for BonfidaBotError {
    /// Logs a human-readable message for each error variant via `msg!`.
    fn print<E>(&self)
    where
        E: 'static + std::error::Error + DecodeError<E> + PrintProgramError + FromPrimitive,
    {
        match self {
            BonfidaBotError::InvalidInstruction => msg!("Error: Invalid instruction!"),
            BonfidaBotError::Overflow => msg!("Error: Arithmetic operation overflow!"),
            BonfidaBotError::LockedOperation => msg!("Error: Operation is locked in the current pool state!"),
            BonfidaBotError::NotEnoughFIDA => msg!("Error: Pool must contain a minimum amount of FIDA tokens"),
            BonfidaBotError::OperationTooSmall => msg!("Error: Operation was too small")
        }
    }
}
|
use crate::{error, NUMERICS};
use arrow::array::{Array, ArrayRef};
use arrow::datatypes::{DataType, TimeUnit};
use datafusion::common::{Result, ScalarValue};
use datafusion::logical_expr::{PartitionEvaluator, Signature, TypeSignature, Volatility};
use once_cell::sync::Lazy;
use std::borrow::Borrow;
use std::sync::Arc;
/// The name of the derivative window function.
pub(super) const NAME: &str = "derivative";
/// Valid signatures for the derivative window function.
///
/// Each numeric value type is accepted together with a nanosecond duration
/// (the unit argument) and a nanosecond timestamp (the time column).
pub(super) static SIGNATURE: Lazy<Signature> = Lazy::new(|| {
    Signature::one_of(
        NUMERICS
            .iter()
            .map(|dt| {
                TypeSignature::Exact(vec![
                    dt.clone(),
                    DataType::Duration(TimeUnit::Nanosecond),
                    DataType::Timestamp(TimeUnit::Nanosecond, None),
                ])
            })
            .collect(),
        Volatility::Immutable,
    )
});
/// Calculate the return type given the function signature.
///
/// The derivative is always `Float64`, regardless of the input value type.
pub(super) fn return_type(_: &[DataType]) -> Result<Arc<DataType>> {
    Ok(Arc::new(DataType::Float64))
}
/// Create a new partition_evaluator_factory.
pub(super) fn partition_evaluator_factory() -> Result<Box<dyn PartitionEvaluator>> {
    Ok(Box::new(DifferencePartitionEvaluator {}))
}
/// PartitionEvaluator which returns the derivative between input values,
/// in the provided units.
#[derive(Debug)]
struct DifferencePartitionEvaluator {}
impl PartitionEvaluator for DifferencePartitionEvaluator {
    /// Computes, row by row, `delta(value) / delta(time in units)` against
    /// the previous non-null row; rows up to and including the first
    /// non-null value, and all null rows, yield NULL.
    fn evaluate_all(&mut self, values: &[ArrayRef], _num_rows: usize) -> Result<Arc<dyn Array>> {
        assert_eq!(values.len(), 3);
        // values[0] is the value column; values[2] is the time column.
        let array = Arc::clone(&values[0]);
        let times = Arc::clone(&values[2]);
        // The second element of the values array is the second argument to
        // the 'derivative' function. This specifies the unit duration for the
        // derivation to use.
        //
        // INVARIANT:
        // The planner guarantees that the second argument is always a duration
        // literal.
        let unit = ScalarValue::try_from_array(&values[1], 0)?;
        let mut idx: usize = 0;
        // `try_into` on a DataType yields that type's null scalar placeholder.
        let mut last: ScalarValue = array.data_type().try_into()?;
        let mut last_time: ScalarValue = times.data_type().try_into()?;
        let mut derivative: Vec<ScalarValue> = vec![];
        // First pass: emit NULL for every row up to and including the first
        // non-null value, which seeds `last`/`last_time`.
        while idx < array.len() {
            last = ScalarValue::try_from_array(&array, idx)?;
            last_time = ScalarValue::try_from_array(&times, idx)?;
            derivative.push(ScalarValue::Float64(None));
            idx += 1;
            if !last.is_null() {
                break;
            }
        }
        // Second pass: null values emit NULL and do not advance `last`;
        // non-null values emit the derivative and become the new baseline.
        while idx < array.len() {
            let v = ScalarValue::try_from_array(&array, idx)?;
            let t = ScalarValue::try_from_array(&times, idx)?;
            if v.is_null() {
                derivative.push(ScalarValue::Float64(None));
            } else {
                derivative.push(ScalarValue::Float64(Some(
                    delta(&v, &last)? / delta_time(&t, &last_time, &unit)?,
                )));
                last = v.clone();
                last_time = t.clone();
            }
            idx += 1;
        }
        Ok(Arc::new(ScalarValue::iter_to_array(derivative)?))
    }
    // The evaluator consumes the whole partition, not a window frame.
    fn uses_window_frame(&self) -> bool {
        false
    }
    fn include_rank(&self) -> bool {
        false
    }
}
/// Computes `curr - prev` as `f64` for matching non-null numeric scalars;
/// mixed-type or null operands are an internal error.
fn delta(curr: &ScalarValue, prev: &ScalarValue) -> Result<f64> {
    match (curr.borrow(), prev.borrow()) {
        (ScalarValue::Float64(Some(curr)), ScalarValue::Float64(Some(prev))) => Ok(*curr - *prev),
        (ScalarValue::Int64(Some(curr)), ScalarValue::Int64(Some(prev))) => {
            Ok(*curr as f64 - *prev as f64)
        }
        (ScalarValue::UInt64(Some(curr)), ScalarValue::UInt64(Some(prev))) => {
            Ok(*curr as f64 - *prev as f64)
        }
        _ => error::internal("derivative attempted on unsupported values"),
    }
}
/// Computes the elapsed time between two nanosecond timestamps, expressed as
/// a multiple of `unit`.
///
/// NOTE(review): SIGNATURE declares the unit argument as
/// `Duration(Nanosecond)`, but this matches `IntervalMonthDayNano` — confirm
/// which scalar the planner actually supplies here.
fn delta_time(curr: &ScalarValue, prev: &ScalarValue, unit: &ScalarValue) -> Result<f64> {
    if let (
        ScalarValue::TimestampNanosecond(Some(curr), _),
        ScalarValue::TimestampNanosecond(Some(prev), _),
        ScalarValue::IntervalMonthDayNano(Some(unit)),
    ) = (curr, prev, unit)
    {
        // A zero `unit` produces an infinite result; the caller divides by
        // this value directly.
        Ok((*curr as f64 - *prev as f64) / *unit as f64)
    } else {
        error::internal("derivative attempted on unsupported values")
    }
}
|
use std::collections::BTreeMap;
use super::super::ir::NodeIR;
use super::graph::GraphNodeEntry;
use crate::ast;
use crate::context::{Context, NodeName};
use crate::error::{ExecBuildError, GraphCallError, GraphNodeError, Result};
use crate::execs::ExecIR;
use crate::externs::ExternIR;
use crate::graph::{Graph, RefGraph};
use crate::tensor::{IRData, TensorGraph, TensorNode};
/// Builds an AST value into its IR form within a mutable build `Context`.
pub trait ASTBuild<'a> {
    /// Extra data the build step needs (e.g. the parent node's name).
    type Args;
    /// The IR value produced by the build.
    type Output;
    fn build(self, ctx: &mut Context<'a>, args: Self::Args) -> Result<Self::Output>;
}
/// Mutable state accumulated while building one (non-extern) node.
pub struct NodeEntry<'a, 'b>
where
    'a: 'b,
{
    // Fully-qualified name of the node being built.
    name: NodeName,
    pub graph: RefGraph,
    pub ctx: &'b mut Context<'a>,
    // Tensor nodes built so far, in graph order.
    pub tensor_graph: TensorGraph,
    // Id of the most recently added tensor-graph node; validates ordering.
    pub last_tensor_id: u64,
}
impl<'a, 'b> NodeEntry<'a, 'b> {
    /// Creates an empty entry for node `name`, backed by `graph`.
    fn new(name: NodeName, graph: RefGraph, ctx: &'b mut Context<'a>) -> Self {
        Self {
            name,
            graph,
            ctx,
            tensor_graph: Default::default(),
            last_tensor_id: 0,
        }
    }
    /// Resolves every declared shape in `tensor_graph` through the graph's
    /// variable hints.
    fn hint_variables(&mut self, tensor_graph: &mut BTreeMap<u64, ast::GraphNode>) -> Result<()> {
        let graph = self.graph.borrow();
        for (&id, n) in tensor_graph.iter_mut() {
            if let Some(shapes) = &mut n.shapes {
                for (x, shape) in shapes.0.borrow_mut().iter_mut() {
                    if let Some(shape) = shape {
                        let out = ast::Out::new(id, x.clone());
                        *shape = graph.hint(&out, shape)?;
                    }
                }
            }
        }
        Ok(())
    }
    /// Imports a remote model referenced by a `use` declaration
    /// (not yet implemented).
    fn add_use(&mut self, name: String, u: ast::Use) -> Result<()> {
        // Step 1. get the source
        // Step 2. build
        // Step 3. store
        todo!()
    }
    /// Re-defines an existing node with overridden variables (`with`).
    fn add_with(&mut self, name: String, with: ast::With) -> Result<()> {
        // Step 1. get the node
        let mut node = self.get(&name)?;
        // Step 2. apply variables
        let args = {
            let graph = self.graph.borrow();
            with.graph
                .into_iter()
                .map(|(k, v)| {
                    let value = graph.replace_to(Some(v))?;
                    Ok((k, value))
                })
                .collect::<Result<_>>()?
        };
        node.apply_variables(args, false)?;
        // Step 3. store
        self.ctx.add_child(&self.name, node);
        Ok(())
    }
    /// Builds a child node definition and registers it under this node.
    fn add_child(&mut self, child: ast::Node) -> Result<()> {
        // Step 1. convert to file (a child carries no `use` declarations)
        let file = ast::File {
            uses: Default::default(),
            node: child,
        };
        // Step 2. build
        let node = file.build(self.ctx, self.name.clone())?;
        // Step 3. store
        self.ctx.add_child(&self.name, node);
        Ok(())
    }
    /// Validates and appends one tensor-graph node; ids must be contiguous
    /// and ascending.
    fn add_tensor_graph(&mut self, node: ast::GraphNode) -> Result<()> {
        let last_id = self.last_tensor_id;
        // NOTE(review): `&&` binds tighter than `||`, so this reads as
        // `id < last || ((id - last != 1) && !(last == 0 && id == 0))`;
        // confirm both id 0 and id 1 are intended to be valid first ids.
        if node.id < last_id || node.id - last_id != 1 && !(last_id == 0 && node.id == 0) {
            GraphNodeError::MismatchedId {
                expected: last_id + 1,
                given: node.id,
            }
            .into()
        } else {
            let id = node.id;
            GraphNodeEntry { root: self, node }.build()?;
            // store id
            self.last_tensor_id = id;
            Ok(())
        }
    }
    /// Finalises the entry into a `NodeIR`.
    fn build(mut self) -> NodeIR {
        NodeIR {
            data: IRData::with_tensor_graph(
                self.name.pop().unwrap(),
                self.graph,
                &self.tensor_graph,
            ),
            ty: ast::LetNodeType::Default,
            tensor_graph: self.tensor_graph,
            repeat: None,
        }
    }
    /// Looks up the node `name` relative to this node's scope.
    pub fn get(&mut self, name: &str) -> Result<TensorNode> {
        self.ctx.get(&self.name, name)
    }
    /// Returns the output shapes of the last tensor node that declares any.
    pub fn get_output_shapes(&self) -> Option<&ast::Shapes> {
        for node in self.tensor_graph.iter().rev() {
            if let Some(outputs) = node.get_output_shapes() {
                return Some(outputs);
            }
        }
        None
    }
    /// Finds the shape for `out`, searching tensor nodes from newest to
    /// oldest, and rewrites `out.id` to point just past the producing node.
    pub fn fetch_shape(&self, out: &mut ast::Out) -> Result<Option<ast::Shape>> {
        for node in self.tensor_graph.iter().rev() {
            // test id
            let node_id = node.get_id();
            if let Some(id) = &out.id {
                if node_id > *id {
                    continue;
                }
                if node_id < *id {
                    break;
                }
            }
            if let Some(shapes) = node.get_output_shapes() {
                if let Some(shape) = shapes.0.borrow().get(&out.name) {
                    out.id = Some(node_id + 1);
                    return Ok(shape.as_ref().cloned());
                }
            }
        }
        GraphNodeError::NoSuchInput { out: out.clone() }.into()
    }
}
impl<'a> ASTBuild<'a> for ast::File {
    type Args = NodeName;
    type Output = TensorNode;
    /// Builds a whole file into a tensor node, dispatching extern and exec
    /// node types to their specialised builders.
    fn build(self, ctx: &mut Context<'a>, parent: Self::Args) -> Result<Self::Output> {
        if self.node.ty.is_extern() {
            return Ok(ExternFile(self).build(ctx, ())?.into());
        }
        if self.node.ty.is_exec() {
            return Ok(ExecFile(self).build(ctx, ())?.into());
        }
        let mut node = self.node;
        let mut name = parent;
        name.push(node.name.clone());
        // Step 1. make a graph
        let graph: RefGraph =
            Graph::try_with_variables(ctx.root.seed.generate(), node.graph, false)?.into();
        ctx.add_graph(name.clone(), graph.clone());
        let mut entry = NodeEntry::new(name, graph, ctx);
        // Step 2. import remote models
        for (name, u) in self.uses {
            entry.add_use(name, u)?;
        }
        // Step 3. hint variables with tensor graph
        entry.hint_variables(&mut node.tensor_graph)?;
        // Step 4. re-define nodes (with)
        for (name, w) in node.withs {
            entry.add_with(name, w)?;
        }
        // Step 5. build children nodes
        for (_, child) in node.children {
            entry.add_child(child)?;
        }
        // Step 6. make a tensor graph
        for (_, n) in node.tensor_graph {
            entry.add_tensor_graph(n)?;
        }
        // Step 7. store
        Ok(entry.build().into())
    }
}
/// Build state for an extern node; tracks its (optional) input and output
/// shape declarations separately from the generic `NodeEntry` state.
struct ExternNodeEntry<'a, 'b> {
    inner: NodeEntry<'a, 'b>,
    ty: ast::ExternNodeType,
    input: Option<ast::Shapes>,
    output: Option<ast::Shapes>,
}
impl<'a, 'b> ExternNodeEntry<'a, 'b> {
    /// Wraps a `NodeEntry` with extern-specific state.
    fn new(inner: NodeEntry<'a, 'b>, ty: ast::ExternNodeType) -> Self {
        Self {
            inner,
            ty,
            input: None,
            output: None,
        }
    }
    /// Delegates shape hinting to the inner entry.
    fn hint_variables(&mut self, tensor_graph: &mut BTreeMap<u64, ast::GraphNode>) -> Result<()> {
        self.inner.hint_variables(tensor_graph)
    }
    /// Checks the tensor graph against the fixed layout each extern node
    /// type requires (Default: Input then Output; Data: Output; Optim: none).
    fn test_tensor_graph(&self, nodes: &BTreeMap<u64, ast::GraphNode>) -> Result<()> {
        ExternTensorGraphCondition {
            nodes,
            names: match self.ty {
                ast::ExternNodeType::Default => &["Input", "Output"],
                ast::ExternNodeType::Data => &["Output"],
                ast::ExternNodeType::Optim => &[],
            },
            ty_inputs: Some(ast::GraphInputsType::UseLast),
            args: Some(&[]),
            is_sized: None,
            repeatable: Some(false),
            is_id_zero: true,
        }
        .test()
    }
    /// Records the node's shape declaration as the input (id 0, Default type
    /// only) or the output.
    fn add_tensor_graph(&mut self, node: ast::GraphNode) {
        let target = match self.ty {
            ast::ExternNodeType::Default => {
                if node.id == 0 {
                    &mut self.input
                } else {
                    &mut self.output
                }
            }
            ast::ExternNodeType::Data => &mut self.output,
            ast::ExternNodeType::Optim => {
                unreachable!("the optim node cannot have the tensor graph")
            }
        };
        *target = node.shapes;
    }
    /// Finalises into a `NodeIR` wrapping a single extern IR node.
    fn build(mut self) -> NodeIR {
        let name = self.inner.name.pop().unwrap();
        let extern_node = ExternIR::new_first(
            self.ty,
            name.clone(),
            self.inner.graph,
            self.input,
            self.output,
        );
        let graph = extern_node.data.graph.clone();
        let tensor_graph = TensorGraph::new_one(extern_node.into());
        NodeIR {
            data: IRData::with_tensor_graph(name, graph, &tensor_graph),
            ty: ast::LetNodeType::Extern(self.ty),
            tensor_graph,
            repeat: None,
        }
    }
}
/// Newtype driving the extern-node build path for a file.
struct ExternFile(ast::File);
impl<'a> ASTBuild<'a> for ExternFile {
    type Args = ();
    type Output = NodeIR;
    fn build(self, ctx: &mut Context<'a>, (): Self::Args) -> Result<Self::Output> {
        let file = self.0;
        let mut node = file.node;
        let ty = node.ty.unwrap_extern();
        // Step 1. make a graph
        let graph = Graph::try_with_variables(ctx.root.seed.generate(), node.graph, false)?.into();
        let entry = NodeEntry::new(vec![node.name], graph, ctx);
        let mut entry = ExternNodeEntry::new(entry, ty);
        // Step 2. hint variables with tensor graph
        entry.hint_variables(&mut node.tensor_graph)?;
        // Step 3. make a tensor graph (validated against the extern layout)
        entry.test_tensor_graph(&node.tensor_graph)?;
        for (_, n) in node.tensor_graph {
            entry.add_tensor_graph(n);
        }
        // Step 4. store
        Ok(entry.build())
    }
}
/// Declarative description of the tensor graph an extern node must have;
/// `test` verifies an actual graph against it.
pub struct ExternTensorGraphCondition<'a> {
    pub nodes: &'a BTreeMap<u64, ast::GraphNode>,
    // Expected node names, in id order.
    pub names: &'static [&'static str],
    pub ty_inputs: Option<ast::GraphInputsType>,
    // note: the args should be sorted
    pub args: Option<&'static [&'static str]>,
    pub is_sized: Option<bool>,
    pub repeatable: Option<bool>,
    // Whether node ids must equal their position (starting at 0).
    pub is_id_zero: bool,
}
impl<'a> ExternTensorGraphCondition<'a> {
    /// Runs every check; returns the first mismatch as an error.
    pub fn test(self) -> Result<()> {
        // test the number of nodes
        if self.nodes.len() != self.names.len() {
            return GraphNodeError::MismatchedSize {
                expected: self.names,
                given: self.nodes.len(),
            }
            .into();
        }
        for (id, (name, node)) in self.names.iter().zip(self.nodes.values()).enumerate() {
            self.test_each_node(&[name], id as u64, node)?;
        }
        Ok(())
    }
    /// Verifies one node against the expected `names` and position `id`.
    fn test_each_node(
        &self,
        names: &[&'static str],
        id: u64,
        node: &'a ast::GraphNode,
    ) -> Result<()> {
        // Step 1. test the number of calls (should be 1)
        {
            let given = node.calls.len();
            if given != 1 {
                return GraphCallError::MismatchedSize {
                    expected: names.to_vec(),
                    given,
                }
                .into();
            }
        }
        // Step 2. test the node id
        if self.is_id_zero && id != node.id {
            return GraphNodeError::MismatchedId {
                expected: id,
                given: node.id,
            }
            .into();
        }
        let call = &node.calls[0];
        let name = &call.name;
        // Step 3. test the name
        if !names.contains(&name.as_str()) {
            return GraphCallError::MismatchedName {
                expected: names.to_vec(),
                given: name.clone(),
            }
            .into();
        }
        // Step 4. test inputs
        if let Some(expected) = self.ty_inputs {
            let given = call.get_inputs_ty();
            if expected != given {
                return GraphCallError::MismatchedInputsType { expected, given }.into();
            }
        }
        // Step 5. test repeat
        if let Some(expected) = self.repeatable {
            let given = call.repeat.is_some();
            if expected != given {
                return GraphCallError::MismatchedRepeat { expected, given }.into();
            }
        }
        // Step 6. test the args
        if let Some(expected) = self.args {
            #[cfg(feature = "test-nightly")]
            {
                assert!(expected.is_sorted(), "the args should be sorted");
            }
            // note: the keywords are already sorted according to BTreeMap.
            let given = match &call.args {
                Some(args) => args.keys().collect(),
                None => vec![],
            };
            if given != expected {
                return GraphCallError::MismatchedArgs {
                    expected,
                    given: given.into_iter().cloned().collect(),
                }
                .into();
            }
        }
        // Step 7. test the size
        if let Some(expected) = self.is_sized {
            let given = node.shapes.is_some();
            if expected != given {
                return GraphNodeError::MismatchedShapesExistence { expected, given }.into();
            }
        }
        Ok(())
    }
}
/// Build state for an exec node: a name, its variable graph, and the
/// per-node lists of call names ("links").
struct ExecNodeEntry {
    name: String,
    graph: RefGraph,
    links: Vec<Vec<String>>,
}
impl ExecNodeEntry {
    /// Validates the tensor graph and captures it as links.
    fn try_new(
        name: String,
        graph: RefGraph,
        tensor_graph: BTreeMap<u64, ast::GraphNode>,
    ) -> Result<Self> {
        Ok(Self {
            name,
            graph,
            links: ExecNodeEntry::get_links(tensor_graph)?,
        })
    }
    /// Converts the tensor graph into call-name lists, enforcing that ids
    /// are 1-based and contiguous and that nodes carry only bare calls
    /// (no shapes, inputs, args, or repeats).
    fn get_links(tensor_graph: BTreeMap<u64, ast::GraphNode>) -> Result<Vec<Vec<String>>> {
        tensor_graph
            .into_iter()
            .enumerate()
            .map(|(expected, (given, node))| {
                let expected = expected as u64 + 1;
                // test id
                if expected != given {
                    return GraphNodeError::MismatchedId { expected, given }.into();
                }
                // test the shape
                if node.shapes.is_some() {
                    return GraphNodeError::UnexpectedShapes.into();
                }
                // test the calls
                if node.calls.is_empty() {
                    return GraphNodeError::EmptyCalls.into();
                }
                node.calls
                    .into_iter()
                    .map(|call| {
                        if call.inputs.is_some() {
                            return GraphCallError::UnexpectedInputs.into();
                        }
                        if call.args.is_some() {
                            return GraphCallError::UnexpectedArgs.into();
                        }
                        if call.repeat.is_some() {
                            return GraphCallError::UnexpectedRepeat.into();
                        }
                        Ok(call.name)
                    })
                    .collect()
            })
            .collect()
    }
    /// Finalises into an `ExecIR`.
    fn build(self) -> ExecIR {
        ExecIR {
            data: IRData::with_no_shapes(self.name, self.graph),
            links: self.links,
        }
    }
}
/// Newtype driving the exec-node build path for a file.
struct ExecFile(ast::File);
impl<'a> ASTBuild<'a> for ExecFile {
    type Args = ();
    type Output = ExecIR;
    fn build(self, ctx: &mut Context<'a>, (): Self::Args) -> Result<Self::Output> {
        let node = self.0.node;
        // Exec nodes may not carry `with` overrides or children, and their
        // tensor graph must be non-empty.
        if !node.withs.is_empty() {
            return ExecBuildError::UnexpectedWiths.into();
        }
        if !node.children.is_empty() {
            return ExecBuildError::UnexpectedChildren.into();
        }
        if node.tensor_graph.is_empty() {
            return ExecBuildError::EmptyGraph.into();
        }
        // Step 1. make a graph
        let graph = Graph::try_with_variables(ctx.root.seed.generate(), node.graph, true)?.into();
        let entry = ExecNodeEntry::try_new(node.name, graph, node.tensor_graph)?;
        // Step 2. store
        Ok(entry.build())
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::model::*,
cm_rust::{ComponentDecl, ExposeDecl, UseDecl},
directory_broker::RoutingFn,
failure::format_err,
fidl::endpoints::ServerEnd,
fidl_fidl_examples_echo::{EchoMarker, EchoRequest, EchoRequestStream},
fidl_fuchsia_io::{DirectoryMarker, NodeMarker},
fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync,
fuchsia_vfs_pseudo_fs::{
directory::{self, entry::DirectoryEntry},
file::simple::read_only,
},
futures::future::BoxFuture,
futures::lock::Mutex,
futures::prelude::*,
std::{
collections::{HashMap, HashSet},
convert::TryFrom,
sync::Arc,
},
};
/// Creates a routing function factory for `UseDecl` that does the following:
/// - Redirects all directory capabilities to a directory with the file "hello".
/// - Redirects all service capabilities to the echo service.
pub fn proxy_use_routing_factory() -> impl Fn(AbsoluteMoniker, UseDecl) -> RoutingFn {
    move |_abs_moniker: AbsoluteMoniker, use_decl: UseDecl| new_proxy_routing_fn(use_decl.into())
}
/// Creates a routing function factory for `ExposeDecl` that does the following:
/// - Redirects all directory capabilities to a directory with the file "hello".
/// - Redirects all service capabilities to the echo service.
pub fn proxy_expose_routing_factory() -> impl Fn(AbsoluteMoniker, ExposeDecl) -> RoutingFn {
    move |_abs_moniker: AbsoluteMoniker, expose_decl: ExposeDecl| {
        // The moniker is ignored; only the capability type picks behaviour.
        new_proxy_routing_fn(expose_decl.into())
    }
}
/// The broad kinds of capability a mock routing function can serve.
enum CapabilityType {
    Service,
    LegacyService,
    Directory,
    Storage,
    Runner,
}
impl From<UseDecl> for CapabilityType {
    fn from(use_: UseDecl) -> Self {
        match use_ {
            UseDecl::Service(_) => CapabilityType::Service,
            UseDecl::LegacyService(_) => CapabilityType::LegacyService,
            UseDecl::Directory(_) => CapabilityType::Directory,
            UseDecl::Storage(_) => CapabilityType::Storage,
            UseDecl::Runner(_) => CapabilityType::Runner,
        }
    }
}
impl From<ExposeDecl> for CapabilityType {
    // Unlike `UseDecl`, `ExposeDecl` has no storage variant.
    fn from(expose: ExposeDecl) -> Self {
        match expose {
            ExposeDecl::Service(_) => CapabilityType::Service,
            ExposeDecl::LegacyService(_) => CapabilityType::LegacyService,
            ExposeDecl::Directory(_) => CapabilityType::Directory,
            ExposeDecl::Runner(_) => CapabilityType::Runner,
        }
    }
}
/// Builds the mock routing function for a capability of type `ty`.
///
/// Directory/storage requests are served a pseudo directory with a read-only
/// "hello" file (contents "friend"); legacy service requests are served an
/// Echo implementation; service and runner requests panic.
fn new_proxy_routing_fn(ty: CapabilityType) -> RoutingFn {
    Box::new(
        move |flags: u32, mode: u32, relative_path: String, server_end: ServerEnd<NodeMarker>| {
            match ty {
                CapabilityType::Service => panic!("service capability unsupported"),
                CapabilityType::LegacyService => {
                    fasync::spawn(async move {
                        // Reinterpret the node channel as an Echo channel and
                        // echo every request back to the caller.
                        let server_end: ServerEnd<EchoMarker> =
                            ServerEnd::new(server_end.into_channel());
                        let mut stream: EchoRequestStream = server_end.into_stream().unwrap();
                        while let Some(EchoRequest::EchoString { value, responder }) =
                            stream.try_next().await.unwrap()
                        {
                            responder.send(value.as_ref().map(|s| &**s)).unwrap();
                        }
                    });
                }
                CapabilityType::Directory | CapabilityType::Storage => {
                    let mut sub_dir = directory::simple::empty();
                    sub_dir
                        .add_entry("hello", { read_only(move || Ok(b"friend".to_vec())) })
                        .map_err(|(s, _)| s)
                        .expect("Failed to add 'hello' entry");
                    sub_dir.open(flags, mode, &mut relative_path.split("/"), server_end);
                    // Keep the pseudo directory alive by polling it to
                    // completion on the executor.
                    fasync::spawn(async move {
                        let _ = sub_dir.await;
                    });
                }
                CapabilityType::Runner => {
                    // TODO(fxb/4761): Implement routing for runner caps.
                    panic!("runner capability unsupported");
                }
            }
        },
    )
}
/// A test resolver that serves `ComponentDecl`s registered under
/// `test:///<name>` component URLs.
pub struct MockResolver {
    // Registered declarations, keyed by the bare component name.
    components: HashMap<String, ComponentDecl>,
}

impl MockResolver {
    /// Creates a resolver with no registered components.
    pub fn new() -> Self {
        MockResolver { components: HashMap::new() }
    }

    /// Resolves `test:///<name>` to the registered declaration, converted to
    /// its FIDL form, with a `test:///<name>_resolved` resolved URL.
    async fn resolve_async(&self, component_url: String) -> Result<fsys::Component, ResolverError> {
        const NAME_PREFIX: &str = "test:///";
        debug_assert!(component_url.starts_with(NAME_PREFIX), "invalid component url");
        let (_, name) = component_url.split_at(NAME_PREFIX.len());
        // `ok_or_else` defers constructing the error (and its `format_err!`
        // allocation) to the failure path; the original `ok_or` built it on
        // every lookup (clippy: or_fun_call).
        let decl = self.components.get(name).ok_or_else(|| {
            ResolverError::component_not_available(
                name.to_string(),
                format_err!("not in the hashmap"),
            )
        })?;
        let fsys_decl =
            fsys::ComponentDecl::try_from(decl.clone()).expect("decl failed conversion");
        Ok(fsys::Component {
            resolved_url: Some(format!("test:///{}_resolved", name)),
            decl: Some(fsys_decl),
            package: None,
        })
    }

    /// Registers `component` under `test:///<name>`.
    pub fn add_component(&mut self, name: &str, component: ComponentDecl) {
        self.components.insert(name.to_string(), component);
    }
}
impl Resolver for MockResolver {
    /// Adapts the async resolution routine to the boxed-future `Resolver` trait.
    fn resolve<'a>(&'a self, component_url: &'a str) -> ResolverFut<'a> {
        Box::pin(self.resolve_async(component_url.to_string()))
    }
}
/// A test runner that records which component URLs were started and lets tests
/// serve each started component's outgoing and runtime directories.
pub struct MockRunner {
    // Resolved URLs of every component this runner has started, in order.
    pub urls_run: Arc<Mutex<Vec<String>>>,
    // Namespace each component was started with, keyed by resolved URL.
    pub namespaces: Namespaces,
    // Optional per-URL servers for a component's outgoing directory.
    pub host_fns: HashMap<String, Box<dyn Fn(ServerEnd<DirectoryMarker>) + Send + Sync>>,
    // Optional per-URL servers for a component's runtime directory.
    pub runtime_host_fns: HashMap<String, Box<dyn Fn(ServerEnd<DirectoryMarker>) + Send + Sync>>,
    // Resolved URLs for which `start` should fail with a launch error.
    failing_urls: HashSet<String>,
}
/// Shared map from resolved component URL to the namespace it was started with.
pub type Namespaces = Arc<Mutex<HashMap<String, fsys::ComponentNamespace>>>;
impl MockRunner {
    /// Creates a runner with no recorded starts, hooks, or failures.
    pub fn new() -> Self {
        MockRunner {
            urls_run: Arc::new(Mutex::new(vec![])),
            namespaces: Arc::new(Mutex::new(HashMap::new())),
            host_fns: HashMap::new(),
            runtime_host_fns: HashMap::new(),
            failing_urls: HashSet::new(),
        }
    }
    /// Makes subsequent starts of component `name` fail with a launch error.
    pub fn cause_failure(&mut self, name: &str) {
        self.failing_urls.insert(format!("test:///{}_resolved", name));
    }
    /// Records the start, stores the namespace, and invokes any registered
    /// directory-serving hooks for the component's resolved URL.
    async fn start_async(&self, start_info: fsys::ComponentStartInfo) -> Result<(), RunnerError> {
        let resolved_url = start_info.resolved_url.unwrap();
        if self.failing_urls.contains(&resolved_url) {
            return Err(RunnerError::component_launch_error(resolved_url, format_err!("ouch")));
        }
        self.urls_run.lock().await.push(resolved_url.clone());
        self.namespaces.lock().await.insert(resolved_url.clone(), start_info.ns.unwrap());
        // If no host_fn was provided, then start_info.outgoing_dir will be
        // automatically closed once it goes out of scope at the end of this
        // function.
        let host_fn = self.host_fns.get(&resolved_url);
        if let Some(host_fn) = host_fn {
            host_fn(start_info.outgoing_dir.unwrap());
        }
        let runtime_host_fn = self.runtime_host_fns.get(&resolved_url);
        if let Some(runtime_host_fn) = runtime_host_fn {
            runtime_host_fn(start_info.runtime_dir.unwrap());
        }
        Ok(())
    }
}
impl Runner for MockRunner {
    /// Adapts the async start routine to the boxed-future `Runner` trait.
    fn start(&self, start_info: fsys::ComponentStartInfo) -> BoxFuture<Result<(), RunnerError>> {
        Box::pin(self.start_async(start_info))
    }
}
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Traits for physical query plan, supporting parallel execution for partitioned relations.
use arrow::datatypes::Schema;
use arrow::record_batch::RecordBatch;
use std::sync::Arc;
use crate::error::Result;
/// Partition-aware execution plan for a relation
pub trait ExecutionPlan {
    /// Get the schema for this execution plan
    fn schema(&self) -> Arc<Schema>;
    /// Get the partitions for this execution plan. Each partition can be executed in parallel.
    fn partitions(&self) -> Result<Vec<Arc<Partition>>>;
}
/// Represents a partition of an execution plan that can be executed on a thread
pub trait Partition: Send + Sync {
    /// Execute this partition and return an iterator over RecordBatch
    fn execute(&self) -> Result<Arc<BatchIterator>>;
}
/// Iterator over RecordBatch that can be sent between threads
pub trait BatchIterator: Send + Sync {
    /// Get the next RecordBatch
    /// Returns `Ok(None)` once the stream of batches is exhausted.
    fn next(&self) -> Result<Option<RecordBatch>>;
}
|
#[macro_use]
mod macros;
pub mod constants;
pub mod functions;
|
#[macro_use]
extern crate clap;
// use jemallocator as our allocator
extern crate jemallocator;
use jemallocator::Jemalloc;
#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;
pub(crate) mod app_state;
pub(crate) mod collections;
pub(crate) mod queries;
#[cfg(test)]
mod tests;
use crate::app_state::AppState;
use crate::collections::FnCache;
use crate::queries::functions::*;
use crate::queries::make_fn_cache;
use actix_web::Responder;
use actix_web::{
error::ErrorInternalServerError, middleware::cors::Cors, middleware::Logger, server, App,
HttpRequest, Result,
};
use fn_search_backend::get_config;
use fn_search_backend_db::utils::get_db_url;
use percent_encoding::percent_decode;
use r2d2::Pool;
use r2d2_diesel::ConnectionManager;
use std::sync::Arc;
fn search(req: &HttpRequest<AppState>) -> Result<impl Responder> {
let sig: String = req.match_info().query("type_signature")?;
let sig = percent_decode(sig.as_bytes()).decode_utf8()?.to_string();
let cache: Arc<FnCache> = req.state().get_fn_cache();
let conn = req
.state()
.db_conn()
.map_err(|e| ErrorInternalServerError(e))?;
let res = (*cache).search(sig.as_str(), 10, None);
Ok(match res {
Some(ids) => {
let funcs = get_functions(&conn, ids).map_err(|e| ErrorInternalServerError(e))?;
serde_json::to_string(funcs.as_slice())?
}
None => String::from("[]"),
})
}
/// HTTP handler: return up to 10 signature suggestions for the decoded
/// type-signature fragment as JSON (`"[]"` when none are found).
fn suggest(req: &HttpRequest<AppState>) -> Result<impl Responder> {
    let raw: String = req.match_info().query("type_signature")?;
    let sig = percent_decode(raw.as_bytes()).decode_utf8()?.to_string();
    let cache: Arc<FnCache> = req.state().get_fn_cache();
    let body = match cache.suggest(sig.as_str(), 10) {
        Some(sigs) => serde_json::to_string(sigs.as_slice())?,
        None => String::from("[]"),
    };
    Ok(body)
}
fn update_fns(req: &HttpRequest<AppState>) -> Result<impl Responder> {
let sigs = {
let conn = req
.state()
.db_conn()
.map_err(|e| ErrorInternalServerError(e))?;
get_all_func_sigs(&(*conn)).map_err(|e| ErrorInternalServerError(e))?
}; // database connection goes out of scope, returning to pool
let fn_cache: FnCache = sigs.into_iter().collect();
req.state().update_fn_cache(fn_cache);
Ok("OK")
}
/// Entry point: parses CLI flags, configures logging, builds the database
/// connection pool, warms the function-signature cache, and serves the HTTP API.
fn main() {
    let matches: clap::ArgMatches = clap_app!(fn_search_backend_web =>
        (version: crate_version!())
        (author: crate_authors!())
        (about: crate_description!())
        (@arg CONFIG: -c --config +takes_value +required "configuration file")
        (@arg VERBOSITY: -v +multiple "Sets verbosity level.\n-v : error\n-vv : info\n-vvv : debug")
    )
    .get_matches();
    // Map the number of -v occurrences onto an actix_web log filter.
    let log_str = match matches.occurrences_of("VERBOSITY") {
        1 => "actix_web=error",
        2 => "actix_web=info",
        3 => "actix_web=debug",
        0 => "",
        _ => panic!("unknown log level"),
    };
    if log_str != "" {
        std::env::set_var("RUST_LOG", log_str);
    }
    env_logger::init();
    let cfg_file = matches
        .value_of("CONFIG")
        .expect("error parsing configuration file");
    let cfg = get_config(&cfg_file).expect("error loading configuration file");
    let cfg = Arc::new(cfg);
    // r2d2 pool shared (via clone) across all server worker threads.
    let pool = Pool::builder()
        .max_size(cfg.web.db_pool_size)
        .build(ConnectionManager::new(get_db_url(&cfg.clone().db)))
        .expect("error setting up database connection");
    // Warm the signature cache once at startup; handlers read it afterwards.
    let fn_cache = make_fn_cache(&*pool.get().expect("error connecting to database"));
    let cache = Arc::new(fn_cache.expect("error retrieving function type signatures"));
    let cfg_clone = cfg.clone();
    // The factory closure runs once per worker thread; each app gets clones
    // of the shared pool and cache.
    server::new(move || {
        App::with_state(AppState::new(pool.clone(), cache.clone())).configure(|app| {
            Cors::for_app(app)
                .allowed_origin(&cfg.web.allowed_origin)
                .resource("/search/{type_signature}", |r| r.f(search))
                .resource("/suggest/{type_signature}", |r| r.f(suggest))
                .resource("/update_functions", |r| r.f(update_fns))
                .register()
                .middleware(Logger::default())
        })
    })
    .bind(&cfg_clone.web.bind_address)
    .unwrap()
    .run();
}
|
/// Greedy interval count, sorting by right endpoint: picks an interval whenever
/// its left endpoint is strictly greater than the right endpoint of the last
/// picked interval, and returns how many were picked.
///
/// Takes a slice instead of `&Vec<_>` (a `&Vec` argument still coerces), and
/// uses `to_vec` instead of `iter().cloned().collect()`.
fn tak(a: &[(usize, usize)]) -> usize {
    let mut a = a.to_vec();
    a.sort_by_key(|p| p.1);
    let mut rr = 0;
    let mut ans = 0;
    for (l, r) in a {
        if rr < l {
            rr = r;
            ans += 1;
        }
    }
    ans
}
/// Greedy interval count, sorting by left endpoint: counts intervals whose
/// left endpoint is strictly greater than the maximum right endpoint seen so
/// far, updating that maximum after every interval.
///
/// Takes a slice instead of `&Vec<_>` (a `&Vec` argument still coerces), and
/// uses `to_vec` instead of `iter().cloned().collect()`.
fn aok(a: &[(usize, usize)]) -> usize {
    let mut a = a.to_vec();
    a.sort_by_key(|p| p.0);
    let mut rr = 0;
    let mut ans = 0;
    for (l, r) in a {
        if rr < l {
            ans += 1;
        }
        rr = std::cmp::max(rr, r);
    }
    ans
}
/// Reads `n` and `m` and prints `n` intervals for which the two greedy counts
/// (`tak` minus `aok`) differ by exactly `m`, or `-1` when no such set exists.
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    let n: i32 = rd.get();
    let m: i32 = rd.get();
    // Special case: a single interval with difference 0.
    if n == 1 && m == 0 {
        println!("1 2");
        return;
    }
    // Feasible only for 0 <= m <= n - 2.
    if m < 0 || m >= n - 1 {
        println!("-1");
        return;
    }
    let n = n as usize;
    let m = m as usize;
    let mut ans = vec![];
    // n - m - 1 wide intervals: left endpoints increase, right endpoints
    // decrease, so they are nested.
    for i in 0..(n - m - 1) {
        let l = i + 1;
        let r = 1_000_000_000 - l;
        ans.push((l, r));
    }
    // m + 1 short, pairwise-disjoint intervals of length 1.
    for i in 0..(m + 1) {
        let l = i * 2 + 1 + 5_000_000;
        let r = l + 1;
        ans.push((l, r));
    }
    let ta = tak(&ans);
    let ao = aok(&ans);
    // Sanity check: the construction achieves the required difference.
    assert_eq!(ta - ao, m);
    println!(
        "{}",
        ans.iter()
            .map(|(l, r)| format!("{} {}", l, r))
            .collect::<Vec<_>>()
            .join("\n")
    );
}
/// Minimal whitespace-delimited token reader for competitive-programming input.
pub struct ProconReader<R: std::io::Read> {
    reader: R,
}
impl<R: std::io::Read> ProconReader<R> {
    /// Wraps `reader`; no buffering of its own is added.
    pub fn new(reader: R) -> Self {
        Self { reader }
    }
    /// Reads the next token (delimited by space, `\n`, or `\r`) and parses it
    /// as `T`, panicking with "Parse Error." if parsing fails.
    pub fn get<T: std::str::FromStr>(&mut self) -> T {
        use std::io::Read;
        let mut token = Vec::new();
        // Skip leading separators, then collect bytes until the next
        // separator (which is consumed) or end of input.
        for byte in self.reader.by_ref().bytes().map(|b| b.unwrap()) {
            let is_sep = byte == b' ' || byte == b'\n' || byte == b'\r';
            if is_sep {
                if !token.is_empty() {
                    break;
                }
            } else {
                token.push(byte);
            }
        }
        std::str::from_utf8(&token)
            .unwrap()
            .parse()
            .ok()
            .expect("Parse Error.")
    }
}
|
extern crate rust_ctrt1;
/// Smoke-tests `ctrt1_abs` on a positive, zero, and negative input.
#[test]
fn utils1() {
    use rust_ctrt1::utils::ctrt1_abs;
    assert_eq!(ctrt1_abs(1), 1);
    assert_eq!(ctrt1_abs(0), 0);
    assert_eq!(ctrt1_abs(-2), 2);
}
|
use std::{fmt, marker::PhantomData};
use indexmap::IndexMap;
use serde::{
de::{self, Deserializer, IntoDeserializer as _},
ser::{SerializeMap as _, Serializer},
serde_if_integer128, Deserialize, Serialize,
};
use crate::{
ast::InputValue,
executor::ExecutionError,
parser::{ParseError, SourcePosition, Spanning},
validation::RuleError,
DefaultScalarValue, GraphQLError, Object, Value,
};
impl<T: Serialize> Serialize for ExecutionError<T> {
    /// Serializes as a map with `message`, `locations`, and `path` keys, plus
    /// `extensions` only when the error carries non-null extensions.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let mut map = ser.serialize_map(Some(4))?;
        map.serialize_key("message")?;
        map.serialize_value(self.error().message())?;
        // The single position is emitted as a one-element list.
        let locations = vec![self.location()];
        map.serialize_key("locations")?;
        map.serialize_value(&locations)?;
        map.serialize_key("path")?;
        map.serialize_value(self.path())?;
        // "extensions" is omitted entirely when there is nothing to report.
        if !self.error().extensions().is_null() {
            map.serialize_key("extensions")?;
            map.serialize_value(self.error().extensions())?;
        }
        map.end()
    }
}
impl Serialize for GraphQLError {
    /// Serializes every variant as a list: parse/validation errors use their
    /// own serializations; the remaining variants become a one-element list of
    /// `{"message": …}` objects.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        // Wrapper so a plain message string serializes as {"message": …}.
        #[derive(Serialize)]
        struct Helper {
            message: &'static str,
        }
        match self {
            Self::ParseError(e) => [e].serialize(ser),
            Self::ValidationError(es) => es.serialize(ser),
            Self::NoOperationProvided => [Helper {
                message: "Must provide an operation",
            }]
            .serialize(ser),
            Self::MultipleOperationsProvided => [Helper {
                message: "Must provide operation name \
                          if query contains multiple operations",
            }]
            .serialize(ser),
            Self::UnknownOperationName => [Helper {
                message: "Unknown operation",
            }]
            .serialize(ser),
            Self::IsSubscription => [Helper {
                message: "Expected query, got subscription",
            }]
            .serialize(ser),
            Self::NotSubscription => [Helper {
                message: "Expected subscription, got query",
            }]
            .serialize(ser),
        }
    }
}
impl<'de, S: Deserialize<'de>> Deserialize<'de> for InputValue<S> {
    /// Deserializes any self-describing input into an `InputValue`: scalars
    /// delegate to `S`'s deserialization, unit/none become `Null`, sequences
    /// become lists, and maps become objects.
    fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
        // PhantomData carries the scalar type parameter without storing an `S`.
        struct Visitor<S: ?Sized>(PhantomData<S>);
        impl<'de, S: Deserialize<'de>> de::Visitor<'de> for Visitor<S> {
            type Value = InputValue<S>;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("a valid input value")
            }
            // Every primitive visit_* below re-dispatches the primitive into
            // the scalar type `S` via its own deserializer, then wraps it.
            fn visit_bool<E: de::Error>(self, b: bool) -> Result<Self::Value, E> {
                S::deserialize(b.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_i8<E: de::Error>(self, n: i8) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_i16<E: de::Error>(self, n: i16) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_i32<E: de::Error>(self, n: i32) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_i64<E: de::Error>(self, n: i64) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            // 128-bit integers are only supported on platforms/serde versions
            // that provide them; the macro compiles the method conditionally.
            serde_if_integer128! {
                fn visit_i128<E: de::Error>(self, n: i128) -> Result<Self::Value, E> {
                    S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
                }
            }
            fn visit_u8<E: de::Error>(self, n: u8) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_u16<E: de::Error>(self, n: u16) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_u32<E: de::Error>(self, n: u32) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_u64<E: de::Error>(self, n: u64) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            serde_if_integer128! {
                fn visit_u128<E: de::Error>(self, n: u128) -> Result<Self::Value, E> {
                    S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
                }
            }
            fn visit_f32<E: de::Error>(self, n: f32) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_f64<E: de::Error>(self, n: f64) -> Result<Self::Value, E> {
                S::deserialize(n.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_char<E: de::Error>(self, c: char) -> Result<Self::Value, E> {
                S::deserialize(c.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
                S::deserialize(s.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_string<E: de::Error>(self, s: String) -> Result<Self::Value, E> {
                S::deserialize(s.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_bytes<E: de::Error>(self, b: &[u8]) -> Result<Self::Value, E> {
                S::deserialize(b.into_deserializer()).map(InputValue::Scalar)
            }
            fn visit_byte_buf<E: de::Error>(self, b: Vec<u8>) -> Result<Self::Value, E> {
                S::deserialize(b.into_deserializer()).map(InputValue::Scalar)
            }
            // Both "no value" and unit map onto the GraphQL null.
            fn visit_none<E: de::Error>(self) -> Result<Self::Value, E> {
                Ok(InputValue::Null)
            }
            fn visit_unit<E: de::Error>(self) -> Result<Self::Value, E> {
                Ok(InputValue::Null)
            }
            fn visit_seq<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>
            where
                V: de::SeqAccess<'de>,
            {
                let mut vals = Vec::new();
                while let Some(v) = visitor.next_element()? {
                    vals.push(v);
                }
                Ok(InputValue::list(vals))
            }
            fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>
            where
                V: de::MapAccess<'de>,
            {
                // IndexMap keeps the entries in their original order.
                let mut obj = IndexMap::<String, InputValue<S>>::with_capacity(
                    visitor.size_hint().unwrap_or(0),
                );
                while let Some((key, val)) = visitor.next_entry()? {
                    obj.insert(key, val);
                }
                Ok(InputValue::object(obj))
            }
        }
        de.deserialize_any(Visitor(PhantomData))
    }
}
impl<T: Serialize> Serialize for InputValue<T> {
    /// Serializes a literal input value; `Null` and (unresolved) `Variable`
    /// both serialize as unit.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        match self {
            Self::Null | Self::Variable(_) => ser.serialize_unit(),
            Self::Scalar(s) => s.serialize(ser),
            Self::Enum(e) => ser.serialize_str(e),
            // Strip the span metadata, serializing only the wrapped items.
            Self::List(l) => l.iter().map(|x| &x.item).collect::<Vec<_>>().serialize(ser),
            Self::Object(o) => o
                .iter()
                .map(|(k, v)| (k.item.as_str(), &v.item))
                .collect::<IndexMap<_, _>>()
                .serialize(ser),
        }
    }
}
impl Serialize for RuleError {
    /// Serializes as a two-entry map: `message` and `locations`.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let mut map = ser.serialize_map(Some(2))?;
        // `serialize_entry` is the documented shorthand for a
        // `serialize_key` + `serialize_value` pair.
        map.serialize_entry("message", self.message())?;
        map.serialize_entry("locations", self.locations())?;
        map.end()
    }
}
impl Serialize for SourcePosition {
    /// Serializes as `{"line": …, "column": …}`, converting the internal
    /// 0-based position to the 1-based numbers the response format uses.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let mut map = ser.serialize_map(Some(2))?;
        map.serialize_entry("line", &(self.line() + 1))?;
        map.serialize_entry("column", &(self.column() + 1))?;
        map.end()
    }
}
impl Serialize for Spanning<ParseError> {
    /// Serializes a positioned parse error as `{"message", "locations"}`, with
    /// a one-element locations list of 1-based line/column numbers.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let mut map = ser.serialize_map(Some(2))?;
        let msg = self.item.to_string();
        map.serialize_key("message")?;
        map.serialize_value(&msg)?;
        // Internal positions are 0-based; the response format is 1-based.
        let mut loc = IndexMap::new();
        loc.insert("line".to_owned(), self.start.line() + 1);
        loc.insert("column".to_owned(), self.start.column() + 1);
        let locations = vec![loc];
        map.serialize_key("locations")?;
        map.serialize_value(&locations)?;
        map.end()
    }
}
impl<T: Serialize> Serialize for Object<T> {
    /// Serializes the object as a map of its fields, in iteration order.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let mut map = ser.serialize_map(Some(self.field_count()))?;
        for (name, value) in self.iter() {
            // `serialize_entry` is the documented shorthand for a
            // `serialize_key` + `serialize_value` pair.
            map.serialize_entry(name, value)?;
        }
        map.end()
    }
}
impl<T: Serialize> Serialize for Value<T> {
    /// Serializes a resolved value by delegating to each variant's payload;
    /// `Null` serializes as unit.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        match self {
            Self::Null => ser.serialize_unit(),
            Self::Scalar(s) => s.serialize(ser),
            Self::List(l) => l.serialize(ser),
            Self::Object(o) => o.serialize(ser),
        }
    }
}
impl<'de> Deserialize<'de> for DefaultScalarValue {
    /// Deserializes any self-describing input into the default scalar type:
    /// booleans, 32-bit-range integers, floats, and strings. Integers outside
    /// the i32 range are accepted as floats (see inline comments).
    fn deserialize<D: Deserializer<'de>>(de: D) -> Result<Self, D::Error> {
        struct Visitor;
        impl<'de> de::Visitor<'de> for Visitor {
            type Value = DefaultScalarValue;
            fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
                f.write_str("a valid input value")
            }
            fn visit_bool<E: de::Error>(self, b: bool) -> Result<Self::Value, E> {
                Ok(DefaultScalarValue::Boolean(b))
            }
            fn visit_i64<E: de::Error>(self, n: i64) -> Result<Self::Value, E> {
                if n >= i64::from(i32::MIN) && n <= i64::from(i32::MAX) {
                    // In range: the conversion cannot fail.
                    Ok(DefaultScalarValue::Int(n.try_into().unwrap()))
                } else {
                    // Browser's `JSON.stringify()` serializes all numbers
                    // having no fractional part as integers (no decimal point),
                    // so we must parse large integers as floating point,
                    // otherwise we would error on transferring large floating
                    // point numbers.
                    // TODO: Use `FloatToInt` conversion once stabilized:
                    //       https://github.com/rust-lang/rust/issues/67057
                    Ok(DefaultScalarValue::Float(n as f64))
                }
            }
            fn visit_u64<E: de::Error>(self, n: u64) -> Result<Self::Value, E> {
                if n <= u64::try_from(i32::MAX).unwrap() {
                    // Fits in i32: reuse the signed path.
                    self.visit_i64(n.try_into().unwrap())
                } else {
                    // Browser's `JSON.stringify()` serializes all numbers
                    // having no fractional part as integers (no decimal point),
                    // so we must parse large integers as floating point,
                    // otherwise we would error on transferring large floating
                    // point numbers.
                    // TODO: Use `FloatToInt` conversion once stabilized:
                    //       https://github.com/rust-lang/rust/issues/67057
                    Ok(DefaultScalarValue::Float(n as f64))
                }
            }
            fn visit_f64<E: de::Error>(self, f: f64) -> Result<Self::Value, E> {
                Ok(DefaultScalarValue::Float(f))
            }
            fn visit_str<E: de::Error>(self, s: &str) -> Result<Self::Value, E> {
                self.visit_string(s.into())
            }
            fn visit_string<E: de::Error>(self, s: String) -> Result<Self::Value, E> {
                Ok(DefaultScalarValue::String(s))
            }
        }
        de.deserialize_any(Visitor)
    }
}
#[cfg(test)]
mod tests {
    use serde_json::{from_str, to_string};
    use crate::{
        ast::InputValue,
        graphql_input_value,
        value::{DefaultScalarValue, Object},
        FieldError, Value,
    };
    use super::{ExecutionError, GraphQLError};
    /// An in-range JSON integer deserializes as an Int scalar.
    #[test]
    fn int() {
        assert_eq!(
            from_str::<InputValue>("1235").unwrap(),
            graphql_input_value!(1235),
        );
    }
    /// Floats deserialize as Float — including integers too large for i32.
    #[test]
    fn float() {
        assert_eq!(
            from_str::<InputValue>("2.0").unwrap(),
            graphql_input_value!(2.0),
        );
        // large value without a decimal part is also float
        assert_eq!(
            from_str::<InputValue>("123567890123").unwrap(),
            graphql_input_value!(123_567_890_123.0),
        );
    }
    /// A message-only GraphQLError serializes as a one-element array.
    #[test]
    fn errors() {
        assert_eq!(
            to_string(&GraphQLError::UnknownOperationName).unwrap(),
            r#"[{"message":"Unknown operation"}]"#,
        );
    }
    /// Non-null extensions appear as an "extensions" key in the output.
    #[test]
    fn error_extensions() {
        let mut obj: Object<DefaultScalarValue> = Object::with_capacity(1);
        obj.add_field("foo", Value::scalar("bar"));
        assert_eq!(
            to_string(&ExecutionError::at_origin(FieldError::new(
                "foo error",
                Value::Object(obj),
            )))
            .unwrap(),
            r#"{"message":"foo error","locations":[{"line":1,"column":1}],"path":[],"extensions":{"foo":"bar"}}"#,
        );
    }
}
|
/*
value are supposed to be able to convert from u32
*/
// Fixed dimension: every matrix in this file is a square N x N matrix.
const N: usize = 3;
/// A fixed-size square matrix over any `Copy` element type.
#[derive(Clone, Copy, Eq, PartialEq)]
struct Matrix<T>
where
    T: Copy + Clone,
{
    // Row-major storage: val[row][col].
    val: [[T; N]; N],
}
// Debug Display
impl<T> std::fmt::Debug for Matrix<T>
where
T: std::fmt::Debug + Copy + Clone,
{
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let mut res = String::new();
for i in 0..N {
res = format!("{}{:?}\n", res, self.val[i]);
}
write!(f, "{}", res)
}
}
// constructors
#[allow(dead_code)]
impl<T> Matrix<T>
where
    T: From<i32> + Copy + Clone,
{
    /// Identity matrix: ones on the diagonal, zeros elsewhere.
    fn i() -> Self {
        let mut cells: [[T; N]; N] = [[T::from(0i32); N]; N];
        for d in 0..N {
            cells[d][d] = T::from(1i32);
        }
        Matrix { val: cells }
    }
    /// All-zero matrix.
    fn zero() -> Self {
        Matrix { val: [[T::from(0i32); N]; N] }
    }
    /// Permutation matrix that swaps rows (or columns) `pi` and `pj`.
    fn permutation(pi: usize, pj: usize) -> Self {
        let mut cells: [[T; N]; N] = [[T::from(0i32); N]; N];
        for row in 0..N {
            // Each row has a single 1: swapped for pi/pj, diagonal otherwise.
            let col = if row == pi {
                pj
            } else if row == pj {
                pi
            } else {
                row
            };
            cells[row][col] = T::from(1i32);
        }
        Matrix { val: cells }
    }
}
/// Builds a matrix directly from a row-major 2-D array.
impl<T: Copy + Clone> From<[[T; N]; N]> for Matrix<T> {
    fn from(m: [[T; N]; N]) -> Self {
        Matrix { val: m }
    }
}
/// Prints the identity and a converted literal matrix (visual smoke test).
#[test]
fn check_gen() {
    println!("{:?}", Matrix::<i64>::i());
    let m = [[1, 2, 34], [1, 2, 34], [1, 2, 34]];
    println!("{:?}", Matrix::from(m));
}
// methods
#[allow(dead_code)]
impl<T> Matrix<T>
where
    T: From<i32> + Copy + Clone,
{
    /// Returns the transpose (rows and columns swapped).
    fn transpose(&self) -> Self {
        let mut tval: [[T; N]; N] = [[T::from(0i32); N]; N];
        for i in 0..N {
            // Walk the upper triangle and mirror both elements in one pass
            // (the diagonal is written twice, harmlessly).
            for j in i..N {
                tval[j][i] = self.val[i][j];
                tval[i][j] = self.val[j][i];
            }
        }
        Matrix { val: tval }
    }
}
/// Demonstrates that multiplying by a permutation matrix swaps columns when
/// applied on the right and rows when applied on the left (visual smoke test).
#[test]
fn check_permutation() {
    let m: Matrix<i64> = Matrix::from([[3, 2, 1], [1, 4, 5], [1, 2, 3]]);
    let p12: Matrix<i64> = Matrix::permutation(1, 2);
    println!("m=\n{:?}", m);
    println!("p=\n{:?}", p12);
    // Multiplying by the permutation on the right swaps columns.
    println!("m*p=\n{:?}", m * p12);
    let m: Matrix<i64> = Matrix::from([[3, 2, 1], [1, 4, 5], [1, 2, 3]]);
    let p12: Matrix<i64> = Matrix::permutation(1, 2);
    // Multiplying by the permutation on the left swaps rows.
    println!("p*m=\n{:?}", p12 * m);
}
// operations
impl<T> std::ops::Neg for Matrix<T>
where
    T: std::ops::Neg<Output = T> + From<i32> + Copy + Clone,
{
    type Output = Matrix<T>;
    /// Element-wise negation.
    fn neg(self) -> Self::Output {
        let mut out = [[T::from(0i32); N]; N];
        for (r, row) in self.val.iter().enumerate() {
            for (c, cell) in row.iter().enumerate() {
                out[r][c] = -*cell;
            }
        }
        Matrix { val: out }
    }
}
// component wise
impl<T> std::ops::Add<Matrix<T>> for Matrix<T>
where
    T: std::ops::Add<Output = T> + std::fmt::Debug + From<i32> + Copy + Clone,
{
    type Output = Matrix<T>;
    /// Component-wise addition.
    fn add(self, rhs: Matrix<T>) -> Self::Output {
        let mut sum = [[T::from(0i32); N]; N];
        for (r, (lhs_row, rhs_row)) in self.val.iter().zip(rhs.val.iter()).enumerate() {
            for (c, (&x, &y)) in lhs_row.iter().zip(rhs_row.iter()).enumerate() {
                sum[r][c] = x + y;
            }
        }
        Matrix { val: sum }
    }
}
/// Exercises +, -, and * on identity and literal matrices (visual smoke test).
#[test]
fn check_ops() {
    println!("{:?}", Matrix::<i64>::i() + Matrix::<i64>::i());
    println!("{:?}", Matrix::<i64>::i() - Matrix::<i64>::i());
    println!("{:?}", Matrix::<i64>::i() * Matrix::<i64>::i());
    let m1 = Matrix::from([[3, 2, 1], [1, 4, 5], [1, 2, 3]]);
    let m2 = Matrix::from([[1, 2, 3], [3, 2, 1], [1, 3, 2]]);
    println!("{:?}", m1 * m2);
    // Second multiply checks the operands are usable again (Matrix is Copy).
    println!("{:?}", m1 * m2);
}
// component wise
impl<T> std::ops::Sub<Matrix<T>> for Matrix<T>
where
    T: std::ops::Sub<Output = T> + std::fmt::Debug + From<i32> + Copy + Clone,
{
    type Output = Matrix<T>;
    /// Component-wise subtraction.
    fn sub(self, rhs: Matrix<T>) -> Self::Output {
        let mut diff = [[T::from(0i32); N]; N];
        for (r, (lhs_row, rhs_row)) in self.val.iter().zip(rhs.val.iter()).enumerate() {
            for (c, (&x, &y)) in lhs_row.iter().zip(rhs_row.iter()).enumerate() {
                diff[r][c] = x - y;
            }
        }
        Matrix { val: diff }
    }
}
// Matrix product: (i x j) * (j x k) -> (i x k); all dimensions are N here.
impl<T> std::ops::Mul<Matrix<T>> for Matrix<T>
where
    T: std::ops::Add<Output = T>
        + std::ops::Mul<Output = T>
        + std::fmt::Debug
        + From<i32>
        + Copy
        + Clone,
{
    type Output = Matrix<T>;
    /// Standard triple-loop matrix multiplication, accumulating into `res`.
    fn mul(self, rhs: Matrix<T>) -> Self::Output {
        let mut res = [[T::from(0i32); N]; N];
        for i in 0..N {
            for j in 0..N {
                for k in 0..N {
                    // res[i][k] += self[i][j] * rhs[j][k]; written out because
                    // the bounds require only Add, not AddAssign.
                    res[i][k] = res[i][k] + (self.val[i][j] * rhs.val[j][k]);
                }
            }
        }
        Matrix { val: res }
    }
}
// Entry point is intentionally empty; this file is exercised via its #[test] fns.
fn main() {}
/// Multiplies a diagonal matrix of imaginary units by itself (visual test).
/// NOTE(review): relies on a `Complex` type defined elsewhere in this crate.
#[test]
fn check_complex() {
    let i = Complex::i();
    let cm: Matrix<Complex> = Matrix::from([
        [i, Complex::zero(), Complex::zero()],
        [Complex::zero(), i, Complex::zero()],
        [Complex::zero(), Complex::zero(), i],
    ]);
    println!("{:?}", cm * cm);
}
|
/// A named philosopher (dining-philosophers style example).
#[derive(Debug)]
struct Philosopher {
    name: String,
}
impl Philosopher {
    /// Creates a philosopher owning a copy of `name`.
    fn new(name: &str) -> Philosopher {
        Philosopher { name: String::from(name) }
    }
    /// Prints an introduction line for this philosopher.
    fn name(&self) {
        println!("philosopher is {}", self.name);
    }
}
fn main() {
let philosophers = vec![
Philosopher::new("hogehoge"),
Philosopher::new("hugahuga"),
Philosopher::new("piyopiyo"),
];
for p in philosophers {
p.name();
}
}
|
//! A library for serializing metric data into InfluxData's Line Protocol for
//! ingestion into influxdb.
use std::string::ToString;
use std::time::Duration;
pub use segment_derive::*;
#[macro_export]
/// Serialize tag, and field, values to the provided String buffer.
///
/// This macro specifically handles serialization by data type and line proto
/// type (tag vs field).
///
/// segment-derive uses this macro while generating the Metric::build(..)
/// method for the metric type. During the generation of that function information
/// about the field/tag's underlying data type is included so that optimal
/// serialization can be performed, along with escaping for strings, etc.
///
/// This macro is not intended to be used outside of segment-derive, but it is
/// not impossible to use it to create a custom metric that does not use
/// segment-derive to generate the Metric implementation.
macro_rules! segment_write {
    // Type suffix for influx integers, etc.
    // Floats and tags get no suffix; integer fields get the 'i' suffix.
    ( @type_suff, $b:ident, f32, $i:ident ) => { };
    ( @type_suff, $b:ident, f64, $i:ident ) => { };
    ( @type_suff, $b:ident, $t:tt, tag) => { };
    ( @type_suff, $b:ident, $t:tt, field ) => { $b.push('i'); };
    // Determine serialization for numerics: dtoa for floats, itoa otherwise.
    ( @num_ser, f32, $b:ident, $($i:ident).+ ) => { dtoa::write(&mut $b, $($i).*)?; };
    ( @num_ser, f64, $b:ident, $($i:ident).+ ) => { dtoa::write(&mut $b, $($i).*)?; };
    ( @num_ser, $t:tt, $b:ident, $($i:ident).+) => { itoa::write(&mut $b, $($i).*)?; };
    // Serialization for string types (escaping differs for tags vs fields).
    ( @str_ser, $b:ident, $val:expr, tag ) => {
        segment::build_escapedtagstr($val, $b);
    };
    ( @str_ser, $b:ident, $val:expr, field ) => {
        segment::build_escapedfieldstr($val, $b);
    };
    // Main Entry: dispatch on the declared type token of the tag/field.
    ( $b:ident, $($i:ident).+, String, $lf:ident ) => { segment::segment_write!(@str_ser, $b, &$($i).*, $lf); };
    ( $b:ident, $($i:ident).+, &str, $lf:ident ) => { segment::segment_write!(@str_ser, $b, $($i).*, $lf); };
    ( $b:ident, $($i:ident).+, &'static str, $lf:ident ) => { segment::segment_write!(@str_ser, $b, $($i).*, $lf) };
    ( $b:ident, $($i:ident).+, $t:tt, $lf:ident ) => {
        // SAFETY relies on itoa/dtoa emitting only valid UTF-8 into the
        // String's underlying byte vector.
        unsafe {
            let mut bytes = $b.as_mut_vec();
            segment::segment_write!(@num_ser, $t, bytes, $($i).*);
        }
        segment::segment_write!(@type_suff, $b, $t, $lf);
    };
}
/// Contains the value (and type) of a metric field.
pub enum FieldValue {
    /// A string field (rendered quoted and escaped).
    Str(String),
    /// Unsigned 32-bit integer field.
    UInt32(u32),
    /// Unsigned 64-bit integer field.
    UInt64(u64),
    /// Signed 32-bit integer field.
    Int32(i32),
    /// Signed 64-bit integer field.
    Int64(i64),
    /// 32-bit floating point field.
    Float32(f32),
    /// 64-bit floating point field.
    Float64(f64),
}
impl FieldValue {
    /// Appends this value's Line Protocol representation to `sb`: strings are
    /// quoted and escaped, integers get an `i` suffix, floats are bare.
    pub fn build(&self, sb: &mut String) {
        // NOTE: The unsafe sections below are unsafe due to manipulation of
        //       the Vec returned by String::as_mut_vec(), since there is no
        //       UTF8 validation. itoa, and dtoa, write in UTF8 compatible
        //       encoding, so the unsafes are safe.
        match self {
            FieldValue::Str(s) => {
                sb.push('"');
                build_escapedfieldstr(s, sb);
                sb.push('"');
            },
            FieldValue::UInt32(u) => {
                // SAFETY: itoa writes only ASCII digits.
                unsafe {
                    let mut bytes = sb.as_mut_vec();
                    let _ = itoa::write(&mut bytes, *u).expect("cannot write u32");
                }
                sb.push('i');
            },
            FieldValue::UInt64(u) => {
                // SAFETY: itoa writes only ASCII digits.
                unsafe {
                    let mut bytes = sb.as_mut_vec();
                    let _ = itoa::write(&mut bytes, *u).expect("cannot write u64");
                }
                sb.push('i');
            },
            FieldValue::Int32(i) => {
                // SAFETY: itoa writes only ASCII digits (and a leading '-').
                unsafe {
                    let mut bytes = sb.as_mut_vec();
                    let _ = itoa::write(&mut bytes, *i).expect("cannot write i32");
                }
                sb.push('i');
            },
            FieldValue::Int64(i) => {
                // SAFETY: itoa writes only ASCII digits (and a leading '-').
                unsafe {
                    let mut bytes = sb.as_mut_vec();
                    let _ = itoa::write(&mut bytes, *i).expect("cannot write i64");
                }
                sb.push('i');
            },
            FieldValue::Float32(fl) => {
                // SAFETY: dtoa writes only ASCII float syntax.
                unsafe {
                    let mut bytes = sb.as_mut_vec();
                    let _ = dtoa::write(&mut bytes, *fl).expect("cannot write f32");
                }
            },
            FieldValue::Float64(fl) => {
                // SAFETY: dtoa writes only ASCII float syntax.
                unsafe {
                    let mut bytes = sb.as_mut_vec();
                    let _ = dtoa::write(&mut bytes, *fl).expect("cannot write f64");
                }
            }
        };
    }
}
impl ToString for FieldValue {
    /// Renders this value exactly as `build` would append it to a buffer.
    fn to_string(&self) -> String {
        let mut rendered = String::new();
        self.build(&mut rendered);
        rendered
    }
}
/// Conversions into `FieldValue` for every supported primitive.
impl From<String> for FieldValue {
    #[inline]
    fn from(item: String) -> Self {
        FieldValue::Str(item)
    }
}
impl From<&str> for FieldValue {
    #[inline]
    fn from(item: &str) -> Self {
        FieldValue::Str(String::from(item))
    }
}
impl From<u32> for FieldValue {
    #[inline]
    fn from(item: u32) -> Self {
        FieldValue::UInt32(item)
    }
}
impl From<u64> for FieldValue {
    #[inline]
    fn from(item: u64) -> Self {
        FieldValue::UInt64(item)
    }
}
// The signed conversions below were missing even though the enum has Int32 and
// Int64 variants; adding them is purely additive and backward compatible.
impl From<i32> for FieldValue {
    #[inline]
    fn from(item: i32) -> Self {
        FieldValue::Int32(item)
    }
}
impl From<i64> for FieldValue {
    #[inline]
    fn from(item: i64) -> Self {
        FieldValue::Int64(item)
    }
}
impl From<f32> for FieldValue {
    #[inline]
    fn from(item: f32) -> Self {
        FieldValue::Float32(item)
    }
}
impl From<f64> for FieldValue {
    #[inline]
    fn from(item: f64) -> Self {
        FieldValue::Float64(item)
    }
}
/// A key/value pair destined for becoming a Line Protocol Field.
pub struct Field {
    /// The name or key of the field.
    pub name: String,
    /// The value of the field.
    pub value: FieldValue,
}
/// A key/value pair destined for becoming a Line Protocol Tag.
pub struct Tag {
    /// The name/key of the tag.
    pub name: String,
    /// The value of the tag.
    pub value: String,
}
/// A metric represents a single point in a measurement.
pub trait Metric {
    /// Timestamp of the point.
    fn time(&self) -> Duration;
    /// Name of the measurement this point belongs to.
    fn measurement(&self) -> String;
    /// The point's field key/value pairs.
    fn fields(&self) -> Vec<Field>;
    /// The point's tag key/value pairs.
    fn tags(&self) -> Vec<Tag>;
    /// Renders the point as a Line Protocol string.
    fn to_lineproto(&self) -> String;
    /// Serializes the point into `buffer`, returning the bytes written.
    fn build(&self, buffer: &mut String) -> std::io::Result<usize>;
}
// measurement[,tag=val[,tag=val]] field=value[,field=value]
/// Escapes the provided tag value `s` and appends the escaped text to `buff`.
/// Newlines become `\n`; `"`, `.`, space, and `=` are backslash-prefixed.
pub fn build_escapedtagstr(s: &str, buff: &mut String) {
    // Single forward pass over the characters; produces the same output as a
    // match_indices-based splice without any `unsafe` slicing.
    for c in s.chars() {
        match c {
            '\n' => buff.push_str("\\n"),
            '"' | '.' | ' ' | '=' => {
                buff.push('\\');
                buff.push(c);
            }
            other => buff.push(other),
        }
    }
}
/// Escapes the provided field string value `s` and appends it to `buff`,
/// wrapped in double quotes. Inside the quotes, newlines become `\n` and
/// `"` becomes `\"`.
pub fn build_escapedfieldstr(s: &str, buff: &mut String) {
    buff.push('"');
    // Single forward pass over the characters; produces the same output as a
    // match_indices-based splice without any `unsafe` slicing.
    for c in s.chars() {
        match c {
            '\n' => buff.push_str("\\n"),
            '"' => buff.push_str("\\\""),
            other => buff.push(other),
        }
    }
    buff.push('"')
}
/// Returns a new string containing the tag-escaped version of `s`.
pub fn escape_tagstr(s: &str) -> String {
    // Preallocate with headroom for a few escape characters.
    let mut escaped = String::with_capacity(s.len() + 16);
    build_escapedtagstr(s, &mut escaped);
    escaped
}
/// Returns a new string containing the field-escaped (quoted) version of `s`.
pub fn escape_fieldstr(s: &str) -> String {
    // Preallocate like `escape_tagstr` does: the output is always at least
    // s.len() + 2 bytes (surrounding quotes), so the original `String::new()`
    // guaranteed at least one reallocation for every non-empty input.
    let mut result = String::with_capacity(s.len() + 16);
    build_escapedfieldstr(s, &mut result);
    result
}
|
extern crate rustbf;
use std::{env, io, fs::File};
use std::io::{Read};
use std::time::{Instant};
use rustbf::{LoopMap};
const MEMORY_LENGTH: usize = 30000;
/// Loads the Brainfuck program named in argv[1] and runs it argv[2] times,
/// printing each iteration's wall time in nanoseconds.
fn main() {
    // file name of bf source.
    let args = env::args().collect::<Vec<_>>();
    let source_file = args.get(1).expect("Filename is invalid");
    // number of repeat.
    let repeat = args.get(2).expect("Repeat number is undefined");
    // `str::parse` is the idiomatic spelling of base-10 parsing
    // (clippy: from_str_radix_10), and user input deserves a message
    // rather than a bare unwrap.
    let repeat: usize = repeat.parse().expect("Repeat number must be a non-negative integer");
    // load source code.
    let code = read_source(source_file).expect("Failed to read source file");
    // run code.
    // let out = run(&code);
    // io::stdout().write_all(&out).unwrap();
    for _ in 0..repeat {
        let start_time = Instant::now();
        run(&code);
        let elapsed = start_time.elapsed();
        let nanos = elapsed.as_secs() * 1_000_000_000 + u64::from(elapsed.subsec_nanos());
        println!("{}", nanos);
    }
}
/// Reads the entire source file at `path` into a byte vector.
///
/// # Errors
///
/// Returns any I/O error from opening or reading the file.
fn read_source(path: &str) -> io::Result<Vec<u8>> {
    // `fs::read` already sizes its buffer from the file metadata, replacing
    // the manual open / metadata / with_capacity / read_to_end dance.
    std::fs::read(path)
}
/// Interprets the Brainfuck program in `code` and returns everything it
/// wrote with `.` as a byte vector.
///
/// Panics if the data pointer leaves the tape or the program uses `,`.
fn run(code: &[u8]) -> Vec<u8> {
    let program_len = code.len();
    // Collected output of every `.` instruction.
    let mut output = Vec::new();
    // Zeroed memory tape.
    let mut tape = init_memory();
    // Bracket-pairing cache for `[` / `]` jumps.
    let mut loop_map = LoopMap::new(code);
    let mut ip: usize = 0; // instruction pointer
    let mut dp: usize = 0; // data (tape) pointer
    while ip < program_len {
        match code[ip] {
            b'+' => tape[dp] = tape[dp].wrapping_add(1),
            b'-' => tape[dp] = tape[dp].wrapping_sub(1),
            b'<' => {
                if dp == 0 {
                    panic!("Memory pointer out of range");
                }
                dp -= 1;
            }
            b'>' => {
                if dp + 1 >= MEMORY_LENGTH {
                    panic!("Memory pointer out of range");
                }
                dp += 1;
            }
            b'.' => output.push(tape[dp]),
            b'[' => {
                let end = loop_map.find_end(ip);
                if tape[dp] == 0 {
                    // Current cell is zero: skip past the matching `]`.
                    ip = end;
                }
            }
            b']' => {
                if tape[dp] != 0 {
                    // Current cell is non-zero: jump back to the matching `[`.
                    ip = loop_map.find_start(ip);
                }
            }
            b',' => panic!("',' is not supported"),
            // Any other byte is treated as a comment and ignored.
            _ => (),
        }
        ip += 1;
    }
    output
}
/// Allocates the interpreter's zero-initialised memory tape.
fn init_memory() -> Vec<u8> {
    std::iter::repeat(0u8).take(MEMORY_LENGTH).collect()
}
|
extern crate cryptopals;
/// Cryptopals set 1, challenge 1: hex string to base64.
#[test]
fn test_base64() {
    let hex_input = "49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d";
    let expected = "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t";
    assert_eq!(cryptopals::hex_to_base64(hex_input.to_string()), expected);
}
|
//! Home page.
use yew::prelude::*;
/// Stateless yew component that renders the static "About" / hobbies page.
pub struct Home {}
impl Component for Home {
    // No messages and no properties: the page is entirely static.
    type Message = ();
    type Properties = ();
    /// Creates the component; props and link are unused.
    fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
        Self {}
    }
    /// No messages exist, so an update never triggers a re-render.
    fn update(&mut self, _: Self::Message) -> ShouldRender {
        false
    }
    /// Properties are empty, so a change never triggers a re-render.
    fn change(&mut self, _: Self::Properties) -> ShouldRender {
        false
    }
    /// Renders the page content (self-introduction, fields of work and
    /// hobbies, in Japanese) with outbound links.
    fn view(&self) -> Html {
        html! {
            <>
                <h2>{"About"}</h2>
                {"はじめまして。Du Yichengと申します。京都大学情報学研究科の"}
                <a href="http://sap.ist.i.kyoto-u.ac.jp/">{"音声メディア研究室(河原研究室)"}</a>
                {"に在籍する大学院生です。"}<br/>
                {"学部時代からずっと機械学習の開発&研究に取り組んでおり、"}
                {"研究室で音声を対象とする機械学習&信号処理技術の勉強と研究をしながら、"}
                {"アルバイトで機械学習エンジニアの仕事をしています。"}
                {"ソフトウェアエンジニアの経験もありましたが、ここ2、3年くらいはずっと機械学習エンジニアとして仕事をしており、"}
                {"日立製作所の基礎研究センタを経て、現在は"}
                <a href="https://biome.co.jp/">{"株式会社バイオーム"}</a>
                {"でコンピュータビジョンの研究開発を行っています。"}<br/>
                <a href="https://biome.co.jp/app-biome/">{"いきものコレクションアプリ『バイオーム』"}</a>
                {"の画像認識機能を作っています!もしよかったらダウンロードしてみてください!"}
                <h2>{"取り組み分野"}</h2>
                <ul>
                    <li>
                        {"音響信号処理"}
                        <ul>
                            <li>{"音源分離/音声強調"}</li>
                        </ul>
                    </li>
                    <li>
                        {"コンピュータビジョン"}
                        <ul>
                            <li>{"詳細画像識別"}</li>
                            <li>{"不均衡データ"}</li>
                        </ul>
                    </li>
                    <li>
                        {"グラフコンボリューション"}
                    </li>
                </ul>
                {"最近はHololens2でAR(拡張現実)/MR(複合現実)の勉強もしています!"}
                <h2>{"趣味(プログラミング以外笑)"}</h2>
                <ul>
                    <li>{"ロードバイク"}</li>
                    <li>
                        {"アニメ"}<br/>
                        {"「らき☆すた」とか、「銀河英雄伝説」(旧作のほう)とか"}
                    </li>
                    <li>
                        {"料理(下手の横好きレベル)"}<br/>
                        {"簡単なイタリアンが作れるくらい"}
                    </li>
                </ul>
            </>
        }
    }
}
|
/// An immutable block of raw bytes.
pub struct Block {
    data: Vec<u8>,
}
impl Block {
    /// Borrows the block's raw contents.
    pub fn data(&self) -> &[u8] {
        self.data.as_slice()
    }
}
/// Incrementally builds a `Block`.
pub struct BlockBuilder {
    // Accumulated bytes for the block under construction.
    buf: Vec<u8>,
}
impl BlockBuilder {
    /// Creates a builder targeting `block_size` bytes. (Not yet implemented.)
    pub fn new(block_size: usize) -> Self {
        todo!();
    }
    /// Return whether block_size exceeded.
    /// This is a no-op when returning true.
    /// (Not yet implemented.)
    pub fn add(&mut self, data: &[u8]) -> bool {
        todo!()
    }
    /// Current size of the pending block. (Not yet implemented.)
    pub fn curr_size(&self) -> usize {
        todo!()
    }
    /// Finalises and returns the built `Block`. (Not yet implemented.)
    pub fn finish(&mut self) -> Block {
        todo!()
    }
    /// Clears builder state for reuse. Currently an empty stub.
    pub fn reset(&mut self) {}
}
/// Iterator over the key/value entries stored in a `Block`.
pub trait BlockIter {
    /// Key type decoded from the block.
    type Key;
    /// Value type decoded from the block.
    type Value;
    /// Creates an iterator that takes ownership of `block`.
    fn new(block: Block) -> Self;
    /// Positions the iterator at `key`; presumably returns `None` when the
    /// key is absent — TODO confirm against implementations.
    fn seek(&mut self, key: &Self::Key) -> Option<()>;
    /// Advances to the next entry.
    fn next(&mut self);
    /// Returns the value at the current position.
    fn value(&self) -> &Self::Value;
}
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test an interesting corner case that ought to be legal (though the
// current code actually gets it wrong, see below): a fn that takes
// two arguments that are references with the same lifetime is in fact
// equivalent to a fn that takes two references with distinct
// lifetimes. This is true because the two functions can call one
// another -- effectively, the single lifetime `'a` is just inferred
// to be the intersection of the two distinct lifetimes.
//
// FIXME: However, we currently reject this example with an error,
// because of how we handle binders and equality in `relate_tys`.
//
// compile-flags:-Zno-leak-check
#![feature(nll)]
use std::cell::Cell;
/// Returns a cell of a fn pointer whose two reference arguments share one
/// higher-ranked lifetime `'a`. The body is irrelevant to this compile
/// test, so it simply panics.
fn make_cell_aa() -> Cell<for<'a> fn(&'a u32, &'a u32)> {
    panic!()
}
/// Coerces the single-lifetime fn-pointer cell to the two-lifetime type.
/// Per the header comment this ought to be legal, but is currently rejected
/// (the `//~^` annotation below pins the expected compiler error).
fn aa_eq_ab() {
    let a: Cell<for<'a, 'b> fn(&'a u32, &'b u32)> = make_cell_aa();
    //~^ ERROR higher-ranked subtype error
    drop(a);
}
fn main() { }
|
extern crate astar;
extern crate pancurses;
extern crate rand;
mod controller;
mod model;
mod view;
mod fiends;
mod util;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::env;
use model::*;
use pancurses::*;
/// Builds the starting world: walls around the border, the goal in the
/// centre, 7-tile gates cut into the middle of each edge, and the player
/// placed at (20, 20).
fn initial_world() -> WorldData {
    let mut world_data = WorldData {
        statics: [[None; X]; Y],
        mobiles: [[None; X]; Y],
        fiends: BTreeSet::new(),
        arrows: BTreeSet::new(),
        turrets: BTreeSet::new(),
        obstacles: BTreeSet::new(),
        gates: BTreeSet::new(),
        player_info: PlayerInfo {
            location: (20, 20),
            health: 100,
            max_health: 100,
            damage_factor: 1,
            heal_factor: 1,
            armour_factor: 1,
        },
        goal_location: (X / 2, Y / 2),
        log: ["".to_string(), "".to_string(), "".to_string(), "".to_string(), "".to_string()],
        cash: 0,
        wave: 0,
    };
    // Surround the map with walls.
    for x in 0..X {
        world_data.statics[0][x] = Some(Wall);
        world_data.statics[Y - 1][x] = Some(Wall);
    }
    for y in 0..Y {
        world_data.statics[y][0] = Some(Wall);
        world_data.statics[y][X - 1] = Some(Wall);
    }
    // Place the goal in the centre of the map.
    world_data.statics[Y / 2][X / 2] = Some(Goal {
        health: 10,
        max_health: 10,
    });
    // Cut 7-tile-wide gates into the middle of the top and bottom edges…
    for x in 0..7 {
        let gx = x + (X / 2) - 3;
        world_data.statics[0][gx] = Some(Gate);
        world_data.statics[Y - 1][gx] = Some(Gate);
        world_data.gates.insert((gx, 0));
        world_data.gates.insert((gx, Y - 1));
    }
    // …and into the middle of the left and right edges.
    for y in 0..7 {
        let gy = y + (Y / 2) - 3;
        world_data.statics[gy][0] = Some(Gate);
        world_data.statics[gy][X - 1] = Some(Gate);
        world_data.gates.insert((0, gy));
        world_data.gates.insert((X - 1, gy));
    }
    // Drop the player onto the map (matches `player_info.location`).
    world_data.mobiles[20][20] = Some(Player);
    world_data
}
/// Entry point: `prog waves` prints the wave composition table; any other
/// invocation starts an interactive game.
fn main() {
    // `as_deref` compares against a `&str` directly, avoiding the String
    // allocation the previous `Some("waves".to_string())` made per run.
    if env::args().nth(1).as_deref() == Some("waves") {
        waves()
    } else {
        play()
    }
}
/// Prints, for waves 1..=100, how many fiends of each name the wave
/// contains, tagging every 5th wave as `[BOSS]` and every 10th as
/// `[BIG BOSS]`.
fn waves() {
    for wave in 1..101 {
        let the_fiends = fiends::make_wave(wave);
        // Count fiends by name.
        let mut the_names = BTreeMap::new();
        for fiend in the_fiends {
            // Entry API: one lookup instead of the old get-then-insert pair.
            *the_names.entry(fiend.name).or_insert(0) += 1;
        }
        // Build " 2x goblin, 1x troll" in one pass; the previous
        // `format!`-accumulation reallocated the whole string per entry.
        let names = the_names
            .iter()
            .map(|(name, how_many)| format!(" {}x {}", how_many, name))
            .collect::<Vec<_>>()
            .join(",");
        if wave % 10 == 0 {
            println!("Wave {}:{} [BIG BOSS]", wave, names);
        } else if wave % 5 == 0 {
            println!("Wave {}:{} [BOSS]", wave, names);
        } else {
            println!("Wave {}:{}", wave, names);
        }
    }
}
/// Runs the interactive game loop: renders the world and feeds each key
/// press to the current game state until the state becomes `End`.
fn play() {
    let window = initscr();
    // Don't echo typed keys; hide the cursor.
    let _ = noecho();
    let _ = curs_set(0);
    let mut world_data = initial_world();
    let game_windows = view::setup_render(&window);
    // Enable keypad mode so arrow/function keys arrive as single inputs.
    let _ = window.keypad(true);
    let mut gamestate = model::Startup;
    while gamestate != model::End {
        world_data.render(&game_windows, &gamestate);
        if let Some(i) = window.getch() {
            gamestate.handle(&mut world_data, i)
        }
    }
    // Restore the terminal before exiting.
    let _ = endwin();
}
|
use cpu::register::Register;
use cpu::CPU;
/// Compare a register to the accumulator and set the flags based on the comparison
///
/// Sets conditions flags
///
/// # Cycles
///
/// * Register M: 7
/// * Other: 4
///
/// # Arguments
///
/// * `cpu` - The cpu to perform the comparison in
/// * `register` - The register to compare to the accumulator
///
pub fn cmp(cpu: &mut CPU, register: Register) -> u8 {
    // Compute A - register: only the flags observe the result/borrow; the
    // accumulator itself is left untouched.
    let (result, borrow) = match register {
        Register::A => cpu.a.overflowing_sub(cpu.a),
        Register::B => cpu.a.overflowing_sub(cpu.b),
        Register::C => cpu.a.overflowing_sub(cpu.c),
        Register::D => cpu.a.overflowing_sub(cpu.d),
        Register::E => cpu.a.overflowing_sub(cpu.e),
        Register::H => cpu.a.overflowing_sub(cpu.h),
        Register::L => cpu.a.overflowing_sub(cpu.l),
        Register::M => {
            // M addresses memory at the 16-bit HL register pair.
            let offset = (u16::from(cpu.h) << 8) + u16::from(cpu.l);
            cpu.a.overflowing_sub(cpu.memory[offset as usize])
        }
        unsupported => {
            // Fixed message: it previously said "sub doesn't support",
            // a copy/paste from the SUB handler.
            panic!("cmp doesn't support {:?}", unsupported);
        }
    };
    cpu.flags.set(result, borrow);
    match register {
        Register::M => 7,
        _ => 4,
    }
}
/// Compare the accumulator to the next immediate byte and set the flags based on the comparison
///
/// Sets conditions flags
///
/// # Cycles
///
/// 7
///
/// (Doc previously said 4, but the function returns 7 — see the final
/// expression below.)
///
/// # Arguments
///
/// * `cpu` - The cpu to perform the comparison in
///
pub fn cpi(cpu: &mut CPU) -> u8 {
    // Fetch the immediate operand; flags reflect A - byte, A is unchanged.
    let byte = cpu.read_byte().unwrap();
    let (result, borrow) = cpu.a.overflowing_sub(byte);
    cpu.flags.set(result, borrow);
    7
}
#[cfg(test)]
mod test {
    //! Tests for `cmp`/`cpi`: each constructs a CPU, runs the instruction
    //! and inspects the flag register afterwards.
    use super::*;
    #[test]
    fn cmp_b_with_smaller_b_compares_b_to_accumulator_and_sets_flags() {
        let mut cpu = CPU {
            a: 10,
            b: 9,
            ..CPU::default()
        };
        cmp(&mut cpu, Register::B);
        // CMP must not modify either operand.
        assert_eq!(cpu.a, 10);
        assert_eq!(cpu.b, 9);
        assert_eq!(cpu.flags.carry, false);
        assert_eq!(cpu.flags.zero, false);
        assert_eq!(cpu.flags.sign, false);
        assert_eq!(cpu.flags.parity, false);
    }
    #[test]
    fn cmp_b_with_equal_b_compares_b_to_accumulator_and_sets_flags() {
        let mut cpu = CPU {
            a: 10,
            b: 10,
            ..CPU::default()
        };
        cmp(&mut cpu, Register::B);
        assert_eq!(cpu.a, 10);
        assert_eq!(cpu.b, 10);
        assert_eq!(cpu.flags.carry, false);
        assert_eq!(cpu.flags.zero, true);
        assert_eq!(cpu.flags.sign, false);
        assert_eq!(cpu.flags.parity, true);
    }
    #[test]
    fn cmp_b_with_larger_b_compares_b_to_accumulator_and_sets_flags() {
        let mut cpu = CPU {
            a: 10,
            b: 11,
            ..CPU::default()
        };
        cmp(&mut cpu, Register::B);
        assert_eq!(cpu.a, 10);
        assert_eq!(cpu.b, 11);
        // 10 - 11 borrows, so carry/sign are set.
        assert_eq!(cpu.flags.carry, true);
        assert_eq!(cpu.flags.zero, false);
        assert_eq!(cpu.flags.sign, true);
        assert_eq!(cpu.flags.parity, true);
    }
    #[test]
    fn cpi_with_smaller_immediate_byte_compares_it_to_accumulator_and_sets_flags() {
        let mut cpu = CPU {
            memory: vec![9],
            a: 10,
            ..CPU::default()
        };
        cpi(&mut cpu);
        assert_eq!(cpu.a, 10);
        assert_eq!(cpu.flags.carry, false);
        assert_eq!(cpu.flags.zero, false);
        assert_eq!(cpu.flags.sign, false);
        assert_eq!(cpu.flags.parity, false);
    }
    #[test]
    fn cpi_with_equal_immediate_byte_compares_it_to_accumulator_and_sets_flags() {
        let mut cpu = CPU {
            memory: vec![10],
            a: 10,
            ..CPU::default()
        };
        cpi(&mut cpu);
        assert_eq!(cpu.a, 10);
        assert_eq!(cpu.flags.carry, false);
        assert_eq!(cpu.flags.zero, true);
        assert_eq!(cpu.flags.sign, false);
        assert_eq!(cpu.flags.parity, true);
    }
    // Renamed from `cpi_with_larget_…`: fixed the "larget" -> "larger" typo.
    #[test]
    fn cpi_with_larger_immediate_byte_compares_it_to_accumulator_and_sets_flags() {
        let mut cpu = CPU {
            memory: vec![11],
            a: 10,
            ..CPU::default()
        };
        cpi(&mut cpu);
        assert_eq!(cpu.a, 10);
        assert_eq!(cpu.flags.carry, true);
        assert_eq!(cpu.flags.zero, false);
        assert_eq!(cpu.flags.sign, true);
        assert_eq!(cpu.flags.parity, true);
    }
}
|
use crypto_pals::num_blocks;
use openssl::symm::{decrypt, encrypt, Cipher};
use rand::{Rng,RngCore};
const DEFAULT_KEY: &[u8] = b"YELLOW SUBMARINE";
/// AES-128-ECB cipher wrapper with CTR-mode helpers layered on top.
pub struct MyCiphers {
    // Underlying openssl cipher (AES-128-ECB in both constructors).
    core: Cipher,
    // Symmetric key bytes.
    key: Vec<u8>,
}
impl MyCiphers {
    /// Creates a cipher using the well-known default key.
    pub fn init_default() -> MyCiphers {
        MyCiphers {
            core: Cipher::aes_128_ecb(),
            key: DEFAULT_KEY.to_vec(),
        }
    }
    /// Creates a cipher with a caller-supplied key.
    pub fn init(key: Vec<u8>) -> MyCiphers {
        MyCiphers {
            core: Cipher::aes_128_ecb(),
            key,
        }
    }
    /// Returns the underlying openssl cipher.
    pub fn get_core(&self) -> Cipher {
        self.core
    }
    /// Returns a reference to the key bytes.
    pub fn get_key(&self) -> &Vec<u8> {
        &self.key
    }
    /// Encrypts or decrypts `text` in CTR mode (the operation is its own
    /// inverse): XORs the text against the keystream for `nonce`.
    pub fn xcrypt_ctr(&self, text: &[u8], nonce: u64) -> Vec<u8> {
        // (Removed a dead `let _counter: u64 = 0;` local left from a draft.)
        let keystream = self.generate_keystream(text, nonce);
        keystream
            .iter()
            .zip(text.iter())
            .map(|(x, y)| x ^ y)
            .collect()
    }
    /// Builds a CTR keystream covering `text`: block i is
    /// `ECB(key, nonce_le || i_le)`, truncated to `text.len()` bytes.
    pub fn generate_keystream(&self, text: &[u8], nonce: u64) -> Vec<u8> {
        let num_blocks = num_blocks(&self.key, text);
        let mut counter: u64 = 0;
        let mut keystream = vec![];
        for _ in 0..num_blocks {
            // Little-endian nonce followed by little-endian block counter.
            let nonce_and_counter = [nonce.to_le_bytes(), counter.to_le_bytes()].concat();
            let mut stream_block = self.encrypt_ecb(&nonce_and_counter);
            keystream.append(&mut stream_block);
            counter += 1;
        }
        // Trim the trailing partial block.
        keystream.truncate(text.len());
        keystream
    }
    /// ECB-encrypts `pt` with the configured key.
    pub fn encrypt_ecb(&self, pt: &[u8]) -> Vec<u8> {
        // todo figure out Result handling
        encrypt(self.core, &self.key, None, pt).unwrap()
    }
    /// ECB-decrypts `ct` with the configured key.
    pub fn decrypt_ecb(&self, ct: &[u8]) -> Vec<u8> {
        // todo figure out Result handling
        decrypt(self.core, &self.key, None, ct).unwrap()
    }
    /// s4c25 - CTR random access read/write: overwrites `ct` in place from
    /// `offset` with `new_text` encrypted under the same keystream, and
    /// returns a copy of the edited ciphertext.
    ///
    /// # Panics
    ///
    /// Panics if `offset + new_text.len()` exceeds `ct.len()`.
    pub fn edit(&self, ct: &mut [u8], nonce: u64, offset: usize, new_text: &[u8]) -> Vec<u8> {
        let keystream = self.generate_keystream(ct, nonce);
        let keystream_subset = &keystream[offset..offset + new_text.len()];
        for (i, (p, k)) in new_text.iter().zip(keystream_subset).enumerate() {
            ct[i + offset] = p ^ k;
        }
        ct.to_vec()
    }
}
/// Round trip: ECB decryption of an ECB encryption recovers the plaintext.
#[test]
fn test_encrypt_decrypt_ecb() {
    let cipher = MyCiphers::init_default();
    let plaintext = b"this is 32 charsthis is 32 chars";
    let decrypted = cipher.decrypt_ecb(&cipher.encrypt_ecb(plaintext));
    assert_eq!(plaintext, &decrypted[..]);
}
/// Round trip: CTR is its own inverse, so applying `xcrypt_ctr` twice with
/// the same nonce recovers the plaintext.
#[test]
fn test_encrypt_decrypt_ctr() {
    let cipher = MyCiphers::init_default();
    let mut rng = rand::thread_rng();
    // Random nonce: the round trip must work for any nonce value.
    let nonce: u64 = rng.next_u64();
    // 32 bytes (two 16-byte AES blocks); the old comment said 128 bytes,
    // which was wrong.
    let pt = b"this is 32 charsthis is 32 chars";
    let ct = cipher.xcrypt_ctr(pt, nonce);
    let result = cipher.xcrypt_ctr(&ct, nonce);
    assert_eq!(pt, &result[..]);
}
|
//! Lumen intrinsics
pub mod apply_apply_2_1;
pub mod apply_apply_3_1;
pub mod is_big_integer_1;
pub mod is_small_integer_1;
pub mod log_exit_1;
use liblumen_alloc::erts::term::prelude::*;
/// Returns the atom for this intrinsics module's name, `"lumen"`.
pub fn module() -> Atom {
    Atom::from_str("lumen")
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.