text stringlengths 8 4.13M |
|---|
use std::fmt;
use std::io;
#[derive(Debug)]
/// Crate error type: either an underlying I/O failure or a syntax error
/// described by a human-readable message.
pub enum Error {
    Io(io::Error),
    Syntax(String),
}
impl From<io::Error> for Error {
    /// Wraps an I/O error so `?` converts it into `Error::Io` automatically.
    fn from(e: io::Error) -> Self {
        Error::Io(e)
    }
}
impl From<String> for Error {
    /// Wraps a message string as a syntax error, enabling `?` on `Result<_, String>`.
    fn from(e: String) -> Self {
        Error::Syntax(e)
    }
}
impl Error {
    /// Builds a syntax error reporting an unmatched `]` at byte offset `byte`.
    pub fn at_byte(byte: usize) -> Self {
        // Fixed typo in the user-facing message: "correspoding" -> "corresponding".
        Error::Syntax(format!(
            "Found a ] with no corresponding [ at byte: {}",
            byte
        ))
    }
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::Io(e) => write!(f, "{}", e),
Error::Syntax(e) => write!(f, "{}", e),
}
}
}
|
pub use std::fmt::{Debug, Display};
#[cfg(test)]
pub use std::error::Error;
#[cfg(not(test))]
pub trait Error: Debug + Display {
    // Local stand-in for the std error trait in non-test builds; under
    // `cfg(test)` the real `std::error::Error` is re-exported above instead.
    /// Short human-readable description of the error.
    fn description(&self) -> &str;
    /// Lower-level cause of this error, if any; defaults to `None`.
    /// NOTE(review): bare `&Error` trait object is pre-2018 syntax; `&dyn Error`
    /// is the modern spelling — confirm the crate edition before changing.
    fn cause(&self) -> Option<&Error> {
        None
    }
}
|
extern crate electrum_client;
use electrum_client::Client;
/// Connects to a public Electrum server over SSL and dumps its advertised
/// feature set for inspection.
fn main() {
    // `expect` instead of bare `unwrap`: a failed network/SSL handshake now
    // reports what went wrong rather than an anonymous panic.
    let mut client = Client::new_ssl(
        "electrum2.hodlister.co:50002",
        Some("electrum2.hodlister.co"),
    )
    .expect("failed to open SSL connection to Electrum server");
    let res = client.server_features();
    println!("{:#?}", res);
}
|
use board;
use tile;
use cell;
use std::vec::Vec;
pub fn score_word(word: Vec<cell>&) -> usize {
let letter_multiplier: usize = match word[0]._bonus {
cell::Bonus::DoubleLetter => 2,
cell::Bonus::TripleLetter => 3,
_ => 1,
};
let word_multiplier: usize = match word[0]._bonus {
cell::Bonus::DoubleWord => 2,
cell::Bonus::TripleWord => 3,
_ => 1,
}
let score = match word[0]._tile {
Some(t) => word_multiplier * (letter_multiplier * t.value + score_word(&word[1..])),
None => panic!("EMPTY TILE CANNOT BE SCORED!"),
}
} |
// https://sope.prod.reuters.tv/program/rcom/v1/article-recirc?edition=cn&modules=rightrail,ribbon,bottom
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
/// Top-level payload of the article-recirc response (see URL above): one
/// ribbon of stories for each page module.
pub struct TRRoot {
    pub rightrail: TRRibbon,
    pub ribbon: TRRibbon,
    pub bottom: TRRibbon,
}
impl crate::HasRecs for TRRoot {
    /// Flattens the three page modules into a single list of recs,
    /// in rightrail, ribbon, bottom order.
    fn to_recs(&self) -> Vec<Vec<String>> {
        [&self.rightrail, &self.ribbon, &self.bottom]
            .iter()
            .flat_map(|section| section.to_recs())
            .collect()
    }
}
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
/// One recirculation module: its stories plus A/B-test and error metadata
/// (kept as raw JSON values since their shape is not fixed here).
pub struct TRRibbon {
    #[serde(rename = "ab_test")]
    pub ab_test: Vec<::serde_json::Value>,
    pub errors: Vec<::serde_json::Value>,
    pub stories: Vec<TRStory>,
    pub tags: Vec<String>,
}
impl crate::HasRecs for TRRibbon {
    /// Converts every story in this module into its rec row, preserving order.
    fn to_recs(&self) -> Vec<Vec<String>> {
        let mut recs = Vec::with_capacity(self.stories.len());
        for story in &self.stories {
            recs.push(story.to_rec());
        }
        recs
    }
}
#[derive(Default, Debug, Clone, PartialEq, serde_derive::Serialize, serde_derive::Deserialize)]
#[serde(rename_all = "camelCase")]
/// A single recommended story as returned by the recirc endpoint.
pub struct TRStory {
    // Update timestamp; presumably epoch seconds — confirm against the API.
    pub updated: i64,
    pub headline: String,
    pub image: String,
    pub reason: String,
    pub path: String,
    pub id: String,
    // Shape varies, so it is kept as raw JSON.
    pub channel: ::serde_json::Value,
}
impl TRStory {
    /// Flattens the story into one rec row:
    /// [id, updated, headline, reason, path]. Note `image` and `channel`
    /// are intentionally not part of the row.
    pub fn to_rec(&self) -> Vec<String> {
        vec![
            self.id.clone(),
            self.updated.to_string(),
            self.headline.clone(),
            self.reason.clone(),
            self.path.clone(),
        ]
    }
}
|
/// Returns at least the first `count` digits of the Champernowne constant
/// 0.123456789101112... (digits of 1, 2, 3, ... concatenated). The last
/// appended number is always written in full, so the result may slightly
/// exceed `count` — exactly like the original loop.
fn champernowne_digits(count: usize) -> Vec<u64> {
    let mut digits = Vec::with_capacity(count);
    let mut n: u64 = 0;
    while digits.len() < count {
        n += 1;
        // Peel digits off `n` least-significant-first, then pop the stack
        // to emit them most-significant-first.
        let mut stack = Vec::new();
        let mut tmp = n;
        while tmp > 0 {
            stack.push(tmp % 10);
            tmp /= 10;
        }
        while let Some(d) = stack.pop() {
            digits.push(d);
        }
    }
    digits
}

/// Project Euler 40: product of the decimal digits d1, d10, d100, ...,
/// d1000000 of the Champernowne constant.
fn main() {
    let frac = champernowne_digits(1_000_000);
    // Indices are 1-based in the problem statement, hence `i - 1`.
    let product: u64 = [1usize, 10, 100, 1_000, 10_000, 100_000, 1_000_000]
        .iter()
        .map(|&i| frac[i - 1])
        .product();
    println!("{}", product);
}
use super::{Object, TaggedValue};
use crate::runtime::Symbol;
use crate::SchemeExpression;
impl Object {
    /// The undefined value.
    pub fn undef() -> Self {
        Object::new(TaggedValue::Undef)
    }
    /// The empty list.
    pub fn nil() -> Self {
        Object::new(TaggedValue::Nil)
    }
    /// An integer object.
    pub fn integer(value: i64) -> Self {
        Object::new(TaggedValue::Integer(value))
    }
    /// A floating-point object.
    pub fn float(value: f64) -> Self {
        Object::new(TaggedValue::Float(value))
    }
    /// A symbol object; `name` is passed through to the runtime symbol table.
    pub fn symbol<T: AsRef<str> + ToString>(name: T) -> Self {
        Object::new(TaggedValue::Symbol(Symbol::new(name)))
    }
    /// A string object taking ownership of `content`.
    pub fn string(content: String) -> Self {
        Object::new(TaggedValue::String(content))
    }
    /// A function object wrapping a raw code pointer.
    pub fn function(ptr: *const u8) -> Self {
        Object::new(TaggedValue::Function(ptr))
    }
    /// A pair (cons cell) of `car` and `cdr`, boxing both halves.
    pub fn cons(car: Object, cdr: Object) -> Self {
        Object::new(TaggedValue::Pair(Box::new(car), Box::new(cdr)))
    }
}
/// Incrementally builds a proper (or, via `set_cdr`, dotted) list.
pub struct ListBuilder {
    // The list built so far; boxed so its heap address is stable.
    partial_list: Box<Object>,
    // Raw pointer to the current tail (the trailing nil / last cdr).
    // Invariant: always points into the allocation owned by `partial_list`.
    cursor: *mut Object,
}
impl ListBuilder {
    /// Creates a builder whose list is initially nil.
    pub fn new() -> Self {
        let mut builder = ListBuilder {
            partial_list: Box::new(Object::nil()),
            // Placeholder null; fixed up immediately below once the Box exists.
            cursor: 0 as *mut _,
        };
        // The cursor points at the Box's heap contents, so it remains valid
        // even when the builder value itself is moved.
        builder.cursor = builder.partial_list.as_mut();
        builder
    }
    /// Appends `item` at the end of the list being built.
    pub fn append(&mut self, item: Object) {
        unsafe {
            // Replace the trailing nil with a new pair, then advance the
            // cursor to that pair's cdr (the new trailing nil).
            *self.cursor = Object::cons(item, Object::nil());
            self.cursor = (*self.cursor).cdr_mut().unwrap();
        }
    }
    /// Overwrites the current tail with `item` (e.g. to form a dotted list).
    pub fn set_cdr(&mut self, item: Object) {
        unsafe {
            *self.cursor = item;
        }
    }
    /// Consumes the builder and returns the finished list.
    pub fn build(self) -> Object {
        *self.partial_list
    }
}
|
use bech32::ToBase32;
use bitcoin_hashes::hash160;
use bitcoin_hashes::Hash;
use secp256k1::{Secp256k1};
use secp256k1::rand::{thread_rng};
use std::env;
use std::io::Write;
use std::sync::{atomic::AtomicBool, atomic::AtomicU64, atomic::Ordering, Arc};
use std::time::SystemTime;
// The 32 characters that may appear in the data part of a bech32 address;
// used to validate the user-supplied vanity suffix before searching.
const CHARSET: [char; 32] = [
    'q', 'p', 'z', 'r', 'y', '9', 'x', '8', 'g', 'f', '2', 't', 'v', 'd', 'w', '0', 's', '3', 'j',
    'n', '5', '4', 'k', 'h', 'c', 'e', '6', 'm', 'u', 'a', '7', 'l',
];
/// Worker loop: generates random keypairs until one hashes to an address
/// starting with `prefix`, or until another worker sets `flag`.
/// Worker 0 additionally prints periodic progress estimates.
fn run(id: i32, prefix: String, counter: Arc<AtomicU64>, flag: Arc<AtomicBool>) {
    let mut local_count = 0;
    let start_time = SystemTime::now();
    let sync_num = 10000; // how often local progress is published to `counter`
    let log_num = 100000; // how often worker 0 logs progress
    // Expected attempts: 32 bech32 possibilities per character beyond the
    // fixed 4-character "bc1q" prefix.
    let estimated_hash_num = 32.0_f64.powi(prefix.len() as i32 - 4); // except 'bc1q'
    let secp = Secp256k1::new();
    loop {
        let (secret_key, public_key) = secp.generate_keypair(&mut thread_rng());
        // hash160 of the serialized (compressed) public key.
        let mut hash_engine = hash160::Hash::engine();
        hash_engine
            .write_all(&public_key.serialize())
            .unwrap();
        let hash_data = hash160::Hash::from_engine(hash_engine)[..].to_vec();
        // Witness version 0, prepended as the first data character.
        let version = bech32::u5::try_from_u8(0).unwrap();
        let address =
            bech32::encode("bc", [vec![version], hash_data.to_base32()].concat()).unwrap();
        if address.to_string().starts_with(&prefix) {
            // Encode the winning key: 0x80 prefix + 32 key bytes + 0x01
            // compressed flag, base58check-encoded (WIF-style layout).
            let mut ret = [0; 34];
            ret[0] = 128;
            ret[1..33].copy_from_slice(&secret_key[..]);
            ret[33] = 1;
            let private_key = bs58::encode(&ret[..]).with_check().into_string();
            println!("result:");
            println!("privkey:\t{}", private_key);
            println!("address:\t{}", address.to_string());
            // Signal all other workers to stop.
            flag.store(true, Ordering::SeqCst);
            break;
        }
        local_count += 1;
        if local_count % sync_num == 0 {
            if flag.load(Ordering::SeqCst) {
                break;
            }
            counter.fetch_add(sync_num, Ordering::SeqCst);
        }
        // Progress log, based on the shared (synced) counter; worker 0 only.
        if id == 0 && local_count % log_num == 0 {
            let elapsed_secs = start_time.elapsed().unwrap().as_millis() as f64 / 1000.0;
            let total_count = counter.load(Ordering::SeqCst);
            let speed = (total_count as f64) / elapsed_secs;
            let time_left = (estimated_hash_num - total_count as f64) / speed;
            println!(
                "count: {}\telapsed: {:.2}min\tspeed: {:.2}/s\tprogress(est): {:.2}%\tleft(est): {:.2}min",
                total_count,
                elapsed_secs / 60.0,
                speed,
                ((total_count as f64) / estimated_hash_num * 100.0),
                time_left / 60.0
            );
        }
    }
}
/// CLI entry point: validates the requested vanity suffix, then searches for
/// a matching bc1q address on multiple worker threads.
fn main() {
    if env::args().len() < 2 {
        eprintln!(
            "usage: {} <the address prefix to match>",
            env::args().nth(0).unwrap()
        );
        return;
    }
    let args: Vec<String> = env::args().collect();
    // Reject characters that can never appear in a bech32 address.
    for c in args[1].chars() {
        if !CHARSET.contains(&c) {
            eprintln!("invalid char: {}", c);
            return;
        }
    }
    let prefix: String = "bc1q".to_string() + &args[1];
    println!("checking prefix {}", prefix);
    let counter = Arc::new(AtomicU64::new(0));
    let flag = Arc::new(AtomicBool::new(false));
    // Use half the logical CPUs but always at least one worker: on a
    // single-core machine `num_cpus::get() / 2` is 0, and the original
    // spawned no threads and exited without searching at all.
    let thread_num = (num_cpus::get() / 2).max(1);
    let mut threads = Vec::new();
    for idx in 0..thread_num {
        let local_counter = counter.clone();
        let local_flag = flag.clone();
        let prefix = prefix.clone();
        let thread = std::thread::spawn(move || run(idx as i32, prefix, local_counter, local_flag));
        threads.push(thread);
    }
    for thread in threads {
        thread.join().unwrap();
    }
}
|
use sparser_bitfield::{Bitfield, Change};
#[test]
fn can_create_bitfield() {
    // Construction alone must not panic.
    let _unused = Bitfield::new();
}
#[test]
fn basic_set_get() {
    // A bit that was set must read back as set.
    let mut bits = Bitfield::new();
    bits.set(0);
    assert!(bits.get(0));
}
#[test]
fn can_set_bits() {
    // Smoke test: setting widely spaced, sparse indices (up to 10^12)
    // must not panic.
    let mut bits = Bitfield::new();
    bits.set(100);
    bits.set(1_000);
    bits.set(1_000_000);
    bits.set(1_000_000_000);
    bits.set(1_000_000_000_000);
}
#[test]
fn can_get_bits() {
    let mut bits = Bitfield::new();
    bits.set(0);
    bits.set(1);
    bits.set(1000);
    assert_eq!(bits.get(0), true);
    assert_eq!(bits.get(1), true);
    // Bit 1000 was set above but the original test never verified it.
    assert_eq!(bits.get(1000), true);
    // A bit that was never set must read back as unset.
    assert_eq!(bits.get(2), false);
}
#[test]
fn returns_if_flipped() {
    // `set`/`reset` report whether the call actually changed the bit.
    let mut bits = Bitfield::new();
    assert_eq!(bits.set(0), Change::Changed);   // 0 -> 1
    assert_eq!(bits.reset(0), Change::Changed); // 1 -> 0
    assert_eq!(bits.set(0), Change::Changed);   // 0 -> 1 again
    assert_eq!(bits.set(0), Change::Unchanged); // already 1
    assert_eq!(bits.set(0), Change::Unchanged);
}
#[test]
fn exposes_changed_unchanged_methods() {
    // `Change` exposes is_changed/is_unchanged predicates besides PartialEq.
    let mut bits = Bitfield::new();
    assert!(bits.set(0).is_changed());   // first set flips the bit
    assert!(bits.set(0).is_unchanged()); // second set is a no-op
}
|
use crate::{MavFrame, MavHeader, MavlinkVersion, Message};
use std::io::{self};
#[cfg(feature = "tcp")]
mod tcp;
#[cfg(feature = "udp")]
mod udp;
#[cfg(feature = "direct-serial")]
mod direct_serial;
mod file;
/// A MAVLink connection
pub trait MavConnection<M: Message> {
    /// Receive a mavlink message.
    ///
    /// Blocks until a valid frame is received, ignoring invalid messages.
    fn recv(&self) -> io::Result<(MavHeader, M)>;
    /// Send a mavlink message
    fn send(&self, header: &MavHeader, data: &M) -> io::Result<()>;
    /// Sets the MAVLink protocol version used by this connection.
    fn set_protocol_version(&mut self, version: MavlinkVersion);
    /// Returns the MAVLink protocol version currently in use.
    fn get_protocol_version(&self) -> MavlinkVersion;
    /// Write whole frame
    fn send_frame(&self, frame: &MavFrame<M>) -> io::Result<()> {
        self.send(&frame.header, &frame.msg)
    }
    /// Read whole frame
    fn recv_frame(&self) -> io::Result<MavFrame<M>> {
        let (header, msg) = self.recv()?;
        let protocol_version = self.get_protocol_version();
        Ok(MavFrame {
            header,
            msg,
            protocol_version,
        })
    }
    /// Send a message with default header
    fn send_default(&self, data: &M) -> io::Result<()> {
        let header = MavHeader::default();
        self.send(&header, data)
    }
}
/// Connect to a MAVLink node by address string.
///
/// The address must be in one of the following formats:
///
/// * `tcpin:<addr>:<port>` to create a TCP server, listening for incoming connections
/// * `tcpout:<addr>:<port>` to create a TCP client
/// * `udpin:<addr>:<port>` to create a UDP server, listening for incoming packets
/// * `udpout:<addr>:<port>` to create a UDP client
/// * `serial:<port>:<baudrate>` to create a serial connection
/// * `file:<path>` to extract file data
///
/// The type of the connection is determined at runtime based on the address type, so the
/// connection is returned as a trait object.
pub fn connect<M: Message>(address: &str) -> io::Result<Box<dyn MavConnection<M> + Sync + Send>> {
    // Shared error for schemes whose feature is compiled out or unrecognized.
    let protocol_err = Err(io::Error::new(
        io::ErrorKind::AddrNotAvailable,
        "Protocol unsupported",
    ));
    // `cfg!` keeps every branch type-checked regardless of enabled features;
    // the inner `#[cfg]` blocks select the real implementation.
    if cfg!(feature = "tcp") && address.starts_with("tcp") {
        #[cfg(feature = "tcp")]
        {
            tcp::select_protocol(address)
        }
        #[cfg(not(feature = "tcp"))]
        {
            protocol_err
        }
    } else if cfg!(feature = "udp") && address.starts_with("udp") {
        #[cfg(feature = "udp")]
        {
            udp::select_protocol(address)
        }
        #[cfg(not(feature = "udp"))]
        {
            protocol_err
        }
    } else if cfg!(feature = "direct-serial") && address.starts_with("serial:") {
        #[cfg(feature = "direct-serial")]
        {
            Ok(Box::new(direct_serial::open(&address["serial:".len()..])?))
        }
        #[cfg(not(feature = "direct-serial"))]
        {
            protocol_err
        }
    } else if address.starts_with("file:") {
        // Require the full "file:" scheme: the original matched bare "file",
        // so an address like "file" (no colon) panicked when slicing
        // `address["file:".len()..]` past the end of the string.
        Ok(Box::new(file::open(&address["file:".len()..])?))
    } else {
        protocol_err
    }
}
|
#[doc = "Register `C2EMR2` reader"]
pub type R = crate::R<C2EMR2_SPEC>;
#[doc = "Register `C2EMR2` writer"]
pub type W = crate::W<C2EMR2_SPEC>;
#[doc = "Field `MR32` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub type MR32_R = crate::BitReader<MR32_A>;
// NOTE(review): svd2rust-generated register API (see the register doc at the
// bottom of this file); changes are normally made in the SVD, not by hand.
#[doc = "CPU2 interrupt Mask on Direct Event input x+32\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MR32_A {
    #[doc = "0: Interrupt request line is masked"]
    Masked = 0,
    #[doc = "1: Interrupt request line is unmasked"]
    Unmasked = 1,
}
impl From<MR32_A> for bool {
#[inline(always)]
fn from(variant: MR32_A) -> Self {
variant as u8 != 0
}
}
impl MR32_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> MR32_A {
        // Maps the raw register bit to its enumerated meaning.
        match self.bits {
            false => MR32_A::Masked,
            true => MR32_A::Unmasked,
        }
    }
    #[doc = "Interrupt request line is masked"]
    #[inline(always)]
    pub fn is_masked(&self) -> bool {
        *self == MR32_A::Masked
    }
    #[doc = "Interrupt request line is unmasked"]
    #[inline(always)]
    pub fn is_unmasked(&self) -> bool {
        *self == MR32_A::Unmasked
    }
}
#[doc = "Field `MR32` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub type MR32_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O, MR32_A>;
impl<'a, REG, const O: u8> MR32_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    // Convenience setters that write the enumerated value into bit `O`.
    #[doc = "Interrupt request line is masked"]
    #[inline(always)]
    pub fn masked(self) -> &'a mut crate::W<REG> {
        self.variant(MR32_A::Masked)
    }
    #[doc = "Interrupt request line is unmasked"]
    #[inline(always)]
    pub fn unmasked(self) -> &'a mut crate::W<REG> {
        self.variant(MR32_A::Unmasked)
    }
}
#[doc = "Field `MR33` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR33_R;
#[doc = "Field `MR34` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR34_R;
#[doc = "Field `MR35` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR35_R;
#[doc = "Field `MR36` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR36_R;
#[doc = "Field `MR37` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR37_R;
#[doc = "Field `MR38` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR38_R;
#[doc = "Field `MR39` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR39_R;
#[doc = "Field `MR40` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR40_R;
#[doc = "Field `MR41` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR41_R;
#[doc = "Field `MR42` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR42_R;
#[doc = "Field `MR43` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR43_R;
#[doc = "Field `MR44` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR44_R;
#[doc = "Field `MR46` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR46_R;
#[doc = "Field `MR47` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR47_R;
#[doc = "Field `MR48` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR48_R;
#[doc = "Field `MR49` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR49_R;
#[doc = "Field `MR50` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR50_R;
#[doc = "Field `MR51` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR51_R;
#[doc = "Field `MR52` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR52_R;
#[doc = "Field `MR53` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR53_R;
#[doc = "Field `MR54` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR54_R;
#[doc = "Field `MR55` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR55_R;
#[doc = "Field `MR56` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR56_R;
#[doc = "Field `MR57` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR57_R;
#[doc = "Field `MR58` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR58_R;
#[doc = "Field `MR59` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR59_R;
#[doc = "Field `MR60` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR60_R;
#[doc = "Field `MR61` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR61_R;
#[doc = "Field `MR62` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR62_R;
#[doc = "Field `MR63` reader - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_R as MR63_R;
#[doc = "Field `MR33` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR33_W;
#[doc = "Field `MR34` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR34_W;
#[doc = "Field `MR35` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR35_W;
#[doc = "Field `MR36` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR36_W;
#[doc = "Field `MR37` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR37_W;
#[doc = "Field `MR38` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR38_W;
#[doc = "Field `MR39` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR39_W;
#[doc = "Field `MR40` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR40_W;
#[doc = "Field `MR41` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR41_W;
#[doc = "Field `MR42` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR42_W;
#[doc = "Field `MR43` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR43_W;
#[doc = "Field `MR44` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR44_W;
#[doc = "Field `MR46` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR46_W;
#[doc = "Field `MR47` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR47_W;
#[doc = "Field `MR48` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR48_W;
#[doc = "Field `MR49` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR49_W;
#[doc = "Field `MR50` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR50_W;
#[doc = "Field `MR51` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR51_W;
#[doc = "Field `MR52` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR52_W;
#[doc = "Field `MR53` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR53_W;
#[doc = "Field `MR54` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR54_W;
#[doc = "Field `MR55` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR55_W;
#[doc = "Field `MR56` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR56_W;
#[doc = "Field `MR57` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR57_W;
#[doc = "Field `MR58` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR58_W;
#[doc = "Field `MR59` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR59_W;
#[doc = "Field `MR60` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR60_W;
#[doc = "Field `MR61` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR61_W;
#[doc = "Field `MR62` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR62_W;
#[doc = "Field `MR63` writer - CPU2 interrupt Mask on Direct Event input x+32"]
pub use MR32_W as MR63_W;
impl R {
    // NOTE(review): svd2rust-generated read accessors, one per register bit.
    // Bit 13 (MR45) has no accessor — presumably reserved in this register;
    // confirm against the device reference manual.
    #[doc = "Bit 0 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr32(&self) -> MR32_R {
        MR32_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr33(&self) -> MR33_R {
        MR33_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr34(&self) -> MR34_R {
        MR34_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr35(&self) -> MR35_R {
        MR35_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr36(&self) -> MR36_R {
        MR36_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 5 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr37(&self) -> MR37_R {
        MR37_R::new(((self.bits >> 5) & 1) != 0)
    }
    #[doc = "Bit 6 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr38(&self) -> MR38_R {
        MR38_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr39(&self) -> MR39_R {
        MR39_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr40(&self) -> MR40_R {
        MR40_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr41(&self) -> MR41_R {
        MR41_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr42(&self) -> MR42_R {
        MR42_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr43(&self) -> MR43_R {
        MR43_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr44(&self) -> MR44_R {
        MR44_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr46(&self) -> MR46_R {
        MR46_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr47(&self) -> MR47_R {
        MR47_R::new(((self.bits >> 15) & 1) != 0)
    }
    #[doc = "Bit 16 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr48(&self) -> MR48_R {
        MR48_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr49(&self) -> MR49_R {
        MR49_R::new(((self.bits >> 17) & 1) != 0)
    }
    #[doc = "Bit 18 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr50(&self) -> MR50_R {
        MR50_R::new(((self.bits >> 18) & 1) != 0)
    }
    #[doc = "Bit 19 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr51(&self) -> MR51_R {
        MR51_R::new(((self.bits >> 19) & 1) != 0)
    }
    #[doc = "Bit 20 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr52(&self) -> MR52_R {
        MR52_R::new(((self.bits >> 20) & 1) != 0)
    }
    #[doc = "Bit 21 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr53(&self) -> MR53_R {
        MR53_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr54(&self) -> MR54_R {
        MR54_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr55(&self) -> MR55_R {
        MR55_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr56(&self) -> MR56_R {
        MR56_R::new(((self.bits >> 24) & 1) != 0)
    }
    #[doc = "Bit 25 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr57(&self) -> MR57_R {
        MR57_R::new(((self.bits >> 25) & 1) != 0)
    }
    #[doc = "Bit 26 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr58(&self) -> MR58_R {
        MR58_R::new(((self.bits >> 26) & 1) != 0)
    }
    #[doc = "Bit 27 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr59(&self) -> MR59_R {
        MR59_R::new(((self.bits >> 27) & 1) != 0)
    }
    #[doc = "Bit 28 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr60(&self) -> MR60_R {
        MR60_R::new(((self.bits >> 28) & 1) != 0)
    }
    #[doc = "Bit 29 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr61(&self) -> MR61_R {
        MR61_R::new(((self.bits >> 29) & 1) != 0)
    }
    #[doc = "Bit 30 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr62(&self) -> MR62_R {
        MR62_R::new(((self.bits >> 30) & 1) != 0)
    }
    #[doc = "Bit 31 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    pub fn mr63(&self) -> MR63_R {
        MR63_R::new(((self.bits >> 31) & 1) != 0)
    }
}
impl W {
    // NOTE(review): svd2rust-generated write accessors; each returns a proxy
    // for one register bit (the const generic is the bit offset). Bit 13
    // (MR45) has no accessor — presumably reserved; confirm against the
    // device reference manual.
    #[doc = "Bit 0 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr32(&mut self) -> MR32_W<C2EMR2_SPEC, 0> {
        MR32_W::new(self)
    }
    #[doc = "Bit 1 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr33(&mut self) -> MR33_W<C2EMR2_SPEC, 1> {
        MR33_W::new(self)
    }
    #[doc = "Bit 2 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr34(&mut self) -> MR34_W<C2EMR2_SPEC, 2> {
        MR34_W::new(self)
    }
    #[doc = "Bit 3 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr35(&mut self) -> MR35_W<C2EMR2_SPEC, 3> {
        MR35_W::new(self)
    }
    #[doc = "Bit 4 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr36(&mut self) -> MR36_W<C2EMR2_SPEC, 4> {
        MR36_W::new(self)
    }
    #[doc = "Bit 5 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr37(&mut self) -> MR37_W<C2EMR2_SPEC, 5> {
        MR37_W::new(self)
    }
    #[doc = "Bit 6 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr38(&mut self) -> MR38_W<C2EMR2_SPEC, 6> {
        MR38_W::new(self)
    }
    #[doc = "Bit 7 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr39(&mut self) -> MR39_W<C2EMR2_SPEC, 7> {
        MR39_W::new(self)
    }
    #[doc = "Bit 8 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr40(&mut self) -> MR40_W<C2EMR2_SPEC, 8> {
        MR40_W::new(self)
    }
    #[doc = "Bit 9 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr41(&mut self) -> MR41_W<C2EMR2_SPEC, 9> {
        MR41_W::new(self)
    }
    #[doc = "Bit 10 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr42(&mut self) -> MR42_W<C2EMR2_SPEC, 10> {
        MR42_W::new(self)
    }
    #[doc = "Bit 11 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr43(&mut self) -> MR43_W<C2EMR2_SPEC, 11> {
        MR43_W::new(self)
    }
    #[doc = "Bit 12 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr44(&mut self) -> MR44_W<C2EMR2_SPEC, 12> {
        MR44_W::new(self)
    }
    #[doc = "Bit 14 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr46(&mut self) -> MR46_W<C2EMR2_SPEC, 14> {
        MR46_W::new(self)
    }
    #[doc = "Bit 15 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr47(&mut self) -> MR47_W<C2EMR2_SPEC, 15> {
        MR47_W::new(self)
    }
    #[doc = "Bit 16 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr48(&mut self) -> MR48_W<C2EMR2_SPEC, 16> {
        MR48_W::new(self)
    }
    #[doc = "Bit 17 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr49(&mut self) -> MR49_W<C2EMR2_SPEC, 17> {
        MR49_W::new(self)
    }
    #[doc = "Bit 18 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr50(&mut self) -> MR50_W<C2EMR2_SPEC, 18> {
        MR50_W::new(self)
    }
    #[doc = "Bit 19 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr51(&mut self) -> MR51_W<C2EMR2_SPEC, 19> {
        MR51_W::new(self)
    }
    #[doc = "Bit 20 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr52(&mut self) -> MR52_W<C2EMR2_SPEC, 20> {
        MR52_W::new(self)
    }
    #[doc = "Bit 21 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr53(&mut self) -> MR53_W<C2EMR2_SPEC, 21> {
        MR53_W::new(self)
    }
    #[doc = "Bit 22 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr54(&mut self) -> MR54_W<C2EMR2_SPEC, 22> {
        MR54_W::new(self)
    }
    #[doc = "Bit 23 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr55(&mut self) -> MR55_W<C2EMR2_SPEC, 23> {
        MR55_W::new(self)
    }
    #[doc = "Bit 24 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr56(&mut self) -> MR56_W<C2EMR2_SPEC, 24> {
        MR56_W::new(self)
    }
    #[doc = "Bit 25 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr57(&mut self) -> MR57_W<C2EMR2_SPEC, 25> {
        MR57_W::new(self)
    }
    #[doc = "Bit 26 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr58(&mut self) -> MR58_W<C2EMR2_SPEC, 26> {
        MR58_W::new(self)
    }
    #[doc = "Bit 27 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr59(&mut self) -> MR59_W<C2EMR2_SPEC, 27> {
        MR59_W::new(self)
    }
    #[doc = "Bit 28 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr60(&mut self) -> MR60_W<C2EMR2_SPEC, 28> {
        MR60_W::new(self)
    }
    #[doc = "Bit 29 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr61(&mut self) -> MR61_W<C2EMR2_SPEC, 29> {
        MR61_W::new(self)
    }
    #[doc = "Bit 30 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr62(&mut self) -> MR62_W<C2EMR2_SPEC, 30> {
        MR62_W::new(self)
    }
    #[doc = "Bit 31 - CPU2 interrupt Mask on Direct Event input x+32"]
    #[inline(always)]
    #[must_use]
    pub fn mr63(&mut self) -> MR63_W<C2EMR2_SPEC, 31> {
        MR63_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "CPU2 EXTI event mask register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c2emr2::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c2emr2::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct C2EMR2_SPEC;
impl crate::RegisterSpec for C2EMR2_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`c2emr2::R`](R) reader structure"]
impl crate::Readable for C2EMR2_SPEC {}
#[doc = "`write(|w| ..)` method takes [`c2emr2::W`](W) writer structure"]
impl crate::Writable for C2EMR2_SPEC {
    // No write-1-to-clear or write-0-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets C2EMR2 to value 0"]
impl crate::Resettable for C2EMR2_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
mod schip8;
use schip8::SChip8;
use sdl2::{audio, event, keyboard::Keycode, pixels};
use std::{collections::HashMap, env, fs, io, time::Duration, time::SystemTime};
// Squarewave for audio output
struct SquareWave {
    // Phase advance per output sample (set below as tone_freq / sample_rate).
    phase_inc: f32,
    // Current position within the wave period, kept in [0, 1).
    phase: f32,
    // Peak amplitude of the generated wave.
    volume: f32,
}
impl audio::AudioCallback for SquareWave {
    // Samples are 32-bit floats.
    type Channel = f32;
    /// Fills `out` with a square wave: +volume while the phase is in the
    /// first half of the period, -volume in the second half.
    fn callback(&mut self, out: &mut [f32]) {
        for sample in out.iter_mut() {
            *sample = if self.phase <= 0.5 {
                self.volume
            } else {
                -self.volume
            };
            self.phase = (self.phase + self.phase_inc) % 1.0;
        }
    }
}
/// Entry point: loads the ROM named on the command line, sets up SDL2
/// audio/video, and runs the CHIP-8 fetch/decode/draw loop until the
/// window is closed or the emulator halts.
fn main() -> Result<(), io::Error> {
    // Exactly one argument (the ROM path) is required.
    let args: Vec<String> = env::args().collect();
    if args.len() == 1 {
        println!("Usage: chip8-oxidized <file-path>");
        return Err(io::Error::new(io::ErrorKind::Other, "Other"));
    }
    let error_message = format!("Unable to open {}", args[1]);
    let file: Vec<u8> = fs::read(&args[1]).expect(error_message.as_str());
    println!("{} is {} byte long", &args[1], file.len());
    // Move the ROM into the emulator (previously this cloned it needlessly).
    let mut schip8 = SChip8::new(file);
    let sdl2_context = sdl2::init().expect("Failed to initialize SDL");
    let sdl2_audio_system = sdl2_context.audio().unwrap();
    let sdl2_video_system = sdl2_context.video().unwrap();
    println!(
        "SDL2 version: {}.{}.{}",
        sdl2::version::version(),
        sdl2::version::revision(),
        sdl2::version::revision_number()
    );
    // Map the digit row and the numeric keypad to CHIP-8 keys 0x0-0x9,
    // and A-F to keys 0xA-0xF.
    let key_bindings: HashMap<Keycode, usize> = [
        (Keycode::Num0, 0x0),
        (Keycode::Num1, 0x1),
        (Keycode::Num2, 0x2),
        (Keycode::Num3, 0x3),
        (Keycode::Num4, 0x4),
        (Keycode::Num5, 0x5),
        (Keycode::Num6, 0x6),
        (Keycode::Num7, 0x7),
        (Keycode::Num8, 0x8),
        (Keycode::Num9, 0x9),
        (Keycode::Kp0, 0x0),
        (Keycode::Kp1, 0x1),
        (Keycode::Kp2, 0x2),
        (Keycode::Kp3, 0x3),
        (Keycode::Kp4, 0x4),
        (Keycode::Kp5, 0x5),
        (Keycode::Kp6, 0x6),
        (Keycode::Kp7, 0x7),
        (Keycode::Kp8, 0x8),
        (Keycode::Kp9, 0x9),
        (Keycode::A, 0xA),
        (Keycode::B, 0xB),
        (Keycode::C, 0xC),
        (Keycode::D, 0xD),
        (Keycode::E, 0xE),
        (Keycode::F, 0xF)
    ].iter().cloned().collect();
    let spec = audio::AudioSpecDesired {
        channels: Some(1),
        freq: Some(44100),
        samples: None,
    };
    // 440 Hz square wave used when the CHIP-8 sound timer is active.
    let audio_device = sdl2_audio_system
        .open_playback(None, &spec, |spec| SquareWave {
            phase: 0.0,
            phase_inc: 440.0 / spec.freq as f32,
            volume: 0.10,
        })
        .unwrap();
    let window_width: u32 = 1280;
    let window_height: u32 = 640;
    let window = sdl2_video_system
        .window(
            ["chip8-oxidized", &args[1]].join(" - ").as_str(),
            window_width,
            window_height,
        )
        .resizable()
        .build()
        .unwrap();
    let mut canvas = window
        .into_canvas()
        .accelerated()
        .present_vsync()
        .build()
        .unwrap();
    let texture_creator = canvas.texture_creator();
    let mut event_pump = sdl2_context.event_pump().unwrap();
    let mut redraw = true;
    // Last key reported to the emulator core.
    // NOTE(review): it is never cleared on KeyUp — confirm that is intended.
    let mut key = 0;
    let mut time = SystemTime::now();
    'running: loop {
        for event in event_pump.poll_iter() {
            use event::Event::*;
            match event {
                Quit { .. } => {
                    break 'running;
                }
                KeyDown { keycode, .. } => {
                    if let Some(code) = keycode {
                        key = 0;
                        if let Some(&binding) = key_bindings.get(&code) {
                            schip8.key_pad[binding] = true;
                            key = binding;
                        }
                    }
                }
                KeyUp { keycode, .. } => {
                    if let Some(code) = keycode {
                        if let Some(&binding) = key_bindings.get(&code) {
                            schip8.key_pad[binding] = false;
                        }
                    }
                }
                _ => {}
            }
        }
        // `run` returns false when the emulator wants to stop.
        if !schip8.run(key, &mut redraw) {
            break;
        }
        // Delay and sound timers tick down at ~60 Hz (every 16 ms).
        let end = SystemTime::now().duration_since(time).unwrap();
        if end.as_millis() >= 16 {
            if schip8.dt > 0 {
                schip8.dt -= 1;
            }
            if schip8.st > 0 {
                schip8.st -= 1;
                audio_device.resume();
                if schip8.st == 0 {
                    audio_device.pause();
                }
            }
            time = SystemTime::now();
        }
        if redraw {
            canvas.clear();
            // The texture is rebuilt on every redraw; the screen dimensions
            // presumably change at runtime (SCHIP hi-res mode) — confirm
            // before hoisting this out of the loop.
            let mut texture = texture_creator
                .create_texture_streaming(
                    pixels::PixelFormatEnum::RGB24,
                    schip8.screen_width as u32,
                    schip8.screen_height as u32,
                )
                .unwrap();
            let num_pixels = schip8.screen_width * schip8.screen_height;
            let mut texture_data: Vec<u8> = vec![0; num_pixels * 3];
            for i in 0..num_pixels {
                // Monochrome: a set CHIP-8 pixel is white, clear is black.
                let color = if schip8.screen[i] == 1 { 0xFF } else { 0x00 };
                texture_data[i * 3] = color;
                texture_data[i * 3 + 1] = color;
                texture_data[i * 3 + 2] = color;
            }
            texture
                .update(None, &texture_data, (schip8.screen_width * 3) as usize)
                .unwrap();
            canvas.copy(&texture, None, None).unwrap();
            canvas.present();
            redraw = false;
        }
        // Arbitrarily chosen duration of 1ms
        std::thread::sleep(Duration::from_millis(1));
    }
    Ok(())
}
|
pub mod selfupdate;
pub mod watch;
#[cfg(windows)]
pub mod windows;
use anyhow::{anyhow, Result};
use log::{info, warn};
use pahkat_client::{
config::RepoRecord, package_store::SharedStoreConfig, PackageKey, PackageStatus, PackageStore,
};
use std::convert::TryFrom;
use std::sync::Arc;
use std::time::Duration;
use tokio::time;
/// Errors that can occur while initializing the service (logging, paths, IO).
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// Underlying filesystem or stream failure.
    #[error("IO error")]
    Io(#[from] std::io::Error),
    /// Failure resolving a platform path (e.g. the log directory).
    #[error("Path error")]
    Path(#[from] pathos::Error),
    /// A global logger was already installed when `setup_logger` ran.
    #[error("Set logger error")]
    SetLoggerError(#[from] log::SetLoggerError),
}
/// Installs the global logger: INFO level by default, DEBUG for the
/// `pahkat_rpc` and `pahkat_client` targets, mirrored to stdout and to
/// `<log dir>/<name>.log`.
///
/// Fails if the log directory cannot be resolved/created, the log file
/// cannot be opened, or a logger is already installed.
pub fn setup_logger(name: &str) -> Result<(), Error> {
    let log_path = pahkat_client::defaults::log_path()?;
    std::fs::create_dir_all(&log_path)?;
    fern::Dispatch::new()
        .format(|out, message, record| {
            // "[<UTC timestamp> <LEVEL> <target>] <message>"
            out.finish(format_args!(
                "[{} {:<5} {}] {}",
                chrono::Utc::now().to_rfc3339_opts(chrono::SecondsFormat::Millis, true),
                record.level(),
                record.target(),
                message
            ))
        })
        .level(log::LevelFilter::Info)
        .level_for("pahkat_rpc", log::LevelFilter::Debug)
        .level_for("pahkat_client", log::LevelFilter::Debug)
        .chain(std::io::stdout())
        .chain(fern::log_file(log_path.join(format!("{}.log", name)))?)
        .apply()?;
    log::debug!("logging initialized");
    log::debug!("Log path: {}", log_path.display());
    Ok(())
}
|
// NOTE(review): svd2rust-generated — regenerate from the SVD rather than
// hand-editing these field reader aliases.
#[doc = "Register `ITLINE11` reader"]
pub type R = crate::R<ITLINE11_SPEC>;
#[doc = "Field `DMAMUX` reader - DMAMUX"]
pub type DMAMUX_R = crate::BitReader;
#[doc = "Field `DMA1_CH4` reader - DMA1_CH4"]
pub type DMA1_CH4_R = crate::BitReader;
#[doc = "Field `DMA1_CH5` reader - DMA1_CH5"]
pub type DMA1_CH5_R = crate::BitReader;
#[doc = "Field `DMA1_CH6` reader - DMA1_CH6"]
pub type DMA1_CH6_R = crate::BitReader;
#[doc = "Field `DMA1_CH7` reader - DMA1_CH7"]
pub type DMA1_CH7_R = crate::BitReader;
// Generated accessors: each extracts one interrupt-pending bit from the
// raw register value.
impl R {
    #[doc = "Bit 0 - DMAMUX"]
    #[inline(always)]
    pub fn dmamux(&self) -> DMAMUX_R {
        DMAMUX_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - DMA1_CH4"]
    #[inline(always)]
    pub fn dma1_ch4(&self) -> DMA1_CH4_R {
        DMA1_CH4_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - DMA1_CH5"]
    #[inline(always)]
    pub fn dma1_ch5(&self) -> DMA1_CH5_R {
        DMA1_CH5_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - DMA1_CH6"]
    #[inline(always)]
    pub fn dma1_ch6(&self) -> DMA1_CH6_R {
        DMA1_CH6_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - DMA1_CH7"]
    #[inline(always)]
    pub fn dma1_ch7(&self) -> DMA1_CH7_R {
        DMA1_CH7_R::new(((self.bits >> 4) & 1) != 0)
    }
}
// NOTE(review): svd2rust-generated register spec — read-only (no Writable
// impl), resets to 0.
#[doc = "interrupt line 11 status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`itline11::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct ITLINE11_SPEC;
impl crate::RegisterSpec for ITLINE11_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`itline11::R`](R) reader structure"]
impl crate::Readable for ITLINE11_SPEC {}
#[doc = "`reset()` method sets ITLINE11 to value 0"]
impl crate::Resettable for ITLINE11_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use coverage::{FunctionCov, ProcessCov};
use coverage::RangeCov;
use coverage::ScriptCov;
use range_tree::RangeTree;
use range_tree::RangeTreeArena;
use rayon::prelude::*;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::iter::Peekable;
/// Merges several process coverage reports into one.
///
/// Scripts are grouped by URL and each group is merged (in parallel via
/// rayon); the merged scripts get fresh, stable ids assigned in URL order.
/// Returns `None` only when `processes` is empty.
pub fn merge_processes(mut processes: Vec<ProcessCov>) -> Option<ProcessCov> {
    if processes.len() <= 1 {
        return processes.pop();
    }
    // Group by URL; BTreeMap gives a deterministic (sorted) URL order.
    let mut url_to_scripts: BTreeMap<String, Vec<ScriptCov>> = BTreeMap::new();
    for process_cov in processes {
        for script_cov in process_cov.result {
            url_to_scripts
                .entry(script_cov.url.clone())
                .or_insert_with(Vec::new)
                .push(script_cov);
        }
    }
    // Assign ids by URL order, then merge each group.
    let grouped: Vec<(usize, Vec<ScriptCov>)> = url_to_scripts
        .into_iter()
        .enumerate()
        .map(|(script_id, (_, scripts))| (script_id, scripts))
        .collect();
    // `into_par_iter` consumes the groups, avoiding the per-group clone the
    // previous `par_iter` + `to_vec` version paid.
    let result: Vec<ScriptCov> = grouped
        .into_par_iter()
        .map(|(script_id, scripts)| {
            // Safe unwrap: every group contains at least one script.
            let mut merged: ScriptCov = merge_scripts(scripts).unwrap();
            merged.script_id = script_id.to_string();
            merged
        })
        .collect();
    Some(ProcessCov { result })
}
/// Merges several coverage entries for the same script (same URL) into one.
///
/// Functions are grouped by their root range (the first entry of `ranges`)
/// and each group is merged. The merged script keeps the first input's
/// `script_id` and `url`. Returns `None` only when `scripts` is empty.
pub fn merge_scripts(mut scripts: Vec<ScriptCov>) -> Option<ScriptCov> {
    if scripts.len() <= 1 {
        return scripts.pop();
    }
    let (script_id, url) = {
        let first: &ScriptCov = &scripts[0];
        (first.script_id.clone(), first.url.clone())
    };
    let mut range_to_funcs: BTreeMap<Range, Vec<FunctionCov>> = BTreeMap::new();
    for script_cov in scripts {
        for func_cov in script_cov.functions {
            // The first range of a function covers the whole function body.
            let root_range = {
                let root_range_cov: &RangeCov = &func_cov.ranges[0];
                Range { start: root_range_cov.start_offset, end: root_range_cov.end_offset }
            };
            range_to_funcs
                .entry(root_range)
                .or_insert_with(Vec::new)
                .push(func_cov);
        }
    }
    let functions: Vec<FunctionCov> = range_to_funcs
        .into_iter()
        .map(|(_, funcs)| merge_functions(funcs).unwrap())
        .collect();
    Some(ScriptCov { script_id, url, functions })
}
/// A half-open byte-offset interval `[start, end)` used as a grouping key.
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
struct Range {
    start: usize,
    end: usize,
}
impl Ord for Range {
    /// Orders by ascending `start`; ties are broken by *descending* `end`,
    /// so an enclosing range sorts before the ranges nested inside it.
    fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
        self.start
            .cmp(&other.start)
            .then_with(|| other.end.cmp(&self.end))
    }
}
impl PartialOrd for Range {
    /// Delegates to `Ord` so the two orderings can never diverge
    /// (the previous hand-written copy duplicated the comparison logic,
    /// which clippy flags as a non-canonical `PartialOrd` impl).
    fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
/// Merges several coverage entries for the same function into one.
/// Returns `None` only when `funcs` is empty.
pub fn merge_functions(mut funcs: Vec<FunctionCov>) -> Option<FunctionCov> {
    if funcs.len() <= 1 {
        return funcs.pop();
    }
    let function_name = funcs[0].function_name.clone();
    // Arena sized for the worst case: every input range becomes a tree node.
    let rta_capacity: usize = funcs.iter().fold(0, |acc, func| acc + func.ranges.len());
    let rta = RangeTreeArena::with_capacity(rta_capacity);
    let mut trees: Vec<&mut RangeTree> = Vec::new();
    for func in funcs {
        if let Some(tree) = RangeTree::from_sorted_ranges(&rta, &func.ranges) {
            trees.push(tree);
        }
    }
    let merged = RangeTree::normalize(&rta, merge_range_trees(&rta, trees).unwrap());
    let ranges = merged.to_ranges();
    // A single range with count 0 is treated as "not block coverage" —
    // presumably mirroring V8's reporting convention; confirm against the
    // V8 coverage docs before relying on it.
    let is_block_coverage: bool = !(ranges.len() == 1 && ranges[0].count == 0);
    Some(FunctionCov { function_name, ranges, is_block_coverage })
}
/// Merges trees covering the same span by summing their deltas and
/// recursively merging their children. Returns `None` when `trees` is empty.
fn merge_range_trees<'a>(rta: &'a RangeTreeArena<'a>, mut trees: Vec<&'a mut RangeTree<'a>>) -> Option<&'a mut RangeTree<'a>> {
    if trees.len() <= 1 {
        return trees.pop();
    }
    // All inputs are assumed to share the first tree's span — TODO confirm
    // the callers uphold this.
    let (start, end) = {
        let first = &trees[0];
        (first.start, first.end)
    };
    let delta: i64 = trees.iter().fold(0, |acc, tree| acc + tree.delta);
    let children = merge_range_tree_children(rta, trees);
    Some(rta.alloc(RangeTree::new(start, end, delta, children)))
}
/// All child trees (paired with the index of the parent they came from)
/// whose span begins at `offset`.
struct StartEvent<'a> {
    offset: usize,
    trees: Vec<(usize, &'a mut RangeTree<'a>)>,
}
/// Drains the children of every parent tree and regroups them into
/// `StartEvent`s, one per distinct start offset, in ascending offset order
/// (guaranteed by the `BTreeMap`). Each child is tagged with the index of
/// the parent it was drained from.
fn into_start_events<'a>(trees: Vec<&'a mut RangeTree<'a>>) -> Vec<StartEvent<'a>> {
    let mut result: BTreeMap<usize, Vec<(usize, &'a mut RangeTree<'a>)>> = BTreeMap::new();
    for (parent_index, tree) in trees.into_iter().enumerate() {
        for child in tree.children.drain(..) {
            result
                .entry(child.start)
                // `or_insert_with` avoids allocating a Vec when the key
                // already exists (the old `or_insert(Vec::new())` always did).
                .or_insert_with(Vec::new)
                .push((parent_index, child));
        }
    }
    result
        .into_iter()
        .map(|(offset, trees)| StartEvent { offset, trees })
        .collect()
}
/// Priority-merge of precomputed start events (`queue`, already sorted by
/// offset) with at most one dynamically built `pending` event.
struct StartEventQueue<'a> {
    pending: Option<StartEvent<'a>>,
    queue: Peekable<::std::vec::IntoIter<StartEvent<'a>>>,
}
impl<'a> StartEventQueue<'a> {
    /// Wraps an offset-sorted event list; starts with no pending event.
    pub fn new(queue: Vec<StartEvent<'a>>) -> StartEventQueue<'a> {
        StartEventQueue {
            pending: None,
            queue: queue.into_iter().peekable(),
        }
    }
    /// Replaces the pending event with a fresh, empty one at `offset`.
    pub(crate) fn set_pending_offset(&mut self, offset: usize) -> () {
        self.pending = Some(StartEvent { offset, trees: Vec::new() });
    }
    /// Appends `tree` to the pending event, if one exists; no-op otherwise.
    pub(crate) fn push_pending_tree(&mut self, tree: (usize, &'a mut RangeTree<'a>)) -> () {
        if let Some(pending) = self.pending.as_mut() {
            pending.trees.push(tree);
        }
    }
}
impl<'a> Iterator for StartEventQueue<'a> {
    type Item = StartEvent<'a>;
    /// Yields events in ascending offset order, merging the `pending` event
    /// (only if it actually carries trees) with the precomputed queue; when
    /// both sides share an offset, their trees are combined into one event.
    fn next(&mut self) -> Option<<Self as Iterator>::Item> {
        // An empty pending event is ignored entirely.
        let pending_offset: Option<usize> = match &self.pending {
            Some(ref start_event) if !start_event.trees.is_empty() => Some(start_event.offset),
            _ => None,
        };
        match pending_offset {
            Some(pending_offset) => {
                let queue_offset = self.queue.peek().map(|start_event| start_event.offset);
                match queue_offset {
                    None => self.pending.take(),
                    Some(queue_offset) => {
                        if pending_offset < queue_offset {
                            self.pending.take()
                        } else {
                            let mut result = self.queue.next().unwrap();
                            if pending_offset == queue_offset {
                                // Same offset: fold the pending trees into
                                // the queued event.
                                let pending_trees = self.pending.take().unwrap().trees;
                                result.trees.extend(pending_trees.into_iter())
                            }
                            Some(result)
                        }
                    }
                }
            }
            None => self.queue.next(),
        }
    }
}
/// Sweeps the children of several parent trees (already covering the same
/// span) left-to-right and merges them into one child forest.
///
/// Children that exactly span the current "open range" are kept flat;
/// children nested strictly inside it are collected per parent and later
/// wrapped in a synthetic zero-delta node covering the open range, so every
/// parent contributes a tree with the same span before the recursive merge.
/// Children extending past the open range are split and the right half is
/// re-queued as a pending start event.
fn merge_range_tree_children<'a>(rta: &'a RangeTreeArena<'a>, parent_trees: Vec<&'a mut RangeTree<'a>>) -> Vec<&'a mut RangeTree<'a>> {
    // Per-parent children that exactly match an open range.
    let mut flat_children: Vec<Vec<&'a mut RangeTree<'a>>> = Vec::with_capacity(parent_trees.len())
    ;
    // Per-parent synthetic wrappers for strictly-nested children.
    let mut wrapped_children: Vec<Vec<&'a mut RangeTree<'a>>> = Vec::with_capacity(parent_trees.len());
    // The interval currently being swept, if any.
    let mut open_range: Option<Range> = None;
    for parent_tree in parent_trees.iter() {
        flat_children.push(Vec::new());
        wrapped_children.push(Vec::new());
    }
    let mut start_event_queue = StartEventQueue::new(into_start_events(parent_trees));
    // Children collected for the current open range, keyed by parent index.
    let mut parent_to_nested: HashMap<usize, Vec<&'a mut RangeTree<'a>>> = HashMap::new();
    while let Some(event) = start_event_queue.next() {
        // Close the open range if this event starts at or past its end,
        // flushing the nested children into wrappers.
        open_range = if let Some(open_range) = open_range {
            if open_range.end <= event.offset {
                for (parent_index, nested) in parent_to_nested {
                    wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
                        open_range.start,
                        open_range.end,
                        0,
                        nested,
                    )));
                }
                parent_to_nested = HashMap::new();
                None
            } else {
                Some(open_range)
            }
        } else {
            None
        };
        match open_range {
            Some(open_range) => {
                // Event falls inside the open range: clip each tree to it.
                for (parent_index, mut tree) in event.trees {
                    let child = if tree.end > open_range.end {
                        // Overhangs the open range: split and re-queue the
                        // right part at the open range's end.
                        let (left, right) = RangeTree::split(rta, tree, open_range.end);
                        start_event_queue.push_pending_tree((parent_index, right));
                        left
                    } else {
                        tree
                    };
                    parent_to_nested
                        .entry(parent_index)
                        .or_insert(Vec::new())
                        .push(child);
                }
            }
            None => {
                // No open range: start one spanning the longest tree here.
                let mut open_range_end: usize = event.offset + 1;
                for (_, ref tree) in &event.trees {
                    open_range_end = if tree.end > open_range_end { tree.end } else { open_range_end };
                }
                for (parent_index, tree) in event.trees {
                    // Trees spanning the whole open range stay flat ...
                    if tree.end == open_range_end {
                        flat_children[parent_index].push(tree);
                        continue;
                    }
                    // ... shorter ones become nested children.
                    parent_to_nested
                        .entry(parent_index)
                        .or_insert(Vec::new())
                        .push(tree);
                }
                start_event_queue.set_pending_offset(open_range_end);
                open_range = Some(Range { start: event.offset, end: open_range_end });
            }
        }
    }
    // Flush the final open range, if the sweep ended while one was active.
    if let Some(open_range) = open_range {
        for (parent_index, nested) in parent_to_nested {
            wrapped_children[parent_index].push(rta.alloc(RangeTree::new(
                open_range.start,
                open_range.end,
                0,
                nested,
            )));
        }
    }
    // Re-merge each parent's flat and wrapped children back into one
    // offset-sorted forest.
    let child_forests: Vec<Vec<&'a mut RangeTree<'a>>> = flat_children.into_iter()
        .zip(wrapped_children.into_iter())
        .map(|(flat, wrapped)| merge_children_lists(flat, wrapped))
        .collect();
    let events = get_child_events_from_forests(&child_forests);
    let mut child_forests: Vec<Peekable<::std::vec::IntoIter<&'a mut RangeTree<'a>>>> = child_forests.into_iter()
        .map(|forest| forest.into_iter().peekable())
        .collect();
    let mut result: Vec<&'a mut RangeTree<'a>> = Vec::new();
    // For each distinct start offset, merge the matching tree from every
    // parent forest (if present) into a single node.
    for event in events.iter() {
        let mut matching_trees: Vec<&'a mut RangeTree<'a>> = Vec::new();
        for (parent_index, children) in child_forests.iter_mut().enumerate() {
            let next_tree: Option<&'a mut RangeTree<'a>> = {
                if children.peek().map_or(false, |tree| tree.start == *event) {
                    children.next()
                } else {
                    None
                }
            };
            if let Some(next_tree) = next_tree {
                matching_trees.push(next_tree);
            }
        }
        if let Some(mut merged) = merge_range_trees(rta, matching_trees) {
            result.push(merged);
        }
    }
    result
}
/// Collects every distinct start and end offset appearing in the forests,
/// sorted ascending (courtesy of `BTreeSet`).
///
/// Takes a slice rather than `&Vec<_>` (clippy `ptr_arg`); existing callers
/// passing `&Vec` still work through deref coercion.
fn get_child_events_from_forests<'a>(forests: &[Vec<&'a mut RangeTree<'a>>]) -> BTreeSet<usize> {
    let mut event_set: BTreeSet<usize> = BTreeSet::new();
    for forest in forests {
        for tree in forest {
            event_set.insert(tree.start);
            event_set.insert(tree.end);
        }
    }
    event_set
}
// TODO: itertools?
// https://play.integer32.com/?gist=ad2cd20d628e647a5dbdd82e68a15cb6&version=stable&mode=debug&edition=2015
fn merge_children_lists<'a>(a: Vec<&'a mut RangeTree<'a>>, b: Vec<&'a mut RangeTree<'a>>) -> Vec<&'a mut RangeTree<'a>> {
    // Merge two start-sorted child lists into one. On equal starts the tree
    // from `b` is taken first, matching the original pairwise merge.
    let mut merged: Vec<&'a mut RangeTree<'a>> = Vec::new();
    let mut left = a.into_iter().peekable();
    let mut right = b.into_iter().peekable();
    loop {
        // Decide which side to take next; copy the compared offsets out so
        // no borrow of the iterators outlives the decision.
        let take_left = match (left.peek(), right.peek()) {
            (Some(l), Some(r)) => Some(l.start < r.start),
            (Some(_), None) => Some(true),
            (None, Some(_)) => Some(false),
            (None, None) => None,
        };
        match take_left {
            Some(true) => merged.push(left.next().unwrap()),
            Some(false) => merged.push(right.next().unwrap()),
            None => break,
        }
    }
    merged
}
#[cfg(test)]
mod tests {
    // Fixture-driven tests for `merge_processes`: each case feeds two
    // process reports for the same script and checks the merged ranges
    // (offsets and summed/split counts) exactly.
    use coverage::FunctionCov;
    use coverage::ProcessCov;
    use coverage::RangeCov;
    use coverage::ScriptCov;
    use super::merge_processes;
    // No input processes -> nothing to merge.
    #[test]
    fn empty() {
        let inputs: Vec<ProcessCov> = Vec::new();
        let expected: Option<ProcessCov> = None;
        assert_eq!(merge_processes(inputs), expected);
    }
    // Single root range per input: counts simply add (1 + 2 = 3).
    #[test]
    fn two_flat_trees() {
        let inputs: Vec<ProcessCov> = vec![
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 1 },
                                ],
                            }
                        ],
                    }
                ]
            },
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 2 },
                                ],
                            }
                        ],
                    }
                ]
            }
        ];
        let expected: Option<ProcessCov> = Some(ProcessCov {
            result: vec![
                ScriptCov {
                    script_id: String::from("0"),
                    url: String::from("/lib.js"),
                    functions: vec![
                        FunctionCov {
                            function_name: String::from("lib"),
                            is_block_coverage: true,
                            ranges: vec![
                                RangeCov { start_offset: 0, end_offset: 9, count: 3 },
                            ],
                        }
                    ],
                }
            ]
        });
        assert_eq!(merge_processes(inputs), expected);
    }
    // Identical child span [3,6) in both inputs: root and child counts add.
    #[test]
    fn two_trees_with_matching_children() {
        let inputs: Vec<ProcessCov> = vec![
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 10 },
                                    RangeCov { start_offset: 3, end_offset: 6, count: 1 },
                                ],
                            }
                        ],
                    }
                ]
            },
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 20 },
                                    RangeCov { start_offset: 3, end_offset: 6, count: 2 },
                                ],
                            }
                        ],
                    }
                ]
            }
        ];
        let expected: Option<ProcessCov> = Some(ProcessCov {
            result: vec![
                ScriptCov {
                    script_id: String::from("0"),
                    url: String::from("/lib.js"),
                    functions: vec![
                        FunctionCov {
                            function_name: String::from("lib"),
                            is_block_coverage: true,
                            ranges: vec![
                                RangeCov { start_offset: 0, end_offset: 9, count: 30 },
                                RangeCov { start_offset: 3, end_offset: 6, count: 3 },
                            ],
                        }
                    ],
                }
            ]
        });
        assert_eq!(merge_processes(inputs), expected);
    }
    // Children [2,5) and [4,7) overlap: the merge splits them at the
    // overlap boundaries and sums counts per segment.
    #[test]
    fn two_trees_with_partially_overlapping_children() {
        let inputs: Vec<ProcessCov> = vec![
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 10 },
                                    RangeCov { start_offset: 2, end_offset: 5, count: 1 },
                                ],
                            }
                        ],
                    }
                ]
            },
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 20 },
                                    RangeCov { start_offset: 4, end_offset: 7, count: 2 },
                                ],
                            }
                        ],
                    }
                ]
            }
        ];
        let expected: Option<ProcessCov> = Some(ProcessCov {
            result: vec![
                ScriptCov {
                    script_id: String::from("0"),
                    url: String::from("/lib.js"),
                    functions: vec![
                        FunctionCov {
                            function_name: String::from("lib"),
                            is_block_coverage: true,
                            ranges: vec![
                                RangeCov { start_offset: 0, end_offset: 9, count: 30 },
                                RangeCov { start_offset: 2, end_offset: 5, count: 21 },
                                RangeCov { start_offset: 4, end_offset: 5, count: 3 },
                                RangeCov { start_offset: 5, end_offset: 7, count: 12 },
                            ],
                        }
                    ],
                }
            ]
        });
        assert_eq!(merge_processes(inputs), expected);
    }
    // Complementary children [1,5) and [5,8) whose merged counts equal the
    // parent's merged count collapse back into the parent range.
    #[test]
    fn two_trees_with_with_complementary_children_summing_to_the_same_count() {
        let inputs: Vec<ProcessCov> = vec![
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 1 },
                                    RangeCov { start_offset: 1, end_offset: 8, count: 6 },
                                    RangeCov { start_offset: 1, end_offset: 5, count: 5 },
                                    RangeCov { start_offset: 5, end_offset: 8, count: 7 },
                                ],
                            }
                        ],
                    }
                ]
            },
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 9, count: 4 },
                                    RangeCov { start_offset: 1, end_offset: 8, count: 8 },
                                    RangeCov { start_offset: 1, end_offset: 5, count: 9 },
                                    RangeCov { start_offset: 5, end_offset: 8, count: 7 },
                                ],
                            }
                        ],
                    }
                ]
            }
        ];
        let expected: Option<ProcessCov> = Some(ProcessCov {
            result: vec![
                ScriptCov {
                    script_id: String::from("0"),
                    url: String::from("/lib.js"),
                    functions: vec![
                        FunctionCov {
                            function_name: String::from("lib"),
                            is_block_coverage: true,
                            ranges: vec![
                                RangeCov { start_offset: 0, end_offset: 9, count: 5 },
                                RangeCov { start_offset: 1, end_offset: 8, count: 14 },
                            ],
                        }
                    ],
                }
            ]
        });
        assert_eq!(merge_processes(inputs), expected);
    }
    // Staggered nesting chains ([0,4) vs [1,6)⊃[2,5)) merge into a
    // deepened chain with per-layer summed counts.
    #[test]
    fn merges_a_similar_sliding_chain_a_bc() {
        let inputs: Vec<ProcessCov> = vec![
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 7, count: 10 },
                                    RangeCov { start_offset: 0, end_offset: 4, count: 1 },
                                ],
                            }
                        ],
                    }
                ]
            },
            ProcessCov {
                result: vec![
                    ScriptCov {
                        script_id: String::from("0"),
                        url: String::from("/lib.js"),
                        functions: vec![
                            FunctionCov {
                                function_name: String::from("lib"),
                                is_block_coverage: true,
                                ranges: vec![
                                    RangeCov { start_offset: 0, end_offset: 7, count: 20 },
                                    RangeCov { start_offset: 1, end_offset: 6, count: 11 },
                                    RangeCov { start_offset: 2, end_offset: 5, count: 2 },
                                ],
                            }
                        ],
                    }
                ]
            }
        ];
        let expected: Option<ProcessCov> = Some(ProcessCov {
            result: vec![
                ScriptCov {
                    script_id: String::from("0"),
                    url: String::from("/lib.js"),
                    functions: vec![
                        FunctionCov {
                            function_name: String::from("lib"),
                            is_block_coverage: true,
                            ranges: vec![
                                RangeCov { start_offset: 0, end_offset: 7, count: 30 },
                                RangeCov { start_offset: 0, end_offset: 6, count: 21 },
                                RangeCov { start_offset: 1, end_offset: 5, count: 12 },
                                RangeCov { start_offset: 2, end_offset: 4, count: 3 },
                            ],
                        }
                    ],
                }
            ]
        });
        assert_eq!(merge_processes(inputs), expected);
    }
}
|
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate derive_new;
pub mod http;
pub mod rtm;
mod id;
pub use self::id::*;
mod timestamp;
pub use self::timestamp::Timestamp;
fn serialize_comma_separated<T, S>(items: &[T], serializer: S) -> Result<S::Ok, S::Error>
where
    S: ::serde::Serializer,
    T: ::serde::Serialize + ::std::fmt::Display,
{
    use std::fmt::Write;
    // Render `items` as "a,b,c": a comma before every element but the first,
    // so nothing needs trimming afterwards. Capacity guesses one ID plus a
    // separator per item.
    let mut output = String::with_capacity(items.len() * (ID_LENGTH + 1));
    for (index, item) in items.iter().enumerate() {
        if index > 0 {
            output.push(',');
        }
        let _ = write!(output, "{}", item);
    }
    serializer.serialize_str(&output)
}
|
#![recursion_limit = "1024"]
#![allow(clippy::eval_order_dependence)]
// extern crate wasm_bindgen;
// extern crate web_sys;
// extern crate yew;
// extern crate yew_router;
use wasm_bindgen::prelude::*;
use web_logger;
pub mod app;
pub mod components;
pub mod routes;
use app::App;
// Use `wee_alloc` as the global allocator.
// NOTE(review): presumably chosen to shrink the wasm binary at some
// allocation-speed cost — confirm the trade-off is still wanted.
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
// Called by our JS entry point to run the example
/// wasm-bindgen entry point: initializes console logging via `web_logger`
/// and mounts the root `App` component.
#[wasm_bindgen]
pub fn run_app() -> Result<(), JsValue> {
    web_logger::init();
    yew::start_app::<App>();
    Ok(())
}
|
// use chrono::*;
// use nom::*;
pub mod parse_fns;
// This file contains the various structs used to represent the AST.
/// The name of a relation, variable, or attribute.
#[derive(Clone,Debug,Hash,Eq,PartialEq,Ord,PartialOrd)]
pub struct Identifier(pub String);
impl FromStr for Identifier {
    type Err = ();
    /// Infallible: any string becomes an identifier verbatim.
    fn from_str(s:&str) -> Result<Identifier,Self::Err> {
        Ok(Identifier(s.to_owned()))
    }
}
impl From<String> for Identifier {
    /// Wraps an owned string without copying.
    fn from(s:String) -> Identifier{
        Identifier(s)
    }
}
impl fmt::Display for Identifier {
    /// Prints the raw identifier text.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        format.write_str(&self.0)
    }
}
/// A logic variable: either the anonymous hole `_` or a named variable.
#[derive(Clone,Debug,Hash,Eq,PartialEq,Ord,PartialOrd)]
pub enum Variable {
    Hole,
    Name(Identifier)
}
impl Variable {
    /// Parses a variable token. Reserved words (`true`, `false`, `output`)
    /// are rejected, `_` yields the anonymous hole, anything else becomes a
    /// named variable.
    pub fn from_str(input:& str) -> Result<Variable,()> {
        match input {
            "true" | "false" | "output" => Err(()),
            "_" => Ok(Variable::Hole),
            name => Ok(Variable::Name(Identifier::from_str(name).unwrap())),
        }
    }
}
use std::fmt;
impl fmt::Display for Variable {
    /// `_` for the hole, otherwise the variable's name.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Variable::Hole => write!(format, "_"),
            Variable::Name(ref name) => write!(format, "{}", name),
        }
    }
}
/// Expressions -
/// Expressions need to
/// 1) be able to represent an evaluable function
/// 2) Propagate the optimal type up to the top for evaluation
/// 3) Propogate a list of Vars up to the top.
/// Errors produced while converting raw parser input into AST values.
#[derive(PartialEq,Eq,Clone,Debug)]
pub enum ParseErrors {
    // A numeric token failed to parse as i64/f64.
    NumericParseFailed,
    // Input bytes were not valid UTF-8.
    UTF8ConversionError
}
/// A parsed numeric literal: integer or floating point.
#[derive(PartialEq,Clone,Debug)]
pub enum Number {
    Int(i64),
    Float(f64)
}
impl fmt::Display for Number {
    /// Uses the default integer/float rendering.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Number::Int(x) => write!(format, "{}", x),
            Number::Float(x) => write!(format, "{}", x),
        }
    }
}
/// Binary operators: arithmetic, bitwise/logical, and comparisons.
#[derive(PartialEq,Clone,Debug)]
pub enum BinaryOperator {
    Add,
    Subtract,
    Multiply,
    Divide,
    Modulus,
    And,
    Or,
    Xor,
    LessThan,
    LessThanEq,
    Eq,
    NotEq,
    GreaterThanEq,
    GreaterThan
}
impl fmt::Display for BinaryOperator {
    /// Prints the operator's source symbol (inverse of its `FromStr`).
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        let symbol = match *self {
            BinaryOperator::Add => "+",
            BinaryOperator::Subtract => "-",
            BinaryOperator::Multiply => "*",
            BinaryOperator::Divide => "/",
            BinaryOperator::Modulus => "%",
            BinaryOperator::And => "&",
            BinaryOperator::Or => "|",
            BinaryOperator::Xor => "^",
            BinaryOperator::LessThan => "<",
            BinaryOperator::LessThanEq => "<=",
            BinaryOperator::Eq => "==",
            BinaryOperator::NotEq => "!=",
            BinaryOperator::GreaterThanEq => ">=",
            BinaryOperator::GreaterThan => ">",
        };
        write!(format, "{}", symbol)
    }
}
use std::str::FromStr;
impl FromStr for BinaryOperator {
    type Err = ();
    /// Maps an operator token to its variant; unknown tokens yield `Err(())`.
    /// Kept as a flat table mirroring the `Display` impl above.
    fn from_str(s:&str) -> Result<BinaryOperator,Self::Err> {
        match s {
            "+" => Ok(BinaryOperator::Add),
            "-" => Ok(BinaryOperator::Subtract),
            "*" => Ok(BinaryOperator::Multiply),
            "/" => Ok(BinaryOperator::Divide),
            "%" => Ok(BinaryOperator::Modulus),
            "&" => Ok(BinaryOperator::And),
            "|" => Ok(BinaryOperator::Or),
            "^" => Ok(BinaryOperator::Xor),
            "<" => Ok(BinaryOperator::LessThan),
            "<=" => Ok(BinaryOperator::LessThanEq),
            "==" => Ok(BinaryOperator::Eq),
            "!=" => Ok(BinaryOperator::NotEq),
            ">=" => Ok(BinaryOperator::GreaterThanEq),
            ">" => Ok(BinaryOperator::GreaterThan),
            _ => Err(())
        }
    }
}
/// Unary operators: arithmetic negation (`-`) and boolean negation (`!`).
#[derive(PartialEq,Clone,Debug)]
pub enum UnaryOperator {
    ArithNegate,
    BoolNegate
}
impl FromStr for UnaryOperator {
    type Err = ();
    /// Reads a unary operator from the *first* character of `s` only.
    /// NOTE(review): trailing characters are ignored ("-x" parses as `-`);
    /// presumably the parser only ever passes the operator token — confirm.
    fn from_str(s:&str) -> Result<UnaryOperator,Self::Err> {
        match s.chars().next() {
            Some('-') => Ok(UnaryOperator::ArithNegate),
            Some('!') => Ok(UnaryOperator::BoolNegate),
            _ => Err(())
        }
    }
}
impl fmt::Display for UnaryOperator {
    /// Prints the operator's source symbol.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        let symbol = match *self {
            UnaryOperator::ArithNegate => "-",
            UnaryOperator::BoolNegate => "!",
        };
        write!(format, "{}", symbol)
    }
}
/// An expression tree node.
#[derive(PartialEq,Clone,Debug)]
pub enum Expr {
    // A literal constant.
    Value(Literal),
    // A reference to a variable (or the hole).
    Variable(Variable),
    // Explicit parenthesised grouping, preserved for printing.
    Paren(Box<Expr>),
    BinaryResult((Box<Expr>,BinaryOperator,Box<Expr>)),
    UnaryResult((UnaryOperator,Box<Expr>))
}
impl Expr {
    /// Applies an optional unary operator to `exp`, folding where possible:
    /// arithmetic negation of numeric literals is folded immediately, a
    /// binary expression is parenthesised before wrapping, and applying an
    /// operator to an existing unary result strips that result.
    /// NOTE(review): the last rule cancels regardless of *which* operators
    /// are involved (e.g. `-` applied to `!x` yields `x`) — confirm intended.
    pub fn to_unary(op:Option<UnaryOperator>,exp:Expr) -> Expr {
        match (op,exp) {
            (Some(UnaryOperator::ArithNegate),Expr::Value(Literal::Float(x))) => Expr::Value(Literal::Float(-x)),
            (Some(UnaryOperator::ArithNegate),Expr::Value(Literal::Int(x))) => Expr::Value(Literal::Int(-x)),
            (Some(o),Expr::BinaryResult(x)) => Expr::UnaryResult((o,Box::new(Expr::Paren(Box::new(Expr::BinaryResult(x)))))),
            (Some(_),Expr::UnaryResult((_,x))) => *x,
            (Some(o),e) => Expr::UnaryResult((o,Box::new(e))),
            (None,e) => e
        }
    }
    /// Wraps `exp` in parentheses unless it is already parenthesised.
    pub fn to_paren(exp:Expr) -> Expr {
        match exp {
            Expr::Paren(_) => exp,
            _ => Expr::Paren(Box::new(exp))
        }
    }
}
impl fmt::Display for Expr {
    /// Pretty-prints the expression; `Paren` nodes restore grouping.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Expr::Value(ref n) => write!(format, "{}", n),
            Expr::Variable(ref v) => write!(format, "{}", v),
            Expr::Paren(ref inner) => write!(format, "({})", inner),
            Expr::BinaryResult((ref lhs, ref op, ref rhs)) => write!(format, "{} {} {}", lhs, op, rhs),
            Expr::UnaryResult((ref op, ref operand)) => write!(format, "{}{}", op, operand),
        }
    }
}
/// Binds a variable to the value of an expression: `value = expr`.
#[derive(PartialEq,Clone,Debug)]
pub struct Equation {
    pub value: Variable,
    pub expr: Expr
}
impl fmt::Display for Equation {
    /// Prints the binding as `<variable> = <expression>`.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        write!(format, "{lhs} = {rhs}", lhs = self.value, rhs = self.expr)
    }
}
use std::collections::Bound;
/// Constrains a variable to an interval with optionally open, closed, or
/// unbounded ends.
#[derive(PartialEq,Clone,Debug)]
pub struct SemiRange {
    pub val: Variable,
    pub lower: Bound<Term>,
    pub upper: Bound<Term>
}
impl SemiRange {
    /// Builds a `SemiRange` for `var` from optional bound terms.
    /// `lbt`/`ubt` carry the bracket tokens: `[` / `]` mean inclusive,
    /// anything else exclusive; a missing term means unbounded on that side.
    pub fn new(var:Variable,lbt:&[u8],lb:Option<Term>,ubt:&[u8],ub:Option<Term>) -> SemiRange {
        let lower_bound = match lb {
            Some(l) if lbt == b"[" => Bound::Included(l),
            Some(l) => Bound::Excluded(l),
            None => Bound::Unbounded,
        };
        let upper_bound = match ub {
            Some(u) if ubt == b"]" => Bound::Included(u),
            Some(u) => Bound::Excluded(u),
            None => Bound::Unbounded,
        };
        SemiRange { val: var, lower: lower_bound, upper: upper_bound }
    }
}
impl fmt::Display for SemiRange {
    /// Renders interval notation, e.g. `x in [1,5)`; an unbounded side is
    /// printed with an empty bound (`(,` or `)`).
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        let _ = write!(format,"{} in ", self.val);
        let _ = match self.lower {
            Bound::Unbounded => write!(format, "(,"),
            Bound::Included(ref x) => write!(format, "[{},",x),
            Bound::Excluded(ref x) => write!(format, "({},",x)
        };
        let _ = match self.upper {
            Bound::Unbounded => write!(format, ")"),
            Bound::Included(ref x) => write!(format, "{}]",x),
            Bound::Excluded(ref x) => write!(format, "{})",x)
        };
        // Final empty write satisfies the `fmt::Result` return type.
        write!(format, "")
    }
}
/// A constant value appearing directly in source.
#[derive(PartialEq,Clone,Debug)]
pub enum Literal {
    Int(i64),
    Float(f64),
    String(String),
    Bool(bool)
}
impl fmt::Display for Literal {
    /// Prints a literal in source form. Strings are re-quoted, escaping
    /// backslashes *before* quotes so quote-escapes are not double-escaped.
    /// Floats print with 64 decimal places.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match self {
            &Literal::Int(ref x) => {write!(format,"{}",x)},
            &Literal::Float(ref x) => {write!(format,"{:.64}",x)},
            &Literal::String(ref x) => {write!(format,"\"{}\"",x
            .replace("\\","\\\\")
            .replace("\"","\\\""))},
            &Literal::Bool(ref x) => {write!(format,"{}",x)}
        }
    }
}
/// A term is either a literal constant or a variable.
#[derive(PartialEq,Clone,Debug)]
pub enum Term {
    Literal(Literal),
    Variable(Variable)
}
impl fmt::Display for Term {
    /// Delegates to the wrapped literal or variable.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Term::Literal(ref lit) => write!(format, "{}", lit),
            Term::Variable(ref var) => write!(format, "{}", var),
        }
    }
}
/// A flat relational fact: `head(term1, term2, ...)`.
#[derive(PartialEq,Clone,Debug)]
pub struct RowFact {
    pub head: Identifier,
    pub terms: Vec<Term>
}
impl fmt::Display for RowFact {
    /// Prints `head(t1,t2,...)` with commas between terms.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        let _ = write!(format,"{}(", self.head);
        for (index, term) in self.terms.iter().enumerate() {
            if index == 0 {
                let _ = write!(format, "{}", term);
            } else {
                let _ = write!(format, ",{}", term);
            }
        }
        write!(format,")")
    }
}
// Compound [e {a1 v1 a2 [e2 {a3 v3 a4 [e3 a4 v4 t3]} t2]} t1]
// Leaf: [e a v t] -> [e {a v} t]
/// A value position in a tree fact: either a plain term or a nested tree.
#[derive(PartialEq,Clone,Debug)]
pub enum TreeTerm {
    Term(Term),
    Tree(Box<TreeFact>)
}
impl fmt::Display for TreeTerm {
    /// Delegates to the wrapped term or nested tree fact.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            TreeTerm::Term(ref term) => write!(format, "{}", term),
            TreeTerm::Tree(ref tree) => write!(format, "{}", tree),
        }
    }
}
/// An entity/attribute-value/time fact: `[entity {a1 v1, a2 v2} t]`.
#[derive(PartialEq,Clone,Debug)]
pub struct TreeFact {
    pub entity: Term,
    pub avs: Vec<(Term,TreeTerm)>,
    pub t: Term
}
impl fmt::Display for TreeFact {
    /// Renders `[entity {a1 v1, a2 v2} t]`; the braces appear only when
    /// there is more than one attribute/value pair.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        let _ = write!(format,"[{}",self.entity);
        let mut first = true;
        if self.avs.len() > 1 {
            let _ = write!(format," {{");
        }
        for &(ref at,ref val) in self.avs.iter() {
            if !first {
                let _ = write!(format,",");
            }
            // BUG FIX: was `first = true;`, so `first` never flipped and the
            // separating comma above was dead code (compare the analogous
            // loops in `RowFact`/`Relation`, which set it to false).
            first = false;
            let _ = write!(format," {} {}",at,val);
        }
        if self.avs.len() > 1 {
            let _ = write!(format,"}}");
        }
        write!(format," {}]",self.t)
    }
}
/// A ground statement: a single predicate terminated by `;`.
#[derive(PartialEq,Clone,Debug)]
pub struct Fact(pub Pred);
impl fmt::Display for Fact {
    /// A fact is its predicate followed by the closing `;`.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        write!(format, "{pred};", pred = self.0)
    }
}
/// A predicate: any clause that may appear in a rule body or as a fact.
#[derive(PartialEq,Clone,Debug)]
pub enum Pred {
    RowFact(RowFact),
    TreeFact(TreeFact),
    Equation(Equation),
    SemiRange(SemiRange)
}
impl fmt::Display for Pred {
    /// Formats by delegating to whichever predicate kind is wrapped.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            Pred::RowFact(ref p) => write!(format, "{}", p),
            Pred::TreeFact(ref p) => write!(format, "{}", p),
            Pred::Equation(ref p) => write!(format, "{}", p),
            Pred::SemiRange(ref p) => write!(format, "{}", p),
        }
    }
}
/// A rule: `head(v1,v2) :- pred1, pred2;`.
#[derive(PartialEq,Clone,Debug)]
pub struct Relation {
    /// Rule name.
    pub head:Identifier,
    /// Head variables (the rule's parameters).
    pub vars: Vec<Variable>,
    /// Body predicates (field is named `preps` in the original; kept for
    /// compatibility with existing callers).
    pub preps: Vec<Pred>
}
impl fmt::Display for Relation {
    /// Renders the rule as `head(v1,v2) :- p1, p2;`.
    ///
    /// Formatting errors are now propagated with `?` instead of being
    /// discarded via `let _ = ...`, so a failing formatter stops output
    /// immediately and the caller sees the error.
    fn fmt(&self, format: &mut fmt::Formatter) -> fmt::Result {
        write!(format, "{}(", self.head)?;
        let mut first = true;
        for v in self.vars.iter() {
            if !first {
                write!(format, ",")?;
            }
            first = false;
            write!(format, "{}", v)?;
        }
        write!(format, ") :- ")?;
        first = true;
        for p in self.preps.iter() {
            if !first {
                write!(format, ", ")?;
            }
            first = false;
            write!(format, "{}", p)?;
        }
        write!(format, ";")
    }
}
/// Quickcheck support: `Arbitrary` (generate + shrink) implementations for
/// every AST type, so property tests can build and minimize random programs.
#[cfg(test)]
mod test_support {
    use quickcheck::{Arbitrary,Gen,StdGen,empty_shrinker,single_shrinker};
    use rand::{ThreadRng,thread_rng};
    use super::*;
    // A generator one size smaller than `g`: used to bound recursion depth
    // when generating nested expressions and tree facts.
    fn smaller<G:Gen>(g:&G) -> StdGen<ThreadRng> {
        StdGen::new(thread_rng(),g.size()-1)
    }
    // Clamps `x` into the inclusive range [low, high].
    fn between<T:Ord>(low:T,high:T,x:T) -> T {
        if x < low {
            low
        } else if x > high {
            high
        } else {x}
    }
    impl Arbitrary for Identifier {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("ID>");
            let s = g.size();
            // Drop leading digit (and NUL) chars so the identifier starts
            // with a letter.
            Identifier(g.gen_ascii_chars().skip_while(|c| c >= &'0' && c <= &'9' && *c != (0 as char)).take(s).collect())
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            if self.0.len() <= 1 {
                empty_shrinker()
            } else {
                // Only keep shrunken strings that are still valid
                // identifiers: non-empty and starting with an ASCII letter.
                Box::new(self.0.shrink().filter(|x| {
                    if x.len() > 0 {
                        if let Some(c) = x.chars().next() {
                            (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z')
                        } else {false}
                    } else {false}
                })
                .map(|x| Identifier(x)))
            }
        }
    }
    impl Arbitrary for Variable {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("VR>");
            let hole:bool = g.gen();
            if hole {
                Variable::Hole
            } else {
                Variable::Name(Arbitrary::arbitrary(g))
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match self {
                &Variable::Hole => empty_shrinker(),
                &Variable::Name(ref n) => {
                    // A named variable shrinks towards `Hole` first, then
                    // by shrinking its name.
                    let chain = single_shrinker(Variable::Hole).chain(n.shrink().map(Variable::Name));
                    Box::new(chain)
                }
            }
        }
    }
    impl Arbitrary for Number {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("NR>");
            if g.gen() {
                Number::Int(Arbitrary::arbitrary(g))
            } else {
                Number::Float(Arbitrary::arbitrary(g))
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match self {
                &Number::Int(ref x) => {
                    Box::new(x.shrink().map(Number::Int))
                },
                &Number::Float(ref x) => {
                    Box::new(x.shrink().map(Number::Float))
                }
            }
        }
    }
    impl Arbitrary for BinaryOperator {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("BO>");
            // Uniform choice over every binary operator.
            let choices = vec![BinaryOperator::Add,
                BinaryOperator::Subtract,
                BinaryOperator::Multiply,
                BinaryOperator::Divide,
                BinaryOperator::Modulus,
                BinaryOperator::And,
                BinaryOperator::Or,
                BinaryOperator::Xor,
                BinaryOperator::LessThan,
                BinaryOperator::LessThanEq,
                BinaryOperator::Eq,
                BinaryOperator::NotEq,
                BinaryOperator::GreaterThanEq,
                BinaryOperator::GreaterThan
            ];
            g.choose(&choices).unwrap().clone()
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            // Operators are atomic; nothing sensible to shrink to.
            empty_shrinker()
        }
    }
    impl Arbitrary for UnaryOperator {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("UAO>");
            let choices = vec![UnaryOperator::BoolNegate,UnaryOperator::ArithNegate];
            g.choose(&choices).unwrap().clone()
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            empty_shrinker()
        }
    }
    impl Arbitrary for Expr {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("AE>");
            // Recursive variants draw from a smaller generator so the
            // expression tree terminates.
            let mut ng = smaller(g);
            let size = g.size();
            // NOTE(review): if `g.size()` were ever 0 this would be
            // gen_range(0,0), which panics — relies on size >= 1.
            match g.gen_range(0,between(0,5,size)) {
                0 => {Expr::Value(Arbitrary::arbitrary(g))},
                1 => {Expr::Variable(Arbitrary::arbitrary(g))},
                2 => {Expr::to_paren(Arbitrary::arbitrary(&mut ng))},
                3 => {Expr::BinaryResult((Box::new(Arbitrary::arbitrary(&mut ng)),Arbitrary::arbitrary(&mut ng),Box::new(Arbitrary::arbitrary(&mut ng))))},
                4 => {Expr::to_unary(Arbitrary::arbitrary(g),Arbitrary::arbitrary(&mut ng))},
                _ => panic!("Outside of specified range")
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match *self {
                Expr::Value(ref x) => {Box::new(x.shrink().map(Expr::Value))},
                Expr::Variable(ref x) => {Box::new(x.shrink().map(Expr::Variable))},
                Expr::Paren(ref x) => {Box::new(x.shrink().map(Box::new).map(Expr::Paren))},
                Expr::BinaryResult((ref l,ref o, ref r)) => {
                    // Shrink the unboxed triple, then re-box the operands.
                    Box::new((l.as_ref().clone(),o.clone(),r.as_ref().clone()).shrink()
                        .map(|(a,b,c)| (Box::new(a),b,Box::new(c)))
                        .map(Expr::BinaryResult))
                },
                Expr::UnaryResult((ref o,ref r)) => {
                    Box::new((o.clone(),r.as_ref().clone()).shrink()
                        .map(|(o,r)| (o,Box::new(r)))
                        .map(Expr::UnaryResult))},
            }
        }
    }
    impl Arbitrary for Equation {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("EQ>");
            Equation{value:Arbitrary::arbitrary(g),expr:Arbitrary::arbitrary(g)}
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            // Shrink value and expression together as a pair.
            Box::new(
                (self.value.clone(),self.expr.clone()).shrink().map(|(v,e)| {Equation{value:v,expr:e}})
            )
        }
    }
    impl Arbitrary for SemiRange {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("SR>");
            // Each bound is independently Included / Excluded / Unbounded.
            let lower = match g.gen_range(0,3) {
                0 => {Bound::Included(Arbitrary::arbitrary(g))},
                1 => {Bound::Excluded(Arbitrary::arbitrary(g))},
                2 => {Bound::Unbounded},
                _ => panic!()
            };
            let upper = match g.gen_range(0,3) {
                0 => {Bound::Included(Arbitrary::arbitrary(g))},
                1 => {Bound::Excluded(Arbitrary::arbitrary(g))},
                2 => {Bound::Unbounded},
                _ => panic!()
            };
            SemiRange{val:Arbitrary::arbitrary(g),lower:lower,upper:upper}
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            // One arm per combination of bound kinds; each shrinks the
            // value together with whichever endpoints are present. The
            // bound kinds themselves are preserved.
            match (self.lower.clone(),self.upper.clone()) {
                (Bound::Unbounded,Bound::Unbounded) => {
                    Box::new(self.val.shrink()
                        .map(|v|SemiRange{val:v,lower:Bound::Unbounded,upper:Bound::Unbounded}))
                },
                (Bound::Unbounded,Bound::Included(ref u)) => {
                    Box::new((self.val.clone(),u.clone()).shrink()
                        .map(|(v,u)|
                            SemiRange{val:v,
                                lower:Bound::Unbounded,
                                upper:Bound::Included(u)}))
                },
                (Bound::Unbounded,Bound::Excluded(ref u)) => {
                    Box::new((self.val.clone(),u.clone()).shrink()
                        .map(|(v,u)|
                            SemiRange{val:v,
                                lower:Bound::Unbounded,
                                upper:Bound::Excluded(u)}))
                },
                (Bound::Included(ref l),Bound::Unbounded) => {
                    Box::new((self.val.clone(),l.clone()).shrink()
                        .map(|(v,l)|
                            SemiRange{val:v,
                                lower:Bound::Included(l),
                                upper:Bound::Unbounded}))
                },
                (Bound::Included(ref l),Bound::Included(ref u)) => {
                    Box::new((self.val.clone(),l.clone(),u.clone()).shrink()
                        .map(|(v,l,u)|
                            SemiRange{val:v,
                                lower:Bound::Included(l),
                                upper:Bound::Included(u)}))
                },
                (Bound::Included(ref l),Bound::Excluded(ref u)) => {
                    Box::new((self.val.clone(),l.clone(),u.clone()).shrink()
                        .map(|(v,l,u)|
                            SemiRange{val:v,
                                lower:Bound::Included(l),
                                upper:Bound::Excluded(u)}))
                },
                (Bound::Excluded(ref l),Bound::Unbounded) => {
                    Box::new((self.val.clone(),l.clone()).shrink()
                        .map(|(v,l)|
                            SemiRange{val:v,
                                lower:Bound::Excluded(l),
                                upper:Bound::Unbounded}))
                },
                (Bound::Excluded(ref l),Bound::Included(ref u)) => {
                    Box::new((self.val.clone(),l.clone(),u.clone()).shrink()
                        .map(|(v,l,u)|
                            SemiRange{val:v,
                                lower:Bound::Excluded(l),
                                upper:Bound::Included(u)}))
                },
                (Bound::Excluded(ref l),Bound::Excluded(ref u)) => {
                    Box::new((self.val.clone(),l.clone(),u.clone()).shrink()
                        .map(|(v,l,u)|
                            SemiRange{val:v,
                                lower:Bound::Excluded(l),
                                upper:Bound::Excluded(u)}))
                },
            }
        }
    }
    impl Arbitrary for Literal {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("LR>");
            match g.gen_range(0,4) {
                0 => {Literal::Int(Arbitrary::arbitrary(g))},
                1 => {Literal::Float(Arbitrary::arbitrary(g))},
                2 => {Literal::String(Arbitrary::arbitrary(g))},
                3 => {Literal::Bool(Arbitrary::arbitrary(g))},
                _ => panic!()
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match *self {
                Literal::Int(ref x) => {Box::new(x.shrink().map(Literal::Int))},
                Literal::Float(ref x) => {Box::new(x.shrink().map(Literal::Float))},
                Literal::String(ref x) => {Box::new(x.shrink().map(Literal::String))},
                Literal::Bool(ref x) => {Box::new(x.shrink().map(Literal::Bool))},
            }
        }
    }
    impl Arbitrary for Term {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("T>");
            if g.gen() {
                Term::Literal(Arbitrary::arbitrary(g))
            } else {
                Term::Variable(Arbitrary::arbitrary(g))
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match *self {
                Term::Literal(ref x) => {Box::new(x.shrink().map(Term::Literal))},
                Term::Variable(ref x) => {Box::new(x.shrink().map(Term::Variable))},
            }
        }
    }
    impl Arbitrary for RowFact {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("RF>");
            let mut ret = RowFact{head:Arbitrary::arbitrary(g),terms:Arbitrary::arbitrary(g)};
            // Guarantee at least one term so the generated fact is well-formed.
            if ret.terms.len() == 0 {
                ret.terms = vec![Term::Variable(Variable::Hole)];
            }
            ret
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            // Shrink head and terms together, rejecting empty term lists.
            Box::new((self.head.clone(),self.terms.clone()).shrink()
                .filter(|&(_,ref t)| t.len() > 0)
                .map(|(h,t)| RowFact{head:h,terms:t}))
        }
    }
    impl Arbitrary for TreeTerm {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("TT>");
            // Nest another tree only while the size budget allows.
            if g.gen() && g.size() > 1 {
                let mut ng = smaller(g);
                TreeTerm::Tree(Box::new(Arbitrary::arbitrary(&mut ng)))
            } else {
                TreeTerm::Term(Arbitrary::arbitrary(g))
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match *self {
                TreeTerm::Term(ref x) => {Box::new(x.shrink().map(TreeTerm::Term))},
                TreeTerm::Tree(ref x) => {Box::new(x.as_ref()
                    .shrink()
                    .map(|b| TreeTerm::Tree(Box::new(b))))},
            }
        }
    }
    impl Arbitrary for TreeFact {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("TF>");
            let mut ret = TreeFact{entity:Arbitrary::arbitrary(g),
                avs:Arbitrary::arbitrary(g),
                t: Arbitrary::arbitrary(g)};
            // Guarantee at least one attribute/value pair.
            if ret.avs.len() == 0 {
                ret.avs = vec!((Term::Variable(Variable::Hole),TreeTerm::Term(Term::Variable(Variable::Hole))))
            }
            ret
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            Box::new((self.entity.clone(),self.avs.clone(),self.t.clone())
                .shrink()
                .filter(|&(_,ref a,_)| a.len() > 0)
                .map(|(e,av,t)| TreeFact{entity:e,avs:av,t:t}))
        }
    }
    impl Arbitrary for Pred {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("PD>");
            match g.gen_range(0,4) {
                0 => {Pred::RowFact(Arbitrary::arbitrary(g))},
                1 => {Pred::TreeFact(Arbitrary::arbitrary(g))},
                2 => {Pred::Equation(Arbitrary::arbitrary(g))},
                3 => {Pred::SemiRange(Arbitrary::arbitrary(g))},
                _ => panic!()
            }
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            match *self {
                Pred::RowFact(ref x) => {Box::new(x.shrink().map(Pred::RowFact))},
                Pred::TreeFact(ref x) => {Box::new(x.shrink().map(Pred::TreeFact))},
                Pred::Equation(ref x) => {Box::new(x.shrink().map(Pred::Equation))},
                Pred::SemiRange(ref x) => {Box::new(x.shrink().map(Pred::SemiRange))}
            }
        }
    }
    impl Arbitrary for Fact {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("F>");
            Fact(Arbitrary::arbitrary(g))
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            Box::new(self.0.shrink().map(Fact))
        }
    }
    impl Arbitrary for Relation {
        fn arbitrary<G:Gen>(g: &mut G) -> Self {
            //print!("RL>");
            let mut t = Relation{head:Arbitrary::arbitrary(g),
                vars:Arbitrary::arbitrary(g),
                preps:Arbitrary::arbitrary(g)};
            // Guarantee at least one head variable and one body predicate.
            if t.vars.len() == 0 {
                t.vars = vec![Variable::Hole];
            }
            if t.preps.len() == 0 {
                t.preps = vec![Arbitrary::arbitrary(g)];
            }
            t
        }
        fn shrink(&self) -> Box<Iterator<Item=Self>> {
            Box::new((self.head.clone(),self.vars.clone(),self.preps.clone()).shrink()
                .filter(|&(_,ref v,ref p)| {v.len() > 0 && p.len() > 0})
                .map(|(h,v,p)| Relation{head:h,vars:v,preps:p}))
        }
    }
}
|
use std::io::BufRead;
use std::env;
use std::fs::File;
use std::io::BufReader;
use std::io;
use std::time::Instant;
use sudoku::solver;
/// Entry point: `prog <csv-file> <row-count>` — reads quiz/solution pairs,
/// solves every quiz, and prints read/solve timings to stderr.
fn main() {
    let args: Vec<String> = env::args().collect();
    let (file, rows) = (&args[1], args[2].parse::<usize>().unwrap());

    eprintln!("Start read...");
    let timer = Instant::now();
    let (quizzes, solutions) = read(file, rows).expect("Can't read.");
    let read_time = timer.elapsed();

    eprintln!("Start resolve...");
    let timer = Instant::now();
    solve(quizzes, solutions);
    let solve_time = timer.elapsed();

    eprintln!("{}ms\t{}ms", read_time.as_millis(), solve_time.as_millis());
}
/// Reads `rows` quiz/solution pairs from the CSV `file`.
///
/// Layout: one header line (skipped), then one line per row containing the
/// quiz digits, a comma, and the solution digits (`solver::N_BOARD` digits
/// each). Panics on malformed/short lines (the `unwrap` on `line.next()`),
/// matching the original behavior.
///
/// Takes `&str` instead of `&String`; existing `&String` call sites still
/// work via deref coercion.
fn read(file: &str, rows: usize) -> io::Result<(Box<[solver::Board]>, Box<[solver::Board]>)> {
    let mut quizzes: Vec<solver::Board> = Vec::with_capacity(rows);
    let mut solutions: Vec<solver::Board> = Vec::with_capacity(rows);
    let f = File::open(file)?;
    let mut reader = BufReader::new(f);
    // Skip the header line.
    let mut buffer = String::new();
    reader.read_line(&mut buffer)?;
    for _i in 0..rows {
        // Reuse the buffer instead of allocating a fresh String per line.
        buffer.clear();
        reader.read_line(&mut buffer)?;
        let mut line = buffer.chars();
        // Quiz digits.
        let mut quiz: solver::Board = [0; solver::N_BOARD];
        for cell in quiz.iter_mut() {
            *cell = (line.next().unwrap() as u8) - b'0';
        }
        quizzes.push(quiz);
        // Separating comma.
        line.next();
        // Solution digits.
        let mut solution: solver::Board = [0; solver::N_BOARD];
        for cell in solution.iter_mut() {
            *cell = (line.next().unwrap() as u8) - b'0';
        }
        solutions.push(solution);
    }
    Ok((quizzes.into_boxed_slice(), solutions.into_boxed_slice()))
}
/// Solves every quiz and panics if any solver call errors or produces a
/// board that does not match the expected solution.
fn solve(quizzes: Box<[solver::Board]>, solutions: Box<[solver::Board]>) {
    for (i, quiz) in quizzes.iter().enumerate() {
        // A single match replaces the is_err()/err().unwrap()/unwrap()
        // triple; panic messages are unchanged.
        match solver::solve(*quiz) {
            Err(e) => panic!("Invalid answer. [err={}]", e),
            Ok(a) => {
                if !valid(a, solutions[i]) {
                    panic!("Invalid answer. [index={}]", i);
                }
                // _export(a);
            }
        }
    }
}
/// Returns true when `answer` matches `solution` cell-for-cell.
fn valid(answer: solver::Board, solution: solver::Board) -> bool {
    // Boards have identical fixed length, so a zipped comparison covers
    // every cell; `all` short-circuits on the first mismatch, like the
    // original indexed loop did.
    answer.iter().zip(solution.iter()).all(|(a, s)| a == s)
}
/// Debug helper: prints a board as one line of concatenated digit values.
fn _export(answer: solver::Board) {
    let rendered: String = answer.iter().map(|c| c.to_string()).collect();
    println!("{}", rendered)
}
|
pub use self::opcode::*;
pub use self::ty::*;
pub use self::value::*;
pub use self::context::*;
pub use self::module::*;
pub use self::attributes::*;
pub use self::block::*;
pub use self::function::*;
pub use self::passes::*;
pub mod opcode;
pub mod ty;
pub mod value;
pub mod context;
pub mod module;
pub mod attributes;
pub mod block;
pub mod function;
pub mod passes;
/// An enumeration for the kinds of linkage for global values.
// `repr(C)` with implicit discriminants: the variant order is ABI-relevant
// (see `linkage_ints_are_correct` in the test module) — do not reorder.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum Linkage
{
    /// Externally visible function.
    External = 0,
    /// Available for inspection, not emission.
    AvailableExternally,
    /// Keep one copy of function when linking (inline).
    LinkOnceAny,
    /// Same as `LinkOnceAny`, but only replaced by something equivalent.
    LinkOnceODR,
    /// Keep one copy of named function when linking (weak).
    WeakAny,
    /// Same as `WeakAny`, but only replaced by something equivalent.
    WeakODR,
    /// Special purpose, only applies to global arrays.
    Appending,
    /// Rename collisions when linking (static functions).
    Internal,
    /// Like `Internal`, but omit from symbol table.
    Private,
    /// NOTE(review): undocumented in the original — presumably LLVM's
    /// external-weak linkage; confirm against the LLVM docs.
    ExternalWeak,
    /// Tentative definitions.
    Common,
}
/// Atomic ordering.
///
/// NOTE(review): explicit values (with 3 skipped) appear to mirror an
/// LLVM-C atomic-ordering enum — confirm against the LLVM-C headers.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum AtomicOrdering
{
    NotAtomic = 0,
    Unordered = 1,
    Monotonic = 2,
    // 3 is not implemented yet.
    Acquire = 4,
    Release = 5,
    AcquireRelease = 6,
    SequentiallyConsistent = 7,
}
/// Synchronization scope.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum SynchronizationScope
{
    /// Scope limited to the current thread.
    SingleThread = 0,
    /// Scope spanning threads.
    CrossThread = 1,
}
/// Thread local mode.
///
/// NOTE(review): variant names match the standard TLS access models
/// (general-dynamic, local-dynamic, initial-exec, local-exec) — confirm
/// the mapping against the LLVM-C headers.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum ThreadLocalMode
{
    NotThreadLocal = 0,
    GeneralDynamic,
    LocalDynamic,
    InitialExec,
    LocalExec,
}
/// Float predicate kind.
///
/// NOTE(review): the Ordered*/Unordered* split and the 0..=15 values
/// presumably mirror LLVM's `fcmp` real predicates — confirm against the
/// LLVM LangRef / C headers.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum FloatPredicateKind
{
    False = 0,
    OrderedAndEqual = 1,
    OrderedGreaterThan = 2,
    OrderedGreaterThanOrEqual = 3,
    OrderedLessThan = 4,
    OrderedLessThanOrEqual = 5,
    OrderedUnequal = 6,
    Ordered = 7,
    Unordered = 8,
    UnorderedOrEqual = 9,
    UnorderedOrGreaterThan = 10,
    UnorderedOrGreaterThanOrEqual = 11,
    UnorderedOrLessThan = 12,
    UnorderedOrLessThanOrEqual = 13,
    UnorderedOrNotEqual = 14,
    True = 15,
}
/// Integer predicate kind.
///
/// Values start at 32, continuing after the float predicates above —
/// NOTE(review): presumably matching LLVM-C's integer predicate numbering;
/// confirm before changing any discriminant.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum IntegerPredicateKind
{
    Equal = 32,
    NotEqual = 33,
    UnsignedGreaterThan = 34,
    UnsignedGreaterThanOrEqual = 35,
    UnsignedLessThan = 36,
    UnsignedLessThanOrEqual = 37,
    SignedGreaterThan = 38,
    SignedGreaterThanOrEqual = 39,
    SignedLessThan = 40,
    SignedLessThanOrEqual = 41,
}
/// Atomic binary operation kind (the previous doc comment, "Integer
/// predicate kind.", was a copy-paste left-over from the enum above).
///
/// NOTE(review): variants match read-modify-write operations such as
/// LLVM's `atomicrmw` — confirm against the LLVM-C headers.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[repr(C)]
pub enum AtomicBinaryOp
{
    Xchg,
    Add,
    Sub,
    And,
    Nand,
    Or,
    Xor,
    Max,
    Min,
    UMax,
    UMin,
}
#[cfg(test)]
mod test
{
    use ir;
    use std::mem;
    use libc;
    // The numeric values are ABI-relevant: spot-check the first variant and
    // a later one to catch accidental reordering or insertion.
    #[test]
    fn linkage_ints_are_correct() {
        assert_eq!(ir::Linkage::External as u32, 0);
        assert_eq!(ir::Linkage::Private as u32, 8);
    }
    // The repr(C) enum must be the same size as C's `unsigned int`.
    #[test]
    fn linkage_is_c_unsigned() {
        assert_eq!(mem::size_of::<ir::Linkage>(), mem::size_of::<libc::c_uint>());
    }
}
|
use std::fs::File;
use std::io;
use std::io::{Read, BufRead, BufReader, BufWriter, Write};
/// Demo driver: the individual examples are left commented out; only the
/// error-handling example runs.
fn main() {
    // open_file();
    // read_bytes();
    // write_bytes();
    // stdin1();
    // stdin2();
    // iterators1();
    // iterators2();
    // BUG FIX: the Result was bound to an unused variable (`let a = ...`)
    // and silently dropped; report a failure instead of ignoring it.
    if let Err(e) = errors1() {
        eprintln!("errors1 failed: {}", e);
    }
}
/// Reads up to 1024 bytes of `records.txt` into a fixed buffer and
/// debug-prints the whole buffer (panics if open/read fails).
fn open_file() {
    let mut file = File::open("records.txt").unwrap();
    let mut buffer = [0u8; 1024];
    let _bytes_read = file.read(&mut buffer[..]).unwrap();
    println!("{:?}", buffer)
}
/// Reads the first line of `records.txt` through a BufReader and echoes it.
fn read_bytes() {
    let file = File::open("records.txt").unwrap();
    let mut reader = BufReader::new(file);
    let mut line = String::new();
    reader.read_line(&mut line).unwrap();
    println!("Read the following: {}", line);
}
/// Writes "Hello, world" to `result.txt` through a BufWriter.
fn write_bytes() {
    let f = File::create("result.txt").unwrap();
    let mut buf_writer = BufWriter::new(f);
    let buffer = String::from("Hello, world");
    // BUG FIX: plain `write` may perform a partial write and its byte
    // count was ignored; `write_all` guarantees the whole buffer is written.
    buf_writer.write_all(buffer.as_bytes()).unwrap();
    // Flush explicitly: Drop flushes too, but silently swallows any error.
    buf_writer.flush().unwrap();
}
/// Echoes one line from stdin back to stdout.
fn stdin1() {
    let mut buffer = String::new();
    let _ = io::stdin().read_line(&mut buffer).unwrap();
    // BUG FIX: plain `write` may write only part of the buffer and its
    // count was discarded; write_all writes everything. (The `&mut` on the
    // temporary &[u8] was also unnecessary.)
    io::stdout().write_all(buffer.as_bytes()).unwrap();
}
/// Echoes raw bytes from stdin to stdout using locked handles.
fn stdin2() {
    // Pre-read contents are irrelevant; zero-init for clarity (the
    // original `[8; 1024]` was presumably a typo for `[0; 1024]`).
    let mut buffer = [0; 1024];
    let stdin_handle = std::io::stdin();
    let mut locked_stdin_handle = stdin_handle.lock();
    // BUG FIX: capture how many bytes were actually read ...
    let n = locked_stdin_handle.read(&mut buffer).unwrap();
    let stdout_handle = std::io::stdout();
    let mut locked_stdout_handle = stdout_handle.lock();
    // ... and write exactly those bytes via write_all (the old code wrote
    // the full 1024-byte buffer, echoing garbage past the end of input,
    // and ignored the short-write count).
    locked_stdout_handle.write_all(&buffer[..n]).unwrap();
}
/// Echoes stdin line-by-line using the `BufRead::lines` iterator.
fn iterators1() {
    let stdin = std::io::stdin();
    let reader = BufReader::new(stdin);
    for line in reader.lines() {
        println!("You typed: {}", line.unwrap());
    }
}
/// Chains two file handles and reads them as one continuous stream.
fn iterators2() {
    let f1 = File::open("file1.txt").unwrap();
    let f2 = File::open("file2.txt").unwrap();
    let mut chained_handle = f1.chain(f2);
    let mut buffer = String::new();
    chained_handle.read_to_string(&mut buffer).unwrap();
    // Typo fix in the output label: "Chainded" -> "Chained".
    println!("Chained handle: \n {}", buffer);
}
/// Chains two files and reads them as one stream, propagating I/O errors
/// with `?` instead of panicking.
fn errors1() -> std::io::Result<()> {
    // Open both inputs up front, failing early if either is missing.
    let first = File::open("file1.txt")?;
    let second = File::open("file3.txt")?;
    let mut combined = first.chain(second);
    let mut contents = String::new();
    combined.read_to_string(&mut contents)?;
    println!("Read from chained handle: {}", contents);
    Ok(())
}
|
use std::collections::HashSet;
use serde_json::Value;
use crate::validator::{scope::ScopedSchema, state::ValidationState, Validator};
/// Validates `data` as a JSON object against the schema held by `scope`.
///
/// Accumulates every error into the returned `ValidationState` rather than
/// short-circuiting on the first failure.
pub fn validate_as_object(scope: &ScopedSchema, data: &Value) -> ValidationState {
    let object = match data.as_object() {
        Some(x) => x,
        // Not an object at all: report a single type error and stop.
        None => return ValidationState::new_with_error(scope.error("type", "expected 'object'")),
    };
    let mut state = ValidationState::new();
    // Keys not yet consumed by a named property; used below for the
    // keys/values or additionalProperties checks.
    let mut remaining_keys: HashSet<&str> = object.keys().map(AsRef::as_ref).collect();
    // Validate .properties first
    for (index, property) in scope.schema().properties().iter().enumerate() {
        let nested_scope = scope.scope_with_property(index, property);
        // `get` yields None for a missing property — presumably the nested
        // scope's validate() decides whether that is an error; confirm.
        let nested_state = nested_scope.validate(object.get(property.name()));
        state.extend(nested_state);
        remaining_keys.remove(property.name());
    }
    match (scope.schema().keys(), scope.schema().values()) {
        // Schema contains keys & values, validate pattern properties
        (Some(schema_keys), Some(schema_values)) => {
            for key in remaining_keys {
                let value = object.get(key);
                state.extend(schema_keys.validate(Some(&Value::String(key.to_string()))));
                state.extend(schema_values.validate(value));
            }
        }
        // Schema doesn't contain keys & values, just check for additional properties
        _ => {
            if !remaining_keys.is_empty() && !scope.schema().additional_properties() {
                state.push_error(scope.error("additionalProperties", "not allowed"));
            }
        }
    }
    state
}
|
#![allow(unused_must_use)]
extern crate nanomsg;
use nanomsg::{Socket, Protocol};
use std::time::duration::Duration;
use std::io::timer::sleep;
/// Pull end of the pipeline: binds the collector endpoint and prints every
/// received message until a read fails.
/// NOTE(review): this file uses pre-1.0 Rust APIs (`read_to_string` on the
/// socket, `String::as_slice`) and targets an old toolchain.
fn collector() {
    let mut socket = Socket::new(Protocol::Pull).unwrap();
    // Bind result deliberately ignored (file has #![allow(unused_must_use)]).
    socket.bind("ipc:///tmp/pipeline_collector.ipc");
    loop {
        match socket.read_to_string() {
            Ok(msg) => println!("Collected work result for '{}'.", msg.as_slice()),
            Err(err) => {
                // Any read error terminates the collector loop.
                println!("Collector failed '{}'.", err);
                break
            }
        }
    }
}
/// Middle stage: pulls messages from the feeder endpoint, simulates work
/// with a 300 ms sleep, and pushes each message on to the collector.
fn worker() {
    let mut input = Socket::new(Protocol::Pull).unwrap();
    let mut output = Socket::new(Protocol::Push).unwrap();
    input.connect("ipc:///tmp/pipeline_worker.ipc");
    output.connect("ipc:///tmp/pipeline_collector.ipc");
    loop {
        match input.read_to_string() {
            Ok(msg) => {
                println!("Worker received '{}'.", msg.as_slice());
                sleep(Duration::milliseconds(300)); // fake some work ...
                // Write result intentionally ignored (allow(unused_must_use)).
                output.write(msg.as_bytes());
            },
            Err(err) => {
                // Any read error terminates the worker loop.
                println!("Worker failed '{}'.", err);
                break;
            }
        }
    }
}
/// Push end of the pipeline: binds the worker endpoint and emits a numbered
/// message every 100 ms until a write fails, then shuts the endpoint down.
fn feeder() {
    let mut socket = Socket::new(Protocol::Push).unwrap();
    let mut endpoint = socket.bind("ipc:///tmp/pipeline_worker.ipc").unwrap();
    let sleep_duration = Duration::milliseconds(100);
    let mut count = 1u32;
    loop {
        let msg = format!("Message #{}", count);
        let msg_bytes = msg.as_bytes();
        let write_res = socket.write(msg_bytes);
        // First failed write ends the feed loop.
        if write_res.is_err() {
            break;
        }
        sleep(sleep_duration);
        count = count + 1;
    }
    // Shutdown result deliberately ignored (allow(unused_must_use)).
    endpoint.shutdown();
}
fn main() {
let args = std::os::args();
if args.len() < 2 {
println!("Usage: pipeline feeder, pipeline worker, pipeline collector")
println!(" Try running several workers")
println!(" And also try killing and restarting")
return
}
if args[1].as_slice() == "worker".as_slice() {
worker();
}
else if args[1].as_slice() == "feeder".as_slice() {
feeder();
}
else if args[1].as_slice() == "collector".as_slice() {
collector();
}
} |
use crate::libs::color::color_system;
use isaribi::{
style,
styled::{Style, Styled},
};
use nusa::prelude::*;
/// Styled `<select>` element builder (stateless; used via associated functions).
pub struct Select {}
impl Select {
    /// Builds the select node with the shared "base" class plus a
    /// color-variant class, forwarding attributes, events, and children.
    fn render(color: &str, attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::styled(Html::select(
            attrs.class(Self::class("base")).class(Self::class(color)),
            events,
            children,
        ))
    }
    /// Public constructor for the "light" color variant.
    pub fn light(attrs: Attributes, events: Events, children: Vec<Html>) -> Html {
        Self::render("light", attrs, events, children)
    }
}
impl Styled for Select {
    /// CSS for the select element: `.base` is shared by every variant;
    /// `.light` styles the variant built by `Select::light`.
    fn style() -> Style {
        style! {
            ".base" {
                "display": "inline-block";
                "border-radius": "2px";
                "background-color": color_system::gray(255, 0);
                "padding": "0.5em 1em";
            }
            ".light" {
                "color": color_system::gray(255, 9);
                "border": format!("0.1em solid {}", color_system::gray(255, 9));
            }
        }
    }
}
|
//! Library for versioning symbols in native libraries.
//!
//! This library provides a means by which native (C, C++, assembly) symbols can
//! be automatically version mangled. This allows multiple versions of a Rust
//! library with C, C++, and assembly code to be used in one application.
//!
//! # How it Works
//!
//! The general idea is that all symbols in C, C++, and assembly code will be
//! mangled with the current crate version. For instance, a symbol `some_func`
//! will become `some_func_v1_23_2_beta`. On the Rust side, all symbols will be
//! linked to using the mangled name. The C, C++, and assembly name mangling
//! will happen transparently. On the Rust side, all `extern` blocks will be
//! modified to use this crate's `versioned_extern!` macro.
//!
//! # Detailed Usage
//!
//! This library works best with the [`cc`] crate, and we assume you're using
//! `cc` to build external source files in this documentation.
//!
//! ## Dependencies
//!
//! First, you'll need to add the following to your `Cargo.toml`:
//!
//! ```toml
//! [dependencies]
//! native_versioning = "*"
//!
//! [build_dependencies]
//! native_versioning = { version = "*", features = ["build"] }
//! ```
//!
//! ## `build.rs`
//!
//! Your `build.rs` will generate the `generated_versioned.h` file using the
//! [`write_versioned_header()`] function and build the native sources with the
//! appropriate flags.
//!
//! ### Generated Header File
//!
//! To use the [`write_versioned_header()`] function, you first need to decide
//! three things:
//!
//! 1. The name of the generated header file.
//! 2. The name of the generated macro in the header file.
//! 3. Where to store the generated header file.
//!
//! We recommend you record the first two decisions in `const`s and create a
//! function for the third:
//!
//! ```rust
//! use std::env;
//! use std::path::PathBuf;
//!
//! const GENERATED_VERSIONED_HEADER: &str = "generated_versioned.h";
//! const GENERATED_VERSIONED_MACRO: &str = "VERSIONED";
//!
//! fn generated_include_dir() -> PathBuf {
//! const GENERATED_INCLUDE_DIR: &str = "generated_headers";
//! PathBuf::from(env::var("OUT_DIR").unwrap()).join(GENERATED_INCLUDE_DIR)
//! }
//! ```
//!
//! Finally, in `main`, generate the header file:
//!
//! ```rust
//! let generated_include_dir = generated_include_dir();
//! write_versioned_header(&generated_include_dir,
//! GENERATED_VERSIONED_HEADER,
//! GENERATED_VERSIONED_MACRO)
//! .expect("generated versioned header file");
//! ```
//!
//! The file is written to `generated_include_dir()/GENERATED_VERSIONED_HEADER`.
//!
//! ### Versioning Symbols
//!
//! You'll now need to use the generated header to construct a header that
//! versions all symbols in your C, C++, and assembly source files. This file
//! should look as follows:
//!
//! ```C
//! #include <generated_versioned.h>
//!
//! #define foo VERSIONED(foo)
//! #define bar VERSIONED(bar)
//!
//! // only necessary if using native MacOS symbols in assembly files
//! #define _foo VERSIONED(_foo)
//! #define _bar VERSIONED(_bar)
//! ```
//!
//! Note that the filename in the `#include` corresponds to the
//! `GENERATED_VERSIONED_HEADER` `const` and the `VERSIONED` macro being called
//! corresponds to the `GENERATED_VERSIONED_MACRO` `const`.
//!
//! You might find it useful to create a function that returns a path to this
//! file:
//!
//! ```rust
//! fn custom_versioned_symbols_header() -> PathBuf {
//! PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("ext").join("versioned.h")
//! }
//! ```
//!
//! ### Building Sources
//!
//! Finally, we'll tie everything together.
//!
//! In your `cc::Build` object, add the directory including the generated
//! versioned header file to the include path and use the [`include_header`]
//! method provided by this crate to include your custom versioned symbols
//! header file:
//!
//! ```rust,ignore
//! use native_versioning::HeaderInclude;
//!
//! cc::build::new()
//! ...
//! .include(&generated_include_dir())
//! .include_header(&custom_versioned_symbols_header())
//! ...
//! ```
//!
//! ### Overview
//!
//! In all, a simple `build.rs` using this crate will look as follows:
//!
//! ```rust
//! extern crate cc;
//! extern crate native_versioning;
//!
//! use std::path::{Path, PathBuf};
//!
//! use native_versioning::{HeaderInclude, write_versioned_header};
//!
//! const GENERATED_VERSIONED_HEADER: &str = "generated_versioned.h";
//! const GENERATED_VERSIONED_MACRO: &str = "VERSIONED";
//!
//! fn generated_include_dir() -> PathBuf {
//! const GENERATED_INCLUDE_DIR: &str = "generated_headers";
//! PathBuf::from(::std::env::var("OUT_DIR").unwrap()).join(GENERATED_INCLUDE_DIR)
//! }
//!
//! fn custom_versioned_symbols_header() -> PathBuf {
//! PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("ext").join("versioned.h")
//! }
//!
//! fn main() {
//! let generated_include_dir = generated_include_dir();
//! write_versioned_header(&generated_include_dir,
//! GENERATED_VERSIONED_HEADER,
//! GENERATED_VERSIONED_MACRO)
//! .expect("generated versioned header file");
//!
//! cc::Build::new()
//! .file(Path::new("ext").join("foo.c"))
//! .file(Path::new("ext").join("bar.S"))
//! .include(&generated_include_dir)
//! .include_header(&custom_versioned_symbols_header())
//! .compile("foo");
//! }
//! ```
//!
//! ## Importing Mangled Symbols
//!
//! To import the versioned symbols on the Rust side, use the
//! [`versioned_extern!`] macro provided by this crate:
//!
//! ```rust
//! #[macro_use] extern crate native_versioning;
//!
//! versioned_extern! {
//! fn foo(u8) -> u8;
//! fn bar(*mut i16, *mut i32);
//! }
//!
//! fn main() {
//! unsafe {
//! println!("Number: {}", foo(10));
//! }
//! }
//! ```
//!
//! The macro is a drop-in replacement for Rust's `extern` blocks. As a result,
//! you can take an existing codebase and simply replace all appearances of
//! `extern {` with `versioned_extern! {`.
//!
//! [`write_versioned_header()`]: fn.write_versioned_header.html
//! [`versioned_extern!`]: macro.versioned_extern.html
//!
mod versioned_extern;
#[cfg(feature = "build")]
mod build_support;
#[cfg(feature = "build")]
pub use build_support::*;
|
/*
* Copyright 2018 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ------------------------------------------------------------------------------
*/
use std::{env, fs, path::PathBuf};
/// Build script: selects the SGX verifier implementation (hardware vs
/// simulator) and copies it into place as
/// `src/validator_registry_tp_verifier.rs`.
fn main() {
    println!("Check if building for SGX hardware mode");
    // Hardware mode iff SGX_HW_MODE is set to exactly "TRUE"; any other
    // value, or an unset variable, selects the simulator. (Replaces the
    // match with the `if x { true } else { false }` anti-pattern.)
    let sgx_hw_mode = env::var("SGX_HW_MODE")
        .map(|mode| mode == "TRUE")
        .unwrap_or(false);
    // Generate verifier module based on sgx_hw_mode information.
    let mut manifest_path =
        PathBuf::from(env::var("CARGO_MANIFEST_DIR").expect("Cargo manifest directory not set"));
    manifest_path.push("src");
    let mut source = manifest_path.clone();
    let mut destination = manifest_path;
    destination.push("validator_registry_tp_verifier.rs");
    if sgx_hw_mode {
        println!("Validator registry TP will compile to work in SGX hardware mode");
        source.push("sgx");
    } else {
        println!("Validator registry TP will compile to work in simulator mode");
        source.push("simulator");
    }
    source.push("validator_registry_tp_verifier.rs");
    // println! only borrows, so the previous .clone() calls were redundant.
    println!("Copying from {:?} to {:?}", source, destination);
    fs::copy(source, destination)
        .expect("Build will fail, because copy operation not permitted in the path!");
}
|
use crate::int_var::IntVar;
use crate::looping::{self, IterAttrs, IterResult, NativeIterator};
use crate::runtime::Runtime;
use crate::std_type::Type;
use crate::variable::{FnResult, Variable};
use num::traits::Zero;
use num::One;
use std::cell::RefCell;
use std::rc::Rc;
/// Iterator adapter pairing each element of an inner iterator with a
/// 0-based index.
#[derive(Debug, Clone)]
pub struct Enumerate {
    iterable: looping::Iterator,
    // Next index to hand out; RefCell because iteration goes through
    // &self / Rc<Self>.
    i: RefCell<IntVar>,
}
impl Enumerate {
    /// Wraps `iterable`, counting from zero.
    pub fn new(iterable: looping::Iterator) -> Rc<Enumerate> {
        let counter = RefCell::new(Zero::zero());
        Rc::new(Enumerate { iterable, i: counter })
    }

    /// Advances the inner iterator, pairing each yielded value with its
    /// index; returns `None` once the inner iterator is exhausted.
    fn inner_next(&self, runtime: &mut Runtime) -> Result<Option<(Variable, Variable)>, ()> {
        match self.iterable.next(runtime)?.take_first() {
            Some(val) => {
                // Post-increment: hand back the old index, store old + 1.
                let old = self.i.replace_with(|x| &*x + &IntVar::one());
                Ok(Some((old.into(), val)))
            }
            None => Ok(None),
        }
    }

    fn create(_args: Vec<Variable>, _runtime: &mut Runtime) -> FnResult {
        unimplemented!()
    }
}
impl IterAttrs for Enumerate {
    /// `next()` attribute: returns `(index, value)` wrapped as two Options,
    /// or a single `None` once the inner iterator is exhausted.
    fn next_fn(self: Rc<Self>, args: Vec<Variable>, runtime: &mut Runtime) -> FnResult {
        debug_assert!(args.is_empty());
        match self.inner_next(runtime)? {
            Option::Some((index, value)) => {
                runtime.return_n([Option::Some(index).into(), Option::Some(value).into()])
            }
            Option::None => runtime.return_1(Option::None.into()),
        }
    }
    /// Runtime type object for `Enumerate`.
    fn get_type() -> Type {
        custom_class!(Enumerate, create, "Enumerate")
    }
}
impl NativeIterator for Enumerate {
    /// Native iteration: yields `[index, value]` pairs from `inner_next`.
    fn next(self: Rc<Self>, runtime: &mut Runtime) -> IterResult {
        let pair = self.inner_next(runtime)?;
        Ok(pair.map(|(index, value)| vec![index, value]).into())
    }
}
|
#[macro_use]
extern crate log;
extern crate pretty_env_logger;
use warp::Filter;
mod handler;
/// Entry point: starts a warp HTTP server on 0.0.0.0:9000 that accepts
/// `POST /invoke` requests and dispatches them to `handler::run`.
#[tokio::main]
async fn main() {
    pretty_env_logger::init();
    // POST /invoke
    let route = warp::path!("invoke")
        .and(warp::post())
        .and_then(handler::run);
    info!("Starting server ...");
    warp::serve(route).run(([0, 0, 0, 0], 9000)).await;
}
|
use pyo3::prelude::*;
use streamson_lib::strategy;
use crate::{handler::BaseHandler, PythonOutput, PythonStrategy, RustMatcher};
/// Low level Python wrapper for Filter strategy
#[pyclass]
pub struct Filter {
    // The underlying streamson filter strategy being wrapped for Python.
    filter: strategy::Filter,
}
#[pymethods]
impl Filter {
    /// Create a new instance of Filter
    #[new]
    pub fn new() -> PyResult<Self> {
        Ok(Self {
            filter: strategy::Filter::new(),
        })
    }

    /// Adds matcher for Filter
    ///
    /// # Arguments
    /// * `matcher` - matcher to be added (`Simple`, `Depth`, ...)
    /// * `handler` - optional handler triggered when the matcher matches
    pub fn add_matcher(&mut self, matcher: &RustMatcher, handler: Option<BaseHandler>) {
        // Option::map replaces the manual if-let/else that rebuilt the Option.
        self.filter
            .add_matcher(Box::new(matcher.inner.clone()), handler.map(|h| h.inner));
    }

    /// Processes input data
    fn process(&mut self, input_data: &[u8]) -> PyResult<Vec<PythonOutput>> {
        self._process(input_data)
    }

    /// Functions which is triggered when the input has stopped
    fn terminate(&mut self) -> PyResult<Vec<PythonOutput>> {
        self._terminate()
    }
}
impl PythonStrategy<strategy::Filter> for Filter {
    /// Exposes the wrapped strategy so the shared `_process`/`_terminate`
    /// helpers can drive it.
    fn get_strategy(&mut self) -> &mut strategy::Filter {
        &mut self.filter
    }
}
|
/// Returns true if `s` reads the same forwards and backwards.
///
/// Takes `&str` instead of `&String` so any string slice works; existing
/// callers passing `&String` still compile via deref coercion.
fn is_palindrome(s: &str) -> bool {
    // Compare the char sequence with its reverse; `eq` short-circuits on
    // the first mismatch, just like the original explicit loop.
    s.chars().eq(s.chars().rev())
}
/// Sums all numbers below `limit` that are palindromic in both base 10 and
/// base 2 (the decimal/binary formatting guarantees no leading zeros).
fn sum_double_base_palindromes(limit: u32) -> u32 {
    (0..limit)
        .filter(|&i| {
            let base_10 = format!("{}", i);
            let base_2 = format!("{:b}", i);
            // `&&` short-circuits, so the binary check is skipped when the
            // decimal form already fails (the original used bitwise `&`,
            // which always evaluated both sides).
            base_10.chars().eq(base_10.chars().rev()) && base_2.chars().eq(base_2.chars().rev())
        })
        .sum()
}

/// Project Euler problem 36: sum of all numbers below one million that are
/// palindromic in base 10 and base 2.
pub fn problem_036() -> u32 {
    sum_double_base_palindromes(1_000_000)
}
#[cfg(test)]
mod test {
    use super::*;
    // `test::Bencher` needs nightly's `#![feature(test)]` in the crate root.
    use test::Bencher;

    /// Checks the final result against the known Project Euler answer.
    #[test]
    fn test_problem_036() {
        let ans: u32 = problem_036();
        println!("Answer to Problem 36: {}", ans);
        assert!(ans == 872187)
    }

    /// Benchmarks a complete run of the solution.
    #[bench]
    fn bench_problem_036(b: &mut Bencher) {
        b.iter(|| problem_036());
    }
}
|
use std::collections::HashMap;
/// Decodes Morse code, both from raw timing-bit strings and from dot/dash
/// text.
struct MorseDecoder {
    // Maps a Morse sequence (e.g. ".-") to the string it represents.
    morse_code: HashMap<String, String>,
}
impl MorseDecoder {
    /// Builds the decoder with the full Morse table: letters, digits,
    /// punctuation, and the special fused "...---..." => "SOS" sequence.
    fn new() -> MorseDecoder {
        MorseDecoder {
            morse_code: [
                ("....-", "4"), ("--..--", ","), (".--", "W"), (".-.-.-", "."), ("..---", "2"),
                (".", "E"), ("--..", "Z"), (".----", "1"), (".-..", "L"),
                (".--.", "P"), (".-.", "R"), ("...", "S"), ("-.--", "Y"), ("...--", "3"),
                (".....", "5"), ("--.", "G"), ("-.--.", "("), ("-....", "6"),
                (".-.-.", "+"), ("...-..-", "$"), (".--.-.", "@"), ("...---...", "SOS"),
                ("..--.-", "_"), ("-.", "N"), ("-..-", "X"), ("-----", "0"),
                ("....", "H"), ("-...", "B"), (".---", "J"),
                // Fixed: "---..." is the colon in ITU Morse; the comma is
                // "--..--" (already mapped above). Previously both mapped
                // to ",".
                ("---...", ":"),
                ("-", "T"), ("---..", "8"), ("-..-.", "/"), ("--.-", "Q"), ("...-", "V"),
                ("----.", "9"), ("--", "M"), ("-.-.-.", ";"), ("-.-.--", "!"), ("..-.", "F"),
                ("..--..", "?"), ("-...-", "="), ("..-", "U"), (".----.", "'"),
                ("---", "O"), ("-.--.-", ")"), ("..", "I"), ("-....-", "-"), (".-..-.", "\""),
                (".-", "A"), ("-.-.", "C"), ("-..", "D"), (".-...", "&"),
                ("--...", "7"), ("-.-", "K"),
            ]
            .iter()
            .map(|(k, v)| (k.to_string(), v.to_string()))
            .collect(),
        }
    }

    /// Converts a raw bit string (e.g. "10101") into dots/dashes/gaps.
    ///
    /// The transmission rate is inferred: the shortest run of equal bits in
    /// the trimmed input is taken as one time unit `u`. Then 1-runs of 3u/u
    /// become "-"/"." and 0-runs of 7u/3u/u become word gap, letter gap and
    /// nothing respectively.
    pub fn decode_bits(&self, encoded: &str) -> String {
        let trimmed = encoded.trim_matches('0');
        // Gather every run of consecutive equal bits: runs of 1s are the
        // pieces between 0s and vice versa.
        let mut runs: Vec<&str> = trimmed.split('0').collect();
        runs.extend(trimmed.split('1'));
        // Splitting yields empty strings between adjacent delimiters;
        // ignore them so they cannot become the "shortest run".
        let unit = runs
            .iter()
            .filter(|r| !r.is_empty())
            .map(|r| r.len())
            .min()
            .expect("decode_bits requires at least one 1 bit");
        // Replace the longest patterns first so e.g. "111" is consumed as
        // one dash rather than three dots.
        encoded
            .trim_matches('0')
            .replace(&"0".repeat(7 * unit), " ")
            .replace(&"0".repeat(3 * unit), " ")
            .replace(&"1".repeat(3 * unit), "-")
            .replace(&"0".repeat(unit), "")
            .replace(&"1".repeat(unit), ".")
    }

    /// Translates dot/dash text into characters. Each token is looked up in
    /// the table; an unknown token panics via the map index.
    pub fn decode_morse(&self, encoded: &str) -> String {
        let mut ret: String = String::new();
        let words: Vec<&str> = encoded.trim().split(" ").collect();
        if words == vec![""] {
            return ret;
        }
        for word in words {
            let signs = word.split(" ");
            let mut word_string = String::new();
            for sign in signs {
                word_string.push_str(&self.morse_code[sign]);
            }
            ret.push_str(&word_string);
            ret.push(' ');
        }
        // Drop the trailing separator added after the last word.
        ret.pop();
        ret
    }
}
// Tests 0-3: a lone 1-run of any length is exactly one inferred time unit,
// so "1", "11", "111", ... all decode to a single dot => "E".
#[test]
fn test0() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("1")), "E".to_string());
}
#[test]
fn test1() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("11")), "E".to_string());
}
#[test]
fn test2() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("111")), "E".to_string());
}
#[test]
fn test3() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("1111111111111111")), "E".to_string());
}
// Tests 4-6: single-unit 0s inside a character are removed, so the dots
// and dashes fuse into one lookup token.
#[test]
fn test4() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("101")), "I".to_string());
}
#[test]
fn test5() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("10101")), "S".to_string());
}
#[test]
fn test6() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("10101011101110111010101")), "SOS".to_string());
}
// Tests 7-9: leading/trailing 0 padding must be trimmed before decoding.
#[test]
fn test7() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("000000000000000000000000000000001")), "E".to_string());
}
#[test]
fn test8() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("1000000000000000000000000000")), "E".to_string());
}
#[test]
fn test9() {
    let decoder = MorseDecoder::new();
    assert_eq!(decoder.decode_morse(&decoder.decode_bits("0000000000000010000000000000000000000000")), "E".to_string());
}
// Required so the file builds as a binary; all behavior lives in the tests.
fn main() {
}
|
use std::collections::HashMap;
use std::collections::HashSet;
use std::io;
use std::io::BufRead;
use std::io::BufReader;
/// Returns true when no word appears more than once in `words`.
///
/// Accepts a slice (`&[String]`) instead of `&Vec<String>`; `&Vec<String>`
/// arguments still coerce, so existing callers are unaffected.
fn has_unique_words(words: &[String]) -> bool {
    // HashSet::insert returns false for a duplicate, so `all`
    // short-circuits at the first repeated word.
    let mut word_set = HashSet::new();
    words.iter().all(|word| word_set.insert(word))
}
/// Counts how many times each byte occurs in `s`.
fn make_letter_count(s: &str) -> HashMap<u8, u32> {
    let mut counts = HashMap::new();
    for &byte in s.as_bytes() {
        // entry().or_insert(0) creates the slot on first sight of a byte.
        *counts.entry(byte).or_insert(0) += 1;
    }
    counts
}
/// Returns true when no two words in `words` are anagrams of each other.
///
/// Each word is normalized by sorting its bytes; two words are anagrams
/// exactly when their sorted byte sequences match (equivalent to comparing
/// letter-count maps). Storing normalized forms in a HashSet replaces the
/// original pairwise comparison, turning O(n^2) map comparisons into a
/// single pass. Accepts `&[String]`; `&Vec<String>` still coerces.
fn has_no_anagrams(words: &[String]) -> bool {
    let mut seen = HashSet::new();
    for word in words {
        let mut normalized = word.as_bytes().to_vec();
        normalized.sort_unstable();
        // insert returns false when the normalized form was already seen.
        if !seen.insert(normalized) {
            return false;
        }
    }
    true
}
/// Reads passphrases from stdin (one per line, whitespace-separated words)
/// and prints how many are valid under each policy:
/// part 1 - no repeated word; part 2 - no two words that are anagrams.
fn main() {
    let reader = BufReader::new(io::stdin());
    let passphrases: Vec<Vec<String>> = reader
        .lines()
        .map(|line| {
            line.unwrap()
                .split_whitespace()
                .map(|words| words.to_string())
                .collect()
        })
        .collect();
    let mut unique_words = 0;
    let mut no_anagrams = 0;
    for words in passphrases {
        if has_unique_words(&words) {
            unique_words += 1;
        }
        if has_no_anagrams(&words) {
            no_anagrams += 1;
        }
    }
    println!("part 1: {}", unique_words);
    println!("part 2: {}", no_anagrams);
}
|
use cocoa::base::id;
/// The `MTLLibrary` protocol defines the interface for an object that represents a library of Metal
/// shader functions. A `MTLLibrary` object can contain Metal shading language code that is compiled
/// during the app build process or at runtime from a text string containing Metal shading language
/// source code. Your app does not define classes that implement this protocol.
///
/// Use a `MTLDevice` method (not standard allocation and initialization techniques) to create a
/// `MTLLibrary` object. To create a `MTLLibrary` object from a Metal library binary, call one of
/// these MTLDevice methods:
///
/// * `newDefaultLibrary`
///
/// * `newLibraryWithFile:error:`
///
/// * `newLibraryWithData:error:`
///
/// To create a `MTLLibrary` object by compiling source code, call one of these `MTLDevice` methods:
///
/// * `newLibraryWithSource:options:error:`
///
/// * `newLibraryWithSource:options:completionHandler:`
///
/// The `newFunctionWithName:` method is used to fetch functions from the library, which makes that
/// code available as a shader for either a `MTLRenderPipelineState` object for a render command
/// encoder or for a `MTLComputePipelineState` for a compute command encoder.
pub trait MTLLibrary {
    /// Returns a function object that represents an entry point in the library.
    ///
    /// # Parameters
    ///
    /// * `functionName` - The name of an entry point.
    ///
    /// # Return Value
    ///
    /// A function object for the named entry point, or nil if the named function is not found in
    /// the library.
    unsafe fn newFunctionWithName(self, functionName: id) -> id;
    /// A list of all entry points in the library. (read-only)
    ///
    /// # Discussion
    ///
    /// An array of `NSString` objects. Each string is the name of an entry point.
    unsafe fn functionNames(self) -> id;
    /// The device from which this library was created. (read-only)
    ///
    /// # Discussion
    ///
    /// This library can only be used with this device.
    unsafe fn device(self) -> id;
    /// A string to help identify the library object.
    unsafe fn label(self) -> id;
    /// Sets the identifying label string for the library object.
    // NOTE: anonymous trait-method parameter (`id` without a name) is
    // pre-2018-edition syntax; this crate targets the 2015 edition.
    unsafe fn setLabel(self, id);
}
// Each method simply forwards to the matching Objective-C selector on the
// receiver via `msg_send!`; no extra logic lives on the Rust side.
impl MTLLibrary for id {
    unsafe fn newFunctionWithName(self, functionName: id) -> id {
        msg_send![self, newFunctionWithName:functionName]
    }
    unsafe fn functionNames(self) -> id {
        msg_send![self, functionNames]
    }
    unsafe fn device(self) -> id {
        msg_send![self, device]
    }
    unsafe fn label(self) -> id {
        msg_send![self, label]
    }
    unsafe fn setLabel(self, label: id) {
        msg_send![self, setLabel:label]
    }
}
/// Error conditions that can result from the creation of a `MTLLibrary` or `MTLFunction` object.
// Discriminant values mirror the NSError codes Metal reports under
// `MTLLibraryErrorDomain`, so `repr(usize)` keeps them ABI-compatible.
#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLLibraryError {
    /// The action is unsupported. For example, the requested library file has improper formatting,
    /// or the requested library is not accessible.
    MTLLibraryErrorUnsupported = 1,
    /// The action causes an internal error.
    MTLLibraryErrorInternal = 2,
    /// Compilation fails
    MTLLibraryErrorCompileFailure = 3,
    /// Compilation succeeds without error, but there are compiler warnings.
    MTLLibraryErrorCompileWarning = 4
}
#[link(name = "Metal", kind = "framework")]
extern "C" {
    /// Constant to identify the `MTLLibrary` error domain.
    pub static MTLLibraryErrorDomain: id;
}
/// Error conditions that can result from the creation of a `MTLRenderPipelineState` object.
#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLRenderPipelineError {
    /// The action causes an internal error.
    MTLRenderPipelineErrorInternal = 1,
    /// The action is unsupported.
    MTLRenderPipelineErrorUnsupported = 2,
    /// The input values are invalid.
    MTLRenderPipelineErrorInvalidInput = 3
}
#[link(name = "Metal", kind = "framework")]
extern "C" {
    /// Constant to identify the `MTLRenderPipelineState` error domain.
    pub static MTLRenderPipelineErrorDomain: id;
}
/// Metal Shading Language versions selectable at compile time.
// Encoding packs major version into the high 16 bits and minor into the
// low 16 bits, matching Metal's MTLLanguageVersion constants.
#[repr(usize)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MTLLanguageVersion {
    /// Version 1.0
    MTLLanguageVersion1_0 = (1 << 16),
    /// Version 1.1
    MTLLanguageVersion1_1 = (1 << 16) + 1
}
|
// svd2rust-generated register accessors for TIM15 alternate function
// register 1; R/W wrap the raw 32-bit register value.
#[doc = "Reader of register TIM15_AF1"]
pub type R = crate::R<u32, super::TIM15_AF1>;
#[doc = "Writer for register TIM15_AF1"]
pub type W = crate::W<u32, super::TIM15_AF1>;
#[doc = "Register TIM15_AF1 `reset()`'s with value 0x01"]
impl crate::ResetValue for super::TIM15_AF1 {
    type Type = u32;
    // Hardware reset value: BKINE (bit 0) set, all other bits cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x01
    }
}
// Field read/write proxies. Each `*_W::bit` clears the field's position in
// the cached register value and ORs in the new bit, returning `&mut W` so
// writes can be chained.
#[doc = "Reader of field `BKINE`"]
pub type BKINE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BKINE`"]
pub struct BKINE_W<'a> {
    w: &'a mut W,
}
impl<'a> BKINE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // BKINE occupies bit 0.
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
#[doc = "Reader of field `BKDF1BK0E`"]
pub type BKDF1BK0E_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BKDF1BK0E`"]
pub struct BKDF1BK0E_W<'a> {
    w: &'a mut W,
}
impl<'a> BKDF1BK0E_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // BKDF1BK0E occupies bit 8.
        self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);
        self.w
    }
}
#[doc = "Reader of field `BKINP`"]
pub type BKINP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `BKINP`"]
pub struct BKINP_W<'a> {
    w: &'a mut W,
}
impl<'a> BKINP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // BKINP occupies bit 9.
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
// Field readers: extract each single-bit field from the raw register value.
impl R {
    #[doc = "Bit 0 - BKINE"]
    #[inline(always)]
    pub fn bkine(&self) -> BKINE_R {
        BKINE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 8 - BKDF1BK0E"]
    #[inline(always)]
    pub fn bkdf1bk0e(&self) -> BKDF1BK0E_R {
        BKDF1BK0E_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 9 - BKINP"]
    #[inline(always)]
    pub fn bkinp(&self) -> BKINP_R {
        BKINP_R::new(((self.bits >> 9) & 0x01) != 0)
    }
}
// Field writers: each returns a write proxy borrowing the writer.
impl W {
    #[doc = "Bit 0 - BKINE"]
    #[inline(always)]
    pub fn bkine(&mut self) -> BKINE_W {
        BKINE_W { w: self }
    }
    #[doc = "Bit 8 - BKDF1BK0E"]
    #[inline(always)]
    pub fn bkdf1bk0e(&mut self) -> BKDF1BK0E_W {
        BKDF1BK0E_W { w: self }
    }
    #[doc = "Bit 9 - BKINP"]
    #[inline(always)]
    pub fn bkinp(&mut self) -> BKINP_W {
        BKINP_W { w: self }
    }
}
|
use std::cmp;
use std::ptr;
use std::mem;
use std::cell::UnsafeCell;
use alloc::heap;
use alloc::oom::oom;
use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering};
use std::sync::Arc;
/// Error from a blocking `send`: the receiver was dropped. Carries the
/// unsent value back to the caller.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct SendError<T>(pub T);
/// Error from a blocking `recv`: the sender was dropped and the queue is empty.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct ReceiveError;
/// Error from `try_send`; both variants return the unsent value.
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum TrySendError<T> {
    Full(T),
    Disconnected(T),
}
/// Error from `try_recv`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TryReceiveError {
    Empty,
    Disconnected,
}
/// Namespace type; `BoundedSpscQueue::new` builds a sender/receiver pair.
pub struct BoundedSpscQueue;
/// Producing half of the queue. `head`/`tail` are local caches of the
/// shared indices to avoid touching the atomics on every call.
pub struct Sender<T> {
    core: Arc<Core<T>>,
    head: usize,
    tail: usize,
}
/// Consuming half of the queue; mirrors `Sender`'s cached indices.
pub struct Receiver<T> {
    core: Arc<Core<T>>,
    head: usize,
    tail: usize,
}
// Negative impls (nightly feature) forbid sharing an endpoint between
// threads; each endpoint is single-owner but may be *moved* to a thread.
impl<T> !Sync for Sender<T> {}
impl<T> !Sync for Receiver<T> {}
/// Shared ring-buffer state. `len` is always a power of two so indices can
/// wrap with a mask instead of a modulo.
pub struct Core<T> {
    ptr: UnsafeCell<*mut T>,
    len: usize,
    tail: AtomicUsize,
    head: AtomicUsize,
    dropped: AtomicBool,
}
impl<T> ::std::fmt::Debug for TrySendError<T> {
    /// Manual Debug impl: prints only the variant name, since the payload
    /// type `T` is not required to implement Debug.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        let label = match *self {
            TrySendError::Full(..) => "Full(..)",
            TrySendError::Disconnected(..) => "Disconnected(..)",
        };
        label.fmt(f)
    }
}
impl BoundedSpscQueue {
    /// Creates a bounded single-producer/single-consumer channel holding at
    /// least `bound` elements, returning the two endpoints.
    pub fn new<T>(bound: usize) -> (Sender<T>, Receiver<T>) {
        let core = Arc::new(Core::new(bound));
        (Sender::new(core.clone()), Receiver::new(core))
    }
}
impl<T> Sender<T> {
    fn new(core: Arc<Core<T>>) -> Sender<T> {
        Sender {
            core: core,
            head: 0,
            tail: 0,
        }
    }

    /// Blocking send: spins until a slot frees up or the receiver is gone.
    ///
    /// The cached `self.tail` is refreshed from the shared atomic only when
    /// the local view says the ring is full, keeping the fast path free of
    /// extra atomic loads.
    /// NOTE(review): every atomic access here uses Ordering::Relaxed; on
    /// weakly-ordered hardware publishing the element normally requires a
    /// Release store of `head` paired with an Acquire load on the receiver
    /// side - confirm the intended memory model.
    pub fn send(&mut self, el: T) -> Result<(), SendError<T>> {
        if self.core.dropped.load(Ordering::Relaxed) {
            return Err(SendError(el));
        }
        let next_head = self.core.wrap_add(self.head, 1);
        // Ring is "full" when advancing head would collide with tail; one
        // slot is always left empty to distinguish full from empty.
        if next_head == self.tail {
            loop {
                self.tail = self.core.tail.load(Ordering::Relaxed);
                if next_head != self.tail {
                    break;
                } else {
                    if self.core.dropped.load(Ordering::Relaxed) {
                        return Err(SendError(el));
                    }
                }
            }
        }
        // Write the element into the slot, then publish the new head.
        unsafe {
            let p: *mut T = (*self.core.ptr.get()).offset(self.head as isize);
            ptr::write(p, el)
        };
        self.head = next_head;
        self.core.head.store(next_head, Ordering::Relaxed);
        Ok(())
    }

    /// Non-blocking send: like `send`, but refreshes the cached tail once
    /// and returns `Full` instead of spinning.
    pub fn try_send(&mut self, el: T) -> Result<(), TrySendError<T>> {
        if self.core.dropped.load(Ordering::Relaxed) {
            return Err(TrySendError::Disconnected(el));
        }
        let next_head = self.core.wrap_add(self.head, 1);
        if next_head == self.tail {
            self.tail = self.core.tail.load(Ordering::Relaxed);
            if next_head == self.tail {
                return Err(TrySendError::Full(el));
            }
        }
        unsafe {
            let p: *mut T = (*self.core.ptr.get()).offset(self.head as isize);
            ptr::write(p, el)
        };
        self.head = next_head;
        self.core.head.store(next_head, Ordering::Relaxed);
        Ok(())
    }
}
impl<T> Receiver<T> {
    fn new(core: Arc<Core<T>>) -> Receiver<T> {
        Receiver {
            core: core,
            head: 0,
            tail: 0,
        }
    }

    /// Blocking receive: spins until an element is available. Errors only
    /// when the sender has been dropped AND the queue is empty (elements
    /// already in flight are still delivered).
    /// NOTE(review): Relaxed loads of `head` - see the ordering note on
    /// `Sender::send`.
    pub fn recv(&mut self) -> Result<T, ReceiveError> {
        if self.head == self.tail {
            loop {
                self.head = self.core.head.load(Ordering::Relaxed);
                if self.head != self.tail {
                    break;
                } else {
                    if self.core.dropped.load(Ordering::Relaxed) {
                        return Err(ReceiveError);
                    }
                }
            }
        }
        // Move the element out of the slot, then publish the new tail so
        // the sender can reuse it.
        let data = unsafe {
            let p: *mut T = (*self.core.ptr.get()).offset(self.tail as isize);
            ptr::read(p)
        };
        self.tail = self.core.wrap_add(self.tail, 1);
        self.core.tail.store(self.tail, Ordering::Relaxed);
        Ok(data)
    }

    /// Non-blocking receive: refreshes the cached head once and returns
    /// `Empty` (or `Disconnected`) instead of spinning.
    pub fn try_recv(&mut self) -> Result<T, TryReceiveError> {
        if self.head == self.tail {
            self.head = self.core.head.load(Ordering::Relaxed);
            if self.head == self.tail {
                if self.core.dropped.load(Ordering::Relaxed) {
                    return Err(TryReceiveError::Disconnected);
                }
                return Err(TryReceiveError::Empty);
            }
        }
        let data = unsafe {
            let p: *mut T = (*self.core.ptr.get()).offset(self.tail as isize);
            ptr::read(p)
        };
        self.tail = self.core.wrap_add(self.tail, 1);
        self.core.tail.store(self.tail, Ordering::Relaxed);
        Ok(data)
    }
}
// Dropping either endpoint flags the channel as disconnected; the other
// side observes the flag and stops blocking.
impl<T> Drop for Sender<T> {
    fn drop(&mut self) {
        self.core.set_dropped();
    }
}
impl<T> Drop for Receiver<T> {
    fn drop(&mut self) {
        self.core.set_dropped();
    }
}
// Endpoints may be moved to another thread when the element type is Send;
// the !Sync impls above still forbid sharing one endpoint concurrently.
unsafe impl<T: Send> Send for Sender<T> {}
unsafe impl<T: Send> Send for Receiver<T> {}
impl<T> Core<T> {
    /// Allocates the ring buffer. Capacity is rounded up to a power of two
    /// (at least 2); one slot is sacrificed to tell "full" from "empty".
    fn new(bound: usize) -> Core<T> {
        assert!(mem::size_of::<T>() != 0, "no ZST support");
        let len = cmp::max(bound + 1, 1 + 1).next_power_of_two();
        let align = mem::align_of::<T>();
        let elem_size = mem::size_of::<T>();
        // Uses the old unstable alloc::heap API (pre-1.0-era nightly); a
        // null return signals allocation failure.
        let ptr = unsafe { heap::allocate(len * elem_size, align) };
        if ptr.is_null() {
            oom()
        }
        Core {
            tail: AtomicUsize::new(0),
            head: AtomicUsize::new(0),
            len: len,
            ptr: UnsafeCell::new(ptr as *mut _),
            dropped: AtomicBool::new(false),
        }
    }

    /// Marks the channel as disconnected (called when either endpoint drops).
    fn set_dropped(&self) {
        self.dropped.store(true, Ordering::Relaxed);
    }

    /// Advances `idx` by `addend`, wrapping around the ring capacity.
    #[inline]
    fn wrap_add(&self, idx: usize, addend: usize) -> usize {
        wrap_index(idx + addend, self.len)
    }
}
impl<T> Drop for Core<T> {
    /// Runs destructors for any elements still in the ring, then frees the
    /// raw buffer.
    fn drop(&mut self) {
        let head = self.head.load(Ordering::Relaxed);
        let mut tail = self.tail.load(Ordering::Relaxed);
        while head != tail {
            // Read each remaining element out so its destructor runs.
            let _ = unsafe {
                let p: *mut T = (*self.ptr.get()).offset(tail as isize);
                ptr::read(p)
            };
            tail = self.wrap_add(tail, 1);
        }
        let align = mem::align_of::<T>();
        let elem_size = mem::size_of::<T>();
        let num_bytes = elem_size * self.len;
        // Deallocation must mirror the allocate() call in Core::new exactly.
        unsafe {
            let ptr = *self.ptr.get();
            heap::deallocate(ptr as *mut _, num_bytes, align);
        }
    }
}
/// Maps a logical index onto the ring buffer's storage range.
#[inline]
fn wrap_index(index: usize, size: usize) -> usize {
    // size is always a power of 2, so masking with size - 1 is the cheap
    // equivalent of index % size.
    debug_assert!(size.is_power_of_two());
    let mask = size - 1;
    index & mask
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Exercises blocking send/recv, including wrap-around past the bound.
    #[test]
    fn smoke() {
        let (mut sender, mut receiver) = BoundedSpscQueue::new(2);
        sender.send(1).unwrap();
        sender.send(2).unwrap();
        assert_eq!(receiver.recv(), Ok(1));
        assert_eq!(receiver.recv(), Ok(2));
        sender.send(3).unwrap();
        assert_eq!(receiver.recv(), Ok(3));
        sender.send(1).unwrap();
        sender.send(2).unwrap();
        assert_eq!(receiver.recv(), Ok(1));
        assert_eq!(receiver.recv(), Ok(2));
    }
    /// Same sequence through the non-blocking try_send/try_recv API.
    #[test]
    fn try_smoke() {
        let (mut sender, mut receiver) = BoundedSpscQueue::new(2);
        sender.try_send(1).unwrap();
        sender.try_send(2).unwrap();
        assert_eq!(receiver.try_recv(), Ok(1));
        assert_eq!(receiver.try_recv(), Ok(2));
        sender.try_send(3).unwrap();
        assert_eq!(receiver.try_recv(), Ok(3));
        sender.try_send(1).unwrap();
        sender.try_send(2).unwrap();
        assert_eq!(receiver.try_recv(), Ok(1));
        assert_eq!(receiver.try_recv(), Ok(2));
    }
}
|
use std::fs::File;
use std::io::Read;
/// Advent of Code day 3 part 1: starting at the top-left of the grid in
/// "d03-input", move right 3 / down 1 per row (the grid repeats to the
/// right, hence the modulo) and count '#' cells hit.
fn main() {
    let mut file = File::open("d03-input").unwrap();
    let mut input = String::new();
    file.read_to_string(&mut input).unwrap();
    let (mut tree, mut col) = (0, 0);
    for line in input.lines() {
        // NOTE(review): `nth(col)` unwrap assumes every line is at least
        // `col + 1` chars; holds for rectangular puzzle input - confirm.
        let x = line.chars().nth(col);
        if x.unwrap() == '#' {
            tree += 1;
        }
        col = (col + 3) % line.len();
    }
    println!("Number of trees encountered: {}", tree);
}
use std::fmt;
use crate::card;
/// One solitaire foundation pile: built up Ace..King in a single suit.
pub struct Foundation {
    // Bottom-to-top stack of cards; the last element is the visible top.
    cards: Vec<&'static card::Card>,
    // The only suit this foundation accepts.
    suit: card::Suit,
}
impl Foundation {
    /// Creates an empty foundation that accepts only cards of `suit`.
    pub fn new(suit: card::Suit) -> Foundation {
        Foundation { cards: Vec::new(), suit }
    }

    /// Returns the top card without removing it, or `None` when empty.
    pub fn get_top(&self) -> Option<&'static card::Card> {
        // Vec::last + copied replaces the manual empty-check/index pair
        // (copied turns Option<&&Card> into Option<&Card>).
        self.cards.last().copied()
    }

    /// Removes and returns the top card, or `None` when empty.
    pub fn take(&mut self) -> Option<&'static card::Card> {
        self.cards.pop()
    }

    /// A foundation is full once its King is on top.
    pub fn is_full(&self) -> bool {
        self.get_top()
            .map_or(false, |top_card| top_card.value == card::Value::King)
    }

    /// A card may be added when it matches the suit and is either the Ace
    /// (onto an empty pile) or exactly one rank above the current top.
    pub fn can_add(&self, card: &'static card::Card) -> bool {
        match self.get_top() {
            Some(top_card) => card.suit == self.suit && top_card.is_one_more(card),
            None => card.suit == self.suit && card.value == card::Value::Ace,
        }
    }

    /// Pushes `card` unconditionally; callers should check `can_add` first.
    pub fn add(&mut self, card: &'static card::Card) {
        self.cards.push(card);
    }
}
impl fmt::Display for Foundation {
    /// Renders only the visible top card, or the empty-pile placeholder.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.get_top() {
            Some(card) => write!(f, "{}", card),
            None => write!(f, "{}", card::DISPLAY_EMPTY)
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::card;
    use crate::deck;
    use crate::foundation::Foundation;
    use crate::deck::FULL_DECK;
    use hamcrest2::prelude::*;
    /// A fresh foundation is empty and not full.
    #[test]
    fn test_create_foundation() {
        let fdh = Foundation::new(card::Suit::Heart);
        assert_that!(fdh.is_full(), is(false));
        let top = fdh.get_top();
        assert_that!(top.is_none(), is(true));
    }
    /// Empty pile accepts only the Ace of its own suit.
    #[test]
    fn test_can_add_empty() {
        let fdh = Foundation::new(card::Suit::Heart);
        assert_that!(fdh.can_add(deck::ACE_OF_HEARTS), is(true));
        assert_that!(fdh.can_add(deck::ACE_OF_SPADES), is(false));
        assert_that!(fdh.can_add(deck::ACE_OF_CLUBS), is(false));
        assert_that!(fdh.can_add(deck::ACE_OF_DIAMONDS), is(false));
        assert_that!(fdh.can_add(deck::TWO_OF_HEARTS), is(false));
    }
    /// Non-empty pile accepts only the next rank up, same suit.
    #[test]
    fn test_can_add() {
        let mut fdh = Foundation::new(card::Suit::Heart);
        fdh.add(deck::ACE_OF_HEARTS);
        assert_that!(fdh.can_add(deck::TWO_OF_HEARTS), is(true));
        fdh.add(deck::TWO_OF_HEARTS);
        assert_that!(fdh.cards.len(), eq(2));
        assert_that!(fdh.can_add(deck::FOUR_OF_HEARTS), is(false));
    }
    /// 13 cards (Ace..King) make the pile full.
    #[test]
    fn test_is_full_true() {
        let mut fdh = Foundation::new(card::Suit::Heart);
        for card in FULL_DECK.iter().take(13) {
            fdh.add(card);
        }
        assert_that!(fdh.is_full(), is(true));
    }
    /// A partially built pile is not full.
    #[test]
    fn test_is_full_false() {
        let mut fdh = Foundation::new(card::Suit::Heart);
        fdh.add(deck::ACE_OF_HEARTS);
        fdh.add(deck::TWO_OF_HEARTS);
        assert_that!(fdh.is_full(), is(false));
    }
    /// Empty pile renders the placeholder string.
    #[test]
    fn test_display_empty() {
        let fdh = Foundation::new(card::Suit::Heart);
        assert_that!(format!("{}", fdh), eq("{ }"));
    }
    /// Non-empty pile renders only its top card.
    #[test]
    fn test_display() {
        let mut fdh = Foundation::new(card::Suit::Heart);
        fdh.add(deck::ACE_OF_HEARTS);
        fdh.add(deck::TWO_OF_HEARTS);
        assert_that!(format!("{}", fdh), eq("(2:H)"));
    }
}
|
pub mod automaton;
pub mod cgol;
pub mod grid;
pub use automaton::Automaton;
pub use cgol::Cgol;
|
//! # MCPI API
//! `mcpi_api` is a wrapper for the Minecraft Pi Edition API handling parsing and other aspects for you.
use std::io::prelude::*;
use std::net::TcpStream;
use std::io::BufReader;
use std::cmp::{min, max};
#[cfg(test)]
mod tests {
    // Placeholder: real tests need a live MCPI server on localhost:4711,
    // which is unavailable in CI.
    #[test]
    fn development_tests() {
    }
}
///Struct containing functions and a Connection struct.
pub struct Minecraft {
    // Owned TCP connection to the MCPI server.
    conn:Connection
}
// Wraps the raw socket; all protocol commands flow through here.
struct Connection {
    stream:TcpStream
}
///Struct containing functions and a Connection struct.
// Borrows the Minecraft connection, so a Player cannot outlive it.
pub struct Player<'a> {
    conn:&'a mut Connection
}
///Struct used to specify tile positions.
// Tile (block) coordinates are whole numbers.
#[derive(Debug)]
pub struct TileVec3 {
    pub x:i32,
    pub y:i32,
    pub z:i32
}
///Struct used to specify entity positions.
// Entity positions are continuous, hence floats.
#[derive(Debug)]
pub struct Vec3 {
    pub x:f32,
    pub y:f32,
    pub z:f32
}
impl TileVec3 {
    /// Builds a TileVec3 from individual coordinates.
    pub fn from(x: i32, y: i32, z: i32) -> TileVec3 {
        TileVec3 { x, y, z }
    }

    /// Builds a TileVec3 from the first three elements of a slice.
    ///
    /// Takes `&[i32]` instead of `&Vec<i32>`; `&Vec<i32>` callers still
    /// coerce, so the change is backward compatible.
    ///
    /// # Panics
    /// Panics if `vec` has fewer than three elements.
    pub fn from_vector(vec: &[i32]) -> TileVec3 {
        TileVec3 {
            x: vec[0],
            y: vec[1],
            z: vec[2],
        }
    }
}
impl Vec3 {
    /// Builds a Vec3 from individual coordinates.
    pub fn from(x: f32, y: f32, z: f32) -> Vec3 {
        Vec3 { x, y, z }
    }

    /// Builds a Vec3 from the first three elements of a slice.
    ///
    /// Takes `&[f32]` instead of `&Vec<f32>`; `&Vec<f32>` callers still
    /// coerce, so the change is backward compatible.
    ///
    /// # Panics
    /// Panics if `vec` has fewer than three elements.
    pub fn from_vector(vec: &[f32]) -> Vec3 {
        Vec3 {
            x: vec[0],
            y: vec[1],
            z: vec[2],
        }
    }
}
impl Connection {
    /// Sends one newline-terminated command to the API socket.
    pub fn send(&mut self, msg: &str) {
        // write_all (instead of write) guarantees the whole command goes
        // out; a bare write may return Ok after writing only part of the
        // buffer, silently corrupting the protocol stream.
        self.stream
            .write_all(format!("{}\n", msg).as_bytes())
            .expect("Failed to send! Is MCPI still running?");
    }

    /// Reads one response line from the API socket, with the newline removed.
    pub fn receive(&mut self) -> String {
        let mut reader = BufReader::new(&self.stream);
        let mut line = String::new();
        reader
            .read_line(&mut line)
            .expect("Failed to receive! Is MCPI still running?");
        line.replace('\n', "")
    }

    /// Sends a command and returns its response line.
    pub fn send_receive(&mut self, msg: &str) -> String {
        self.send(msg);
        self.receive()
    }
}
impl Minecraft {
    /// Posts a chat message to the in-game chat.
    pub fn post_to_chat(&mut self, msg: &str) {
        self.conn.send(&format!("chat.post({})", msg));
    }

    /// Returns the block type id at `pos`.
    pub fn get_block(&mut self, pos: &TileVec3) -> u8 {
        self.conn
            .send_receive(&format!("world.getBlock({},{},{})", pos.x, pos.y, pos.z))
            .parse::<u8>()
            .unwrap()
    }

    /// Returns `[block_type, block_data]` for the block at `pos`.
    pub fn get_block_with_data(&mut self, pos: &TileVec3) -> Vec<u8> {
        self.conn
            .send_receive(&format!("world.getBlockWithData({},{},{})", pos.x, pos.y, pos.z))
            .split(',')
            .map(|s| s.parse())
            .collect::<Result<Vec<u8>, _>>()
            .unwrap()
    }

    /// Returns block types in the cuboid spanned by `pos1`/`pos2`
    /// (inclusive), ordered by y, then x, then z.
    pub fn get_blocks(&mut self, pos1: &TileVec3, pos2: &TileVec3) -> Vec<u8> {
        let mut results: Vec<u8> = vec![];
        for y in min(pos1.y, pos2.y)..max(pos1.y, pos2.y) + 1 {
            for x in min(pos1.x, pos2.x)..max(pos1.x, pos2.x) + 1 {
                for z in min(pos1.z, pos2.z)..max(pos1.z, pos2.z) + 1 {
                    results.push(
                        self.conn
                            .send_receive(&format!("world.getBlock({},{},{})", x, y, z))
                            .parse::<u8>()
                            .unwrap(),
                    );
                }
            }
        }
        results
    }

    /// Like `get_blocks`, but each entry is `[block_type, block_data]`.
    pub fn get_blocks_with_data(&mut self, pos1: &TileVec3, pos2: &TileVec3) -> Vec<Vec<u8>> {
        let mut results: Vec<Vec<u8>> = vec![];
        for y in min(pos1.y, pos2.y)..max(pos1.y, pos2.y) + 1 {
            for x in min(pos1.x, pos2.x)..max(pos1.x, pos2.x) + 1 {
                for z in min(pos1.z, pos2.z)..max(pos1.z, pos2.z) + 1 {
                    results.push(
                        self.conn
                            .send_receive(&format!("world.getBlockWithData({},{},{})", x, y, z))
                            .split(',')
                            .map(|s| s.parse())
                            .collect::<Result<Vec<u8>, _>>()
                            .unwrap(),
                    );
                }
            }
        }
        results
    }

    /// Sets a single block at `pos` to the given type and data value.
    pub fn set_block(&mut self, pos: &TileVec3, blocktype: u8, blockdata: u8) {
        self.conn.send(&format!("world.setBlock({},{},{},{},{})", pos.x, pos.y, pos.z, blocktype, blockdata));
    }

    /// Fills the cuboid spanned by `pos1`/`pos2` with one block type/data.
    pub fn set_blocks(&mut self, pos1: &TileVec3, pos2: &TileVec3, blocktype: u8, blockdata: u8) {
        self.conn.send(&format!("world.setBlocks({},{},{},{},{},{},{},{})", pos1.x,pos1.y,pos1.z,pos2.x,pos2.y,pos2.z,blocktype,blockdata));
    }

    /// Returns the y of the highest non-air block at the given x/z column.
    pub fn get_height(&mut self, pos: &TileVec3) -> i8 {
        self.conn
            .send_receive(&format!("world.getHeight({},{})", pos.x, pos.z))
            .parse::<i8>()
            .unwrap()
    }

    /// Saves a world checkpoint that `restore_checkpoint` can roll back to.
    pub fn save_checkpoint(&mut self) {
        self.conn.send("world.checkpoint.save()");
    }

    /// Restores the world to the last saved checkpoint.
    pub fn restore_checkpoint(&mut self) {
        self.conn.send("world.checkpoint.restore()");
    }

    /// Toggles a world setting; the protocol encodes booleans as 1/0.
    pub fn setting(&mut self, setting: &str, status: bool) {
        // `if status` replaces the redundant `status == true` comparison.
        self.conn.send(&format!("world.setting({},{})", setting, if status { 1 } else { 0 }));
    }

    /// Returns the entity ids of all connected players.
    pub fn get_player_entity_ids(&mut self) -> Vec<u16> {
        // Command has no arguments, so a plain literal replaces the no-op format!.
        self.conn
            .send_receive("world.getPlayerIds()")
            .split("|")
            .map(|s| s.parse())
            .collect::<Result<Vec<u16>, _>>()
            .unwrap()
    }

    /// Borrows the connection as a Player handle for player-scoped commands.
    pub fn player(&mut self) -> Player {
        Player {
            conn: &mut self.conn
        }
    }
}
impl Player<'_> {
    /// Returns the player's exact (float) position.
    pub fn get_pos(&mut self) -> Vec3 {
        // No-argument commands use string literals; the original wrapped
        // them in a pointless zero-argument format!.
        let vec: Vec<f32> = self
            .conn
            .send_receive("player.getPos()")
            .split(',')
            .map(|s| s.parse())
            .collect::<Result<Vec<f32>, _>>()
            .unwrap();
        Vec3::from_vector(&vec)
    }

    /// Teleports the player to an exact (float) position.
    pub fn set_pos(&mut self, pos: &Vec3) {
        self.conn.send(&format!("player.setPos({},{},{})", pos.x, pos.y, pos.z));
    }

    /// Returns the tile (block) the player is standing on.
    pub fn get_tile_pos(&mut self) -> TileVec3 {
        let vec: Vec<i32> = self
            .conn
            .send_receive("player.getTile()")
            .split(',')
            .map(|s| s.parse())
            .collect::<Result<Vec<i32>, _>>()
            .unwrap();
        TileVec3::from_vector(&vec)
    }

    /// Teleports the player to a tile position.
    pub fn set_tile_pos(&mut self, pos: &TileVec3) {
        self.conn.send(&format!("player.setTile({},{},{})", pos.x, pos.y, pos.z))
    }

    /// Toggles a player setting; the protocol encodes booleans as 1/0.
    pub fn setting(&mut self, setting: &str, status: bool) {
        self.conn.send(&format!("player.setting({},{})", setting, if status { 1 } else { 0 }));
    }
}
///Function to create a Minecraft struct.
/// Takes a IP adress and a port as arguments.
/// # Examples
/// ```
/// use mcpi_api::create;
/// let mut mc = create("localhost:4711");
/// mc.post_to_chat("Hello World!")
/// ```
/// # Panics
/// This function panics if binding to the adress fails.
pub fn create(adress:&str) -> Minecraft {
let stream = TcpStream::connect(adress);
match stream {
Ok(_) => {}
Err(_) => {
panic!("Failed to connect to the API! Is Minecraft running?")
}
}
Minecraft {
conn: Connection {
stream: stream.unwrap()
}
}
} |
#![allow(non_snake_case)]
#[allow(unused_imports)]
use std::io::{self, Write};
#[allow(unused_imports)]
use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque};
#[allow(unused_imports)]
use std::cmp::{max, min, Ordering};
// Competitive-programming input macro: `input!{n: usize, a: [i64; n]}`
// reads whitespace-separated tokens, either from a given source string or
// from locked stdin, one token per `next()` call.
macro_rules! input {
    (source = $s:expr, $($r:tt)*) => {
        let mut iter = $s.split_whitespace();
        let mut next = || { iter.next().unwrap() };
        input_inner!{next, $($r)*}
    };
    ($($r:tt)*) => {
        let stdin = std::io::stdin();
        // Byte-wise reader: skip leading whitespace, then take one token.
        let mut bytes = std::io::Read::bytes(std::io::BufReader::new(stdin.lock()));
        let mut next = move || -> String{
            bytes
                .by_ref()
                .map(|r|r.unwrap() as char)
                .skip_while(|c|c.is_whitespace())
                .take_while(|c|!c.is_whitespace())
                .collect()
        };
        input_inner!{next, $($r)*}
    };
}
// Recursively binds each `name: type` pair to a parsed value.
macro_rules! input_inner {
    ($next:expr) => {};
    ($next:expr, ) => {};
    ($next:expr, $var:ident : $t:tt $($r:tt)*) => {
        let $var = read_value!($next, $t);
        input_inner!{$next $($r)*}
    };
}
// Parses one value: tuples, fixed-length Vecs, char/chars shorthands,
// 1-indexed-to-0-indexed (`usize1`/`isize1`), or any FromStr type.
macro_rules! read_value {
    ($next:expr, ( $($t:tt),* )) => {
        ( $(read_value!($next, $t)),* )
    };
    ($next:expr, [ $t:tt ; $len:expr ]) => {
        (0..$len).map(|_| read_value!($next, $t)).collect::<Vec<_>>()
    };
    ($next:expr, chars) => {
        read_value!($next, String).chars().collect::<Vec<char>>()
    };
    ($next:expr, char) => {
        read_value!($next, String).chars().collect::<Vec<char>>()[0]
    };
    ($next:expr, usize1) => {
        read_value!($next, usize) - 1
    };
    ($next:expr, isize1) => {
        read_value!($next, isize) - 1
    };
    ($next:expr, $t:ty) => {
        $next().parse::<$t>().expect("Parse error")
    };
}
// Debug helper: prints `name = value,` for each expression.
macro_rules! debug {
    ($($a:expr),*) => {
        println!(concat!($(stringify!($a), " = {:?}, "),*), $($a),*);
    }
}
#[allow(dead_code)]
const MOD: usize = 1000000007;
#[allow(dead_code)]
/// Converts an ASCII digit character to its numeric value.
fn to_num(c: char) -> i64 {
    // '0' as i64 == 48, so '0'..='9' map to 0..=9.
    c as i64 - '0' as i64
}
//thanks to https://qiita.com/Cassin01/items/2f90aedded2b8fb017a1
/// Linear (Euler) sieve state: primes up to `n` plus a primality table.
struct Eratosthenes {
    n: usize,
    primes: Vec<usize>,
    is_prime: Vec<bool>,
}
impl Eratosthenes {
    /// Runs a linear sieve up to `n` inclusive.
    ///
    /// Each composite `i * prime` is crossed out exactly once, via its
    /// smallest prime factor (`spf`), which is what makes this O(n).
    fn new(n: usize) -> Self {
        let mut spf = vec![None; n+1];
        let mut is_prime = vec![true; n+1];
        let mut primes = Vec::new();
        is_prime[0] = false;
        is_prime[1] = false;
        for i in 2..n+1 {
            if is_prime[i] {
                primes.push(i);
                spf[i] = Some(i);
            }
            for prime in &primes {
                // Stop once prime exceeds spf(i): beyond that, i * prime
                // would be crossed out again via a smaller factor.
                // NOTE(review): spf[i] should always be Some here since
                // composites get their spf set before the loop reaches
                // them - confirm the invariant.
                if i * prime >= n + 1 || prime > &spf[i].unwrap() {
                    break;
                }
                is_prime[i * prime] = false;
                spf[i * prime] = Some(*prime);
            }
        }
        Eratosthenes {
            n: n,
            primes: primes,
            is_prime: is_prime
        }
    }
}
/// Reads N and prints the number of divisors of N! modulo 1e9+7.
///
/// Every i in 2..=N is fully factorized by trial division, so cnt[p] ends
/// up as (exponent of prime p in N!) + 1; slots for 0, 1 and composites
/// stay at their initial 1 and don't affect the product.
fn main() {
    input!{
        N: usize,
    }
    let mut cnt = vec![1; N+1];
    for i in 2..N+1 {
        let mut k = i;
        let mut flag = true;
        // Repeatedly strip the smallest divisor of k until k has no
        // divisor in 2..=i left (i.e. k == 1).
        while flag {
            flag = false;
            for j in 2..i+1 {
                if k % j == 0 {
                    k /= j;
                    cnt[j] += 1;
                    flag = true;
                    break;
                }
            }
        }
    }
    // Divisor count = product of (exponent + 1) over all primes, mod MOD.
    println!("{}", cnt.into_iter().fold(1, |acc, x| acc * x % MOD));
}
use anyhow::Result;
use std::path::PathBuf;
/// Returns path to executable
///
/// Resolves relative to the current working directory, so this only works
/// when invoked from the crate root after a debug build.
/// NOTE(review): the function name says "rcterm" but the joined binary is
/// "rgit" - confirm which name is intended.
pub fn get_rcterm_exec_path() -> Result<PathBuf> {
    Ok(std::env::current_dir()?.join("target/debug/rgit"))
}
|
// SPDX-FileCopyrightText: 2020 HH Partners
//
// SPDX-License-Identifier: MIT
pub mod algorithm;
pub mod annotation;
pub mod checksum;
pub mod creation_info;
pub mod document_creation_information;
pub mod doubleopen;
pub mod error;
pub mod external_document_reference;
pub mod external_package_reference;
pub mod file_information;
pub mod file_type;
pub mod license_list;
pub mod other_licensing_information_detected;
pub mod package_information;
pub mod package_verification_code;
pub mod relationship;
pub mod snippet;
pub mod spdx_expression;
pub use algorithm::*;
pub use annotation::*;
pub use checksum::*;
pub use creation_info::*;
pub use document_creation_information::*;
use error::SpdxError;
pub use external_document_reference::*;
pub use external_package_reference::*;
pub use file_information::*;
pub use file_type::*;
use log::info;
pub use other_licensing_information_detected::*;
pub use package_information::*;
pub use package_verification_code::*;
pub use relationship::*;
use serde::{Deserialize, Serialize};
pub use snippet::*;
pub use spdx_expression::*;
use std::{fs, io::BufReader, path::Path};
use uuid::Uuid;
use self::Relationship;
/// # SPDX 2.2
///
/// In-memory representation of an SPDX document. Latest spec
/// is currently 2.2. Can be serialized to and deserialized from JSON
/// (and YAML via [`SPDX::from_file`]).
///
/// Spec: https://spdx.github.io/spdx-spec/
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SPDX {
    /// Document-level metadata; flattened into the top level of the
    /// serialized form rather than nested under its own key.
    /// https://spdx.github.io/spdx-spec/2-document-creation-information/
    #[serde(flatten)]
    pub document_creation_information: DocumentCreationInformation,
    /// Packages described by the document (serialized as `packages`).
    /// https://spdx.github.io/spdx-spec/3-package-information/
    #[serde(rename = "packages")]
    #[serde(default)]
    pub package_information: Vec<PackageInformation>,
    /// Non-SPDX ("LicenseRef-…") licenses detected in the document
    /// (serialized as `hasExtractedLicensingInfos`).
    /// https://spdx.github.io/spdx-spec/6-other-licensing-information-detected/
    #[serde(rename = "hasExtractedLicensingInfos")]
    #[serde(default)]
    pub other_licensing_information_detected: Vec<OtherLicensingInformationDetected>,
    /// Per-file details (serialized as `files`).
    /// https://spdx.github.io/spdx-spec/4-file-information/
    #[serde(rename = "files")]
    #[serde(default)]
    pub file_information: Vec<FileInformation>,
    /// Snippet details (serialized as `snippets`).
    /// https://spdx.github.io/spdx-spec/5-snippet-information/
    #[serde(rename = "snippets")]
    #[serde(default)]
    pub snippet_information: Vec<Snippet>,
    /// Relationships between SPDX elements (documents, packages, files…).
    /// https://spdx.github.io/spdx-spec/7-relationships-between-SPDX-elements/
    #[serde(default)]
    pub relationships: Vec<Relationship>,
    /// Review annotations attached to the document.
    /// https://spdx.github.io/spdx-spec/8-annotations/
    #[serde(default)]
    pub annotations: Vec<Annotation>,
    /// Counter for creating SPDXRefs. Is not part of the spec, so don't serialize.
    #[serde(skip)]
    pub spdx_ref_counter: i32,
}
impl SPDX {
    /// Create a new, empty SPDX document with the given name.
    ///
    /// The document namespace is generated from the name plus a fresh UUID;
    /// all item lists start empty.
    pub fn new(name: &str) -> Self {
        info!("Creating SPDX.");
        Self {
            document_creation_information: DocumentCreationInformation {
                document_name: name.to_string(),
                spdx_document_namespace: format!(
                    "http://spdx.org/spdxdocs/{}-{}",
                    // `format!` takes `name` by Display; the extra
                    // `to_string()` was a needless allocation.
                    name,
                    Uuid::new_v4()
                ),
                ..Default::default()
            },
            package_information: Vec::new(),
            other_licensing_information_detected: Vec::new(),
            file_information: Vec::new(),
            relationships: Vec::new(),
            spdx_ref_counter: 0,
            annotations: Vec::new(),
            snippet_information: Vec::new(),
        }
    }
    /// Deserialize an SPDX document from a file. Accepts json and yaml,
    /// selected by the file extension (`json`, `yml` or `yaml`).
    ///
    /// # Errors
    ///
    /// Returns [`SpdxError`] if the file cannot be opened, its extension is
    /// missing or unsupported, or deserialization fails.
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Self, SpdxError> {
        info!("Deserializing SPDX from {}", path.as_ref().display());
        let path = path.as_ref();
        let file = fs::File::open(path)?;
        let reader = BufReader::new(file);
        match path
            .extension()
            .ok_or_else(|| SpdxError::PathExtension(path.to_string_lossy().to_string()))?
            .to_str()
        {
            // Accept both common YAML extension spellings.
            Some("yml") | Some("yaml") => Ok(serde_yaml::from_reader::<_, SPDX>(reader)?),
            Some("json") => Ok(serde_json::from_reader::<_, SPDX>(reader)?),
            _ => Err(SpdxError::PathExtension(path.to_string_lossy().to_string())),
        }
    }
    /// Get sorted, deduplicated hashes (for `algorithm`) of all files in the
    /// SPDX. Files that carry no checksum for `algorithm` are skipped.
    pub fn get_unique_hashes(&self, algorithm: Algorithm) -> Vec<String> {
        info!("Getting unique hashes for files in SPDX.");
        let mut unique_hashes: Vec<String> = self
            .file_information
            .iter()
            .filter_map(|file| {
                file.file_checksum
                    .iter()
                    .find(|checksum| checksum.algorithm == algorithm)
                    .map(|checksum| checksum.value.clone())
            })
            .collect();
        unique_hashes.sort();
        unique_hashes.dedup();
        unique_hashes
    }
    /// Save serialized SPDX as pretty-printed json.
    ///
    /// # Errors
    ///
    /// Returns [`SpdxError`] if serialization or writing the file fails.
    pub fn save_as_json<P: AsRef<Path>>(&self, path: P) -> Result<(), SpdxError> {
        // Use the logger like every other method here; the original
        // `println!` bypassed the `log` facade.
        info!("Saving to json...");
        let json = serde_json::to_string_pretty(&self)?;
        fs::write(path, json)?;
        Ok(())
    }
    /// Find related files of the package with the provided id.
    ///
    /// Returns every file whose SPDX identifier is the related element of a
    /// relationship originating from `package_spdx_id`, paired with that
    /// relationship, in relationship order.
    pub fn get_files_for_package(
        &self,
        package_spdx_id: &str,
    ) -> Vec<(&FileInformation, &Relationship)> {
        info!("Finding related files for package {}.", &package_spdx_id);
        self.relationships
            .iter()
            .filter(|relationship| relationship.spdx_element_id == package_spdx_id)
            .filter_map(|relationship| {
                self.file_information
                    .iter()
                    .find(|file| file.file_spdx_identifier == relationship.related_spdx_element)
                    .map(|file| (file, relationship))
            })
            .collect()
    }
    /// Get all unique concluded license identifiers from the document's
    /// files, in first-seen order. `NOASSERTION` and `NONE` are excluded.
    pub fn get_license_ids(&self) -> Vec<String> {
        info!("Getting all license identifiers from SPDX.");
        let mut license_ids = Vec::new();
        for file in &self.file_information {
            for license in &file.concluded_license.licenses() {
                if !license_ids.contains(license) && license != "NOASSERTION" && license != "NONE" {
                    license_ids.push(license.clone());
                }
            }
        }
        license_ids
    }
    /// Get all relationships where the given SPDX ID is the SPDX element id.
    pub fn relationships_for_spdx_id(&self, spdx_id: &str) -> Vec<&Relationship> {
        self.relationships
            .iter()
            .filter(|relationship| relationship.spdx_element_id == spdx_id)
            .collect()
    }
    /// Get all relationships where the given SPDX ID is the related SPDX element id.
    pub fn relationships_for_related_spdx_id(&self, spdx_id: &str) -> Vec<&Relationship> {
        self.relationships
            .iter()
            .filter(|relationship| relationship.related_spdx_element == spdx_id)
            .collect()
    }
}
#[cfg(test)]
mod test {
    use chrono::prelude::*;
    use super::*;
    /// Path of the example document exercised by the tests below.
    const TEST_DATA_PATH: &str = "tests/data/SPDXJSONExample-v2.2.spdx.json";
    /// Deserialize the shared example SPDX document, panicking on failure.
    /// Extracted so the ~70 tests below do not each repeat the same
    /// `SPDX::from_file(...).unwrap()` line. Reaches the nested test
    /// modules through their existing `use super::*;` globs.
    fn example_spdx() -> SPDX {
        SPDX::from_file(TEST_DATA_PATH).unwrap()
    }
    #[test]
    fn deserialize_simple_spdx() {
        let spdx_file = example_spdx();
        assert_eq!(
            spdx_file.document_creation_information.document_name,
            "SPDX-Tools-v2.0".to_string()
        );
    }
    mod correct_information_is_parsed_from_example_spdx {
        use super::*;
        mod document_creation_information {
            use super::*;
            #[test]
            fn spdx_version() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information.spdx_version,
                    "SPDX-2.2".to_string()
                );
            }
            #[test]
            fn data_license() {
                let spdx = example_spdx();
                assert_eq!(spdx.document_creation_information.data_license, "CC0-1.0");
            }
            #[test]
            fn spdx_identifier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information.spdx_identifier,
                    "SPDXRef-DOCUMENT".to_string()
                );
            }
            #[test]
            fn document_name() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information.document_name,
                    "SPDX-Tools-v2.0".to_string()
                );
            }
            #[test]
            fn spdx_document_namespace() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information.spdx_document_namespace,
                    "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301"
                        .to_string()
                );
            }
            #[test]
            fn external_document_references() {
                let spdx = example_spdx();
                assert!(spdx
                    .document_creation_information
                    .external_document_references
                    .contains(&ExternalDocumentReference {
                        id_string: "DocumentRef-spdx-tool-1.2".to_string(),
                        checksum: Checksum {
                            algorithm: Algorithm::SHA1,
                            value: "d6a770ba38583ed4bb4525bd96e50461655d2759".to_string()
                        },
                        spdx_document_uri:
                            "http://spdx.org/spdxdocs/spdx-tools-v1.2-3F2504E0-4F89-41D3-9A0C-0305E82C3301"
                                .to_string()
                    }));
            }
            #[test]
            fn license_list_version() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information
                        .creation_info
                        .license_list_version,
                    Some("3.9".to_string())
                );
            }
            #[test]
            fn creators() {
                let spdx = example_spdx();
                assert!(spdx
                    .document_creation_information
                    .creation_info
                    .creators
                    .contains(&"Tool: LicenseFind-1.0".to_string()));
                assert!(spdx
                    .document_creation_information
                    .creation_info
                    .creators
                    .contains(&"Organization: ExampleCodeInspect ()".to_string()));
                assert!(spdx
                    .document_creation_information
                    .creation_info
                    .creators
                    .contains(&"Person: Jane Doe ()".to_string()));
            }
            #[test]
            fn created() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information.creation_info.created,
                    Utc.ymd(2010, 1, 29).and_hms(18, 30, 22)
                );
            }
            #[test]
            fn creator_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information
                        .creation_info
                        .creator_comment,
                    Some(
                        r#"This package has been shipped in source and binary form.
The binaries were created with gcc 4.5.1 and expect to link to
compatible system run time libraries."#
                            .to_string()
                    )
                );
            }
            #[test]
            fn document_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.document_creation_information.document_comment,
                    Some(
                        "This document was created using SPDX 2.0 using licenses from the web site."
                            .to_string()
                    )
                );
            }
        }
        mod package_information {
            use super::*;
            #[test]
            fn all_packages_are_deserialized() {
                let spdx = example_spdx();
                assert_eq!(spdx.package_information.len(), 4);
            }
            #[test]
            fn package_name() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_name,
                    "glibc".to_string()
                );
            }
            #[test]
            fn package_spdx_identifier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_spdx_identifier,
                    "SPDXRef-Package".to_string()
                );
            }
            #[test]
            fn package_version() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_version,
                    Some("2.11.1".to_string())
                );
            }
            #[test]
            fn package_file_name() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_file_name,
                    Some("glibc-2.11.1.tar.gz".to_string())
                );
            }
            #[test]
            fn package_supplier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_supplier,
                    Some("Person: Jane Doe (jane.doe@example.com)".to_string())
                );
            }
            #[test]
            fn package_originator() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_originator,
                    Some("Organization: ExampleCodeInspect (contact@example.com)".to_string())
                );
            }
            #[test]
            fn package_download_location() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_download_location,
                    "http://ftp.gnu.org/gnu/glibc/glibc-ports-2.15.tar.gz".to_string()
                );
            }
            #[test]
            fn files_analyzed() {
                let spdx = example_spdx();
                assert_eq!(spdx.package_information[0].files_analyzed, Some(true));
            }
            #[test]
            fn package_verification_code() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_verification_code,
                    Some(PackageVerificationCode {
                        value: "d6a770ba38583ed4bb4525bd96e50461655d2758".to_string(),
                        excludes: vec!["./package.spdx".to_string()]
                    })
                );
            }
            // Renamed from the original `package_chekcsum` typo.
            #[test]
            fn package_checksum() {
                let spdx = example_spdx();
                assert!(spdx.package_information[0]
                    .package_checksum
                    .contains(&Checksum::new(
                        Algorithm::SHA1,
                        "85ed0817af83a24ad8da68c2b5094de69833983c"
                    )));
                assert!(spdx.package_information[0]
                    .package_checksum
                    .contains(&Checksum::new(
                        Algorithm::MD5,
                        "624c1abb3664f4b35547e7c73864ad24"
                    )));
                assert!(spdx.package_information[0]
                    .package_checksum
                    .contains(&Checksum::new(
                        Algorithm::SHA256,
                        "11b6d3ee554eedf79299905a98f9b9a04e498210b59f15094c916c91d150efcd"
                    )));
            }
            #[test]
            fn package_home_page() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_home_page,
                    Some("http://ftp.gnu.org/gnu/glibc".to_string())
                );
            }
            #[test]
            fn source_information() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].source_information,
                    Some(
                        "uses glibc-2_11-branch from git://sourceware.org/git/glibc.git."
                            .to_string()
                    )
                );
            }
            #[test]
            fn concluded_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].concluded_license,
                    SPDXExpression("(LGPL-2.0-only OR LicenseRef-3)".to_string())
                );
            }
            #[test]
            fn all_licenses_information_from_files() {
                let spdx = example_spdx();
                assert!(spdx.package_information[0]
                    .all_licenses_information_from_files
                    .contains(&"GPL-2.0-only".to_string()));
                assert!(spdx.package_information[0]
                    .all_licenses_information_from_files
                    .contains(&"LicenseRef-2".to_string()));
                assert!(spdx.package_information[0]
                    .all_licenses_information_from_files
                    .contains(&"LicenseRef-1".to_string()));
            }
            #[test]
            fn declared_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].declared_license,
                    SPDXExpression("(LGPL-2.0-only AND LicenseRef-3)".to_string())
                );
            }
            #[test]
            fn comments_on_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].comments_on_license,
                    Some("The license for this project changed with the release of version x.y. The version of the project included here post-dates the license change.".to_string())
                );
            }
            #[test]
            fn copyright_text() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].copyright_text,
                    "Copyright 2008-2010 John Smith".to_string()
                );
            }
            #[test]
            fn package_summary_description() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_summary_description,
                    Some("GNU C library.".to_string())
                );
            }
            #[test]
            fn package_detailed_description() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[0].package_detailed_description,
                    Some("The GNU C Library defines functions that are specified by the ISO C standard, as well as additional features specific to POSIX and other derivatives of the Unix operating system, and extensions specific to GNU systems.".to_string())
                );
            }
            #[test]
            fn package_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.package_information[1].package_comment,
                    Some(
                        "This package was converted from a DOAP Project by the same name"
                            .to_string()
                    )
                );
            }
            #[test]
            fn external_reference() {
                let spdx = example_spdx();
                assert!(
                    spdx.package_information[0].external_reference.contains(&ExternalPackageReference {
                        reference_comment: Some("This is the external ref for Acme".to_string()),
                        reference_category: ExternalPackageReferenceCategory::Other,
                        reference_locator: "acmecorp/acmenator/4.1.3-alpha".to_string(),
                        reference_type: "http://spdx.org/spdxdocs/spdx-example-444504E0-4F89-41D3-9A0C-0305E82C3301#LocationRef-acmeforge".to_string()
                    })
                );
                assert!(spdx.package_information[0].external_reference.contains(
                    &ExternalPackageReference {
                        reference_comment: None,
                        reference_category: ExternalPackageReferenceCategory::Security,
                        reference_locator:
                            "cpe:2.3:a:pivotal_software:spring_framework:4.1.0:*:*:*:*:*:*:*"
                                .to_string(),
                        reference_type: "http://spdx.org/rdf/references/cpe23Type".to_string()
                    }
                ));
            }
            #[test]
            fn package_attribution_text() {
                let spdx = example_spdx();
                assert!(
                    spdx.package_information[0].package_attribution_text.contains(&"The GNU C Library is free software. See the file COPYING.LIB for copying conditions, and LICENSES for notices about a few contributions that require these additional notices to be distributed. License copyright years may be listed using range notation, e.g., 1996-2015, indicating that every year in the range, inclusive, is a copyrightable year that would otherwise be listed individually.".to_string())
                );
            }
        }
        mod file_information {
            use super::*;
            #[test]
            fn file_name() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[0].file_name,
                    "./src/org/spdx/parser/DOAPProject.java"
                );
            }
            #[test]
            fn file_spdx_identifier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[0].file_spdx_identifier,
                    "SPDXRef-DoapSource"
                );
            }
            #[test]
            fn file_type() {
                let spdx = example_spdx();
                assert_eq!(spdx.file_information[0].file_type, vec![FileType::Source]);
            }
            #[test]
            fn file_checksum() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[0].file_checksum,
                    vec![Checksum {
                        algorithm: Algorithm::SHA1,
                        value: "2fd4e1c67a2d28fced849ee1bb76e7391b93eb12".to_string()
                    }]
                );
            }
            #[test]
            fn concluded_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[0].concluded_license,
                    SPDXExpression("Apache-2.0".to_string())
                );
            }
            #[test]
            fn license_information_in_file() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[0].license_information_in_file,
                    vec!["Apache-2.0".to_string()]
                );
            }
            #[test]
            fn comments_on_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[2].comments_on_license,
                    Some("This license is used by Jena".to_string())
                );
            }
            #[test]
            fn copyright_text() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[0].copyright_text,
                    "Copyright 2010, 2011 Source Auditor Inc.".to_string()
                );
            }
            #[test]
            fn file_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[1].file_comment,
                    Some("This file is used by Jena".to_string())
                );
            }
            #[test]
            fn file_notice() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[1].file_notice,
                    Some("Apache Commons Lang\nCopyright 2001-2011 The Apache Software Foundation\n\nThis product includes software developed by\nThe Apache Software Foundation (http://www.apache.org/).\n\nThis product includes software from the Spring Framework,\nunder the Apache License 2.0 (see: StringUtils.containsWhitespace())".to_string())
                );
            }
            #[test]
            fn file_contributor() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.file_information[1].file_contributor,
                    vec!["Apache Software Foundation".to_string()]
                );
            }
        }
        mod snippet_information {
            use super::*;
            #[test]
            fn snippet_spdx_identifier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_spdx_identifier,
                    "SPDXRef-Snippet".to_string()
                );
            }
            #[test]
            fn snippet_from_file_spdx_identifier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_from_file_spdx_identifier,
                    "SPDXRef-DoapSource".to_string()
                );
            }
            #[test]
            fn ranges() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].ranges,
                    vec![
                        Range {
                            end_pointer: EndPointer {
                                line_number: Some(23),
                                reference: Some("SPDXRef-DoapSource".to_string()),
                                offset: None
                            },
                            start_pointer: StartPointer {
                                line_number: Some(5),
                                reference: Some("SPDXRef-DoapSource".to_string()),
                                offset: None
                            }
                        },
                        Range {
                            end_pointer: EndPointer {
                                line_number: None,
                                reference: Some("SPDXRef-DoapSource".to_string()),
                                offset: Some(420)
                            },
                            start_pointer: StartPointer {
                                line_number: None,
                                reference: Some("SPDXRef-DoapSource".to_string()),
                                offset: Some(310)
                            }
                        },
                    ]
                );
            }
            #[test]
            fn snippet_concluded_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_concluded_license,
                    SPDXExpression("GPL-2.0-only".to_string())
                );
            }
            #[test]
            fn license_information_in_snippet() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].license_information_in_snippet,
                    vec!["GPL-2.0-only".to_string()]
                );
            }
            #[test]
            fn snippet_comments_on_license() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_comments_on_license,
                    Some("The concluded license was taken from package xyz, from which the snippet was copied into the current file. The concluded license information was found in the COPYING.txt file in package xyz.".to_string())
                );
            }
            #[test]
            fn snippet_copyright_text() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_copyright_text,
                    "Copyright 2008-2010 John Smith".to_string()
                );
            }
            #[test]
            fn snippet_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_comment,
                    Some("This snippet was identified as significant and highlighted in this Apache-2.0 file, when a commercial scanner identified it as being derived from file foo.c in package xyz which is licensed under GPL-2.0.".to_string())
                );
            }
            #[test]
            fn snippet_name() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.snippet_information[0].snippet_name,
                    Some("from linux kernel".to_string())
                );
            }
        }
        mod other_licensing_information_detected {
            use super::*;
            #[test]
            fn license_identifier() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.other_licensing_information_detected[0].license_identifier,
                    "LicenseRef-Beerware-4.2".to_string()
                )
            }
            #[test]
            fn extracted_text() {
                let spdx = example_spdx();
                assert_eq!(spdx.other_licensing_information_detected[0].extracted_text, "\"THE BEER-WARE LICENSE\" (Revision 42):\nphk@FreeBSD.ORG wrote this file. As long as you retain this notice you\ncan do whatever you want with this stuff. If we meet some day, and you think this stuff is worth it, you can buy me a beer in return Poul-Henning Kamp </\nLicenseName: Beer-Ware License (Version 42)\nLicenseCrossReference: http://people.freebsd.org/~phk/\nLicenseComment: \nThe beerware license has a couple of other standard variants.")
            }
            #[test]
            fn license_name() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.other_licensing_information_detected[2].license_name,
                    "CyberNeko License".to_string()
                )
            }
            #[test]
            fn license_cross_reference() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.other_licensing_information_detected[2].license_cross_reference,
                    vec![
                        "http://people.apache.org/~andyc/neko/LICENSE".to_string(),
                        "http://justasample.url.com".to_string()
                    ]
                )
            }
            #[test]
            fn license_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.other_licensing_information_detected[2].license_comment,
                    Some("This is tye CyperNeko License".to_string())
                )
            }
        }
        mod relationships_between_spdx_elements {
            use super::*;
            #[test]
            fn spdx_element_id() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.relationships[0].spdx_element_id,
                    "SPDXRef-DOCUMENT".to_string()
                );
            }
            #[test]
            fn related_spdx_element() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.relationships[0].related_spdx_element,
                    "SPDXRef-Package".to_string()
                );
            }
            #[test]
            fn relationship_type() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.relationships[0].relationship_type,
                    RelationshipType::Contains
                );
                assert_eq!(
                    spdx.relationships[2].relationship_type,
                    RelationshipType::CopyOf
                );
            }
        }
        mod annotations {
            use super::*;
            #[test]
            fn annotator() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.annotations[0].annotator,
                    "Person: Jane Doe ()".to_string()
                );
            }
            #[test]
            fn annotation_date() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.annotations[0].annotation_date,
                    Utc.ymd(2010, 1, 29).and_hms(18, 30, 22)
                );
            }
            #[test]
            fn annotation_type() {
                let spdx = example_spdx();
                assert_eq!(spdx.annotations[0].annotation_type, AnnotationType::Other);
            }
            #[test]
            fn annotation_comment() {
                let spdx = example_spdx();
                assert_eq!(
                    spdx.annotations[0].annotation_comment,
                    "Document level annotation"
                );
            }
        }
    }
    #[test]
    fn find_related_files_for_package() {
        let spdx_file = example_spdx();
        let package_1_files = spdx_file.get_files_for_package("SPDXRef-Package");
        assert_eq!(package_1_files.len(), 1);
        let file = package_1_files
            .iter()
            .find(|package_and_relationship| {
                package_and_relationship.0.file_name == *"./lib-source/jena-2.6.3-sources.jar"
            })
            .expect("Should always be found");
        assert_eq!(file.0.file_spdx_identifier, "SPDXRef-JenaLib");
        assert_eq!(file.1.relationship_type, RelationshipType::Contains);
        assert_eq!(
            file.0.concluded_license,
            SPDXExpression("LicenseRef-1".into())
        );
    }
    #[test]
    fn get_all_licenses_from_spdx() {
        let spdx_file = example_spdx();
        let mut actual = spdx_file.get_license_ids();
        actual.sort();
        let mut expected: Vec<String> = vec![
            "Apache-2.0".into(),
            "LicenseRef-1".into(),
            "LGPL-2.0-only".into(),
            "LicenseRef-2".into(),
        ];
        expected.sort();
        assert_eq!(expected, actual);
    }
    #[test]
    fn get_relationships_for_spdx_id() {
        let spdx_file = example_spdx();
        let relationships = spdx_file.relationships_for_spdx_id("SPDXRef-Package");
        let relationship_1 = Relationship {
            spdx_element_id: "SPDXRef-Package".into(),
            related_spdx_element: "SPDXRef-Saxon".into(),
            relationship_type: RelationshipType::DynamicLink,
            comment: None,
        };
        let relationship_2 = Relationship {
            spdx_element_id: "SPDXRef-Package".into(),
            related_spdx_element: "SPDXRef-JenaLib".into(),
            relationship_type: RelationshipType::Contains,
            comment: None,
        };
        let expected_relationships = vec![&relationship_1, &relationship_2];
        assert_eq!(relationships, expected_relationships)
    }
    #[test]
    fn get_relationships_for_related_spdx_id() {
        let spdx_file = example_spdx();
        let relationships = spdx_file.relationships_for_related_spdx_id("SPDXRef-Package");
        let relationship_1 = Relationship {
            spdx_element_id: "SPDXRef-DOCUMENT".into(),
            related_spdx_element: "SPDXRef-Package".into(),
            relationship_type: RelationshipType::Contains,
            comment: None,
        };
        let relationship_2 = Relationship {
            spdx_element_id: "SPDXRef-DOCUMENT".into(),
            related_spdx_element: "SPDXRef-Package".into(),
            relationship_type: RelationshipType::Describes,
            comment: None,
        };
        let relationship_3 = Relationship {
            spdx_element_id: "SPDXRef-JenaLib".into(),
            related_spdx_element: "SPDXRef-Package".into(),
            relationship_type: RelationshipType::Contains,
            comment: None,
        };
        let expected_relationships = vec![&relationship_1, &relationship_2, &relationship_3];
        assert_eq!(relationships, expected_relationships)
    }
}
|
mod io;
pub use io::*;
/// Abstraction over the control messages an actor loop consumes.
pub trait ActorDirectiveT<Data> {
    /// True when this directive tells the actor loop to terminate.
    fn exit_loop(&self) -> bool;
    /// Consume the directive, yielding its payload if it carries one
    /// (`None` for pure control directives such as exit).
    fn input(self) -> Option<Data>;
}
#[async_trait::async_trait]
pub trait Actor<D, I, O, IO>
where I: ActorDirectiveT<D>,
    IO: ActorI<I> + ActorO<O> + Send + 'static,
    O: Send,
    D: Send,
{
    /// Run the actor's receive/process/send loop until the channel closes,
    /// an exit directive arrives, or sending an output fails.
    async fn actor_loop(&mut self, mut io: IO) {
        while let Some(directive) = io.recv().await {
            // An explicit exit directive terminates the loop immediately.
            if directive.exit_loop() {
                return;
            }
            // Any non-exit directive must carry a payload.
            let input = directive.input().expect("input directive");
            let output = self.on_input(input).await;
            // A closed output channel also ends the loop.
            if io.send(output).await.is_err() {
                return;
            }
        }
    }
    /// Process one unit of work, producing the output to send back.
    async fn on_input(&mut self, data: D) -> O;
}
/// Concrete directive type driving an actor loop: either a request to
/// stop, or one unit of input data.
pub enum ActorDirective<D> {
    /// Ask the actor loop to terminate.
    #[allow(dead_code)]
    ExitLoop,
    /// Deliver one unit of work to the actor.
    Input(D),
}
impl<D> ActorDirectiveT<D> for ActorDirective<D> {
    /// Only `ExitLoop` requests termination.
    fn exit_loop(&self) -> bool {
        match self {
            Self::ExitLoop => true,
            Self::Input(_) => false,
        }
    }
    /// `Input` yields its payload; `ExitLoop` carries none.
    fn input(self) -> Option<D> {
        match self {
            Self::Input(data) => Some(data),
            Self::ExitLoop => None,
        }
    }
}
|
// NOTE(review): this register API looks svd2rust-generated — presumably it
// should be regenerated from the SVD rather than hand-edited; confirm.
#[doc = "Reader of register CM4_PWR_CTL"]
pub type R = crate::R<u32, super::CM4_PWR_CTL>;
#[doc = "Writer for register CM4_PWR_CTL"]
pub type W = crate::W<u32, super::CM4_PWR_CTL>;
#[doc = "Register CM4_PWR_CTL `reset()`'s with value 0xfa05_0001"]
impl crate::ResetValue for super::CM4_PWR_CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // Upper 16 bits are the 0xfa05 key readback; PWR_MODE resets to 1
        // (the RESET variant, per the field's "Value on reset: 1" doc).
        0xfa05_0001
    }
}
#[doc = "Set Power mode for CM4\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PWR_MODE_A {
    #[doc = "0: Switch CM4 off\nPower off, clock off, isolate, reset and no retain."]
    OFF,
    #[doc = "1: Reset CM4\nClock off, no isolated, no retain and reset.\n\nNote: The CM4 CPU has a AIRCR.SYSRESETREQ register field that allows the CM4 to reset the complete device (RESET only resets the CM4), resulting in a warm boot."]
    RESET,
    #[doc = "2: Put CM4 in Retained mode\nThis can only become effective if CM4 is in SleepDeep mode. Check PWR_DONE flag to see if CM4 RETAINED state has been reached.\nPower off, clock off, isolate, no reset and retain."]
    RETAINED,
    #[doc = "3: Switch CM4 on.\nPower on, clock on, no isolate, no reset and no retain."]
    ENABLED,
}
impl From<PWR_MODE_A> for u8 {
    #[inline(always)]
    fn from(variant: PWR_MODE_A) -> Self {
        // Fieldless enum: discriminants are 0..=3 in declaration order,
        // matching the match arms of the original implementation.
        variant as u8
    }
}
#[doc = "Reader of field `PWR_MODE`"]
pub type PWR_MODE_R = crate::R<u8, PWR_MODE_A>;
impl PWR_MODE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PWR_MODE_A {
        match self.bits {
            0 => PWR_MODE_A::OFF,
            1 => PWR_MODE_A::RESET,
            2 => PWR_MODE_A::RETAINED,
            3 => PWR_MODE_A::ENABLED,
            // The reader is constructed from a 2-bit field (masked with
            // 0x03 in `R::pwr_mode`), so values above 3 cannot occur.
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `OFF`"]
    #[inline(always)]
    pub fn is_off(&self) -> bool {
        *self == PWR_MODE_A::OFF
    }
    #[doc = "Checks if the value of the field is `RESET`"]
    #[inline(always)]
    pub fn is_reset(&self) -> bool {
        *self == PWR_MODE_A::RESET
    }
    #[doc = "Checks if the value of the field is `RETAINED`"]
    #[inline(always)]
    pub fn is_retained(&self) -> bool {
        *self == PWR_MODE_A::RETAINED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        *self == PWR_MODE_A::ENABLED
    }
}
#[doc = "Write proxy for field `PWR_MODE`"]
pub struct PWR_MODE_W<'a> {
    // Borrow of the register writer; all methods funnel into `bits` below.
    w: &'a mut W,
}
impl<'a> PWR_MODE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: PWR_MODE_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "Switch CM4 off Power off, clock off, isolate, reset and no retain."]
    #[inline(always)]
    pub fn off(self) -> &'a mut W {
        self.variant(PWR_MODE_A::OFF)
    }
    #[doc = "Reset CM4 Clock off, no isolated, no retain and reset. Note: The CM4 CPU has a AIRCR.SYSRESETREQ register field that allows the CM4 to reset the complete device (RESET only resets the CM4), resulting in a warm boot."]
    #[inline(always)]
    pub fn reset(self) -> &'a mut W {
        self.variant(PWR_MODE_A::RESET)
    }
    #[doc = "Put CM4 in Retained mode This can only become effective if CM4 is in SleepDeep mode. Check PWR_DONE flag to see if CM4 RETAINED state has been reached. Power off, clock off, isolate, no reset and retain."]
    #[inline(always)]
    pub fn retained(self) -> &'a mut W {
        self.variant(PWR_MODE_A::RETAINED)
    }
    #[doc = "Switch CM4 on. Power on, clock on, no isolate, no reset and no retain."]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(PWR_MODE_A::ENABLED)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Clear the 2-bit field at bits 0:1, then insert the new value
        // (masked to 2 bits) without disturbing the rest of the register.
        self.w.bits = (self.w.bits & !0x03) | ((value as u32) & 0x03);
        self.w
    }
}
#[doc = "Reader of field `VECTKEYSTAT`"]
pub type VECTKEYSTAT_R = crate::R<u16, u16>;
impl R {
    #[doc = "Bits 0:1 - Set Power mode for CM4"]
    #[inline(always)]
    pub fn pwr_mode(&self) -> PWR_MODE_R {
        // Field occupies the two least-significant bits.
        PWR_MODE_R::new((self.bits & 0x03) as u8)
    }
    #[doc = "Bits 16:31 - Register key (to prevent accidental writes). - Should be written with a 0x05fa key value for the write to take effect. - Always reads as 0xfa05."]
    #[inline(always)]
    pub fn vectkeystat(&self) -> VECTKEYSTAT_R {
        // Upper half-word of the register.
        VECTKEYSTAT_R::new(((self.bits >> 16) & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:1 - Set Power mode for CM4"]
    #[inline(always)]
    pub fn pwr_mode(&mut self) -> PWR_MODE_W {
        // Returns a write proxy that masks its value into bits 0:1.
        PWR_MODE_W { w: self }
    }
}
|
use std::ops::*;
type V = usize;
/// Directed edge to vertex `to` with weight `cost`.
#[derive(Copy, Clone, Debug)]
pub struct E<W> {
    pub to: V,
    pub cost: W,
}
/// Adjacency-list graph; `es[v]` holds the edges leaving vertex `v`.
#[derive(Clone, Debug)]
pub struct Graph<W> {
    pub es: Vec<Vec<E<W>>>,
}
/// Heap entry `(vertex, distance)`; the comparator reverses the distance
/// ordering so `BinaryHeap` (a max-heap) pops the smallest distance first.
struct Entry<W>(V, W);
impl_cmp!(Entry<W>; |a, b| b.1.partial_cmp(&a.1).unwrap(); where W: PartialOrd);
impl<W> Graph<W> where W: Copy + Default + PartialOrd + Add<Output = W> {
    /// Creates a graph with `n` vertices and no edges.
    pub fn new(n: usize) -> Graph<W> {
        Graph { es: vec![vec![]; n] }
    }
    /// Adds a directed edge `v -> to` with the given cost.
    pub fn add(&mut self, v: V, to: V, cost: W) {
        self.es[v].push(E { to: to, cost: cost });
    }
    /// Dijkstra from `s`. Returns (visit order, [(dist, prev); n]).
    /// dist[v] := dist(s, v); it stays at `W::default()` when v is unreachable.
    /// prev[s] = !1. prev[v] = !0 when v is unreachable.
    /// NOTE(review): the heap comparator unwraps `partial_cmp`, so a NaN
    /// cost (e.g. W = f64) would panic — confirm callers never feed NaN.
    pub fn solve(&self, s: V) -> (Vec<V>, Vec<(W, V)>) {
        let n = self.es.len();
        let mut fixed = vec![false; n]; // For avoiding negative loops due to floating point error
        let mut dp = vec![(W::default(), !0); n];
        let mut que = ::std::collections::BinaryHeap::new();
        dp[s] = (W::default(), !1);
        que.push(Entry(s, W::default()));
        let mut list = vec![];
        while let Some(Entry(u, d)) = que.pop() {
            if fixed[u] { continue }
            fixed[u] = true;
            list.push(u); // vertices are listed in order of finalized distance
            for e in &self.es[u] {
                let v = e.to;
                let d2 = d + e.cost;
                // `dp[v].1 == !0` means v has not been reached yet.
                if !fixed[v] && (dp[v].1 == !0 || dp[v].0 > d2) {
                    dp[v] = (d2, u);
                    que.push(Entry(v, d2));
                }
            }
        }
        (list, dp)
    }
    /// Reconstructs the s->t path by following `prev` pointers; empty when
    /// t is unreachable.
    pub fn get_path(dp: &[(W, V)], mut t: V) -> Vec<V> {
        if dp[t].1 == !0 { return vec![] }
        let mut path = vec![];
        while t != !1 {
            path.push(t);
            t = dp[t].1;
        }
        path.into_iter().rev().collect()
    }
}
|
use game::cardmarco::*;
/// Game board: the seated players in seat order.
pub struct Board {
    pub players: Vec<Player>,
}
// Seat identifiers; the macro presumably also derives numeric
// (de)serialization from the explicit discriminants — confirm against
// `enum_number!`'s definition.
enum_number!(PlayerEnum {
    Player1=0,
    Player2=1,
    Player3=2,
    Player4=3,
    Player5=4,
});
impl Board {
    /// Builds a board for `player_num` players seated in a circle, wiring
    /// up each player's neighbours.
    pub fn new(player_num: i32) -> Board {
        /// Maps a 0-based seat index onto the corresponding `PlayerEnum`.
        fn r_player_enum(i: i32) -> PlayerEnum {
            match i {
                0 => PlayerEnum::Player1,
                1 => PlayerEnum::Player2,
                2 => PlayerEnum::Player3,
                3 => PlayerEnum::Player4,
                _ => PlayerEnum::Player5,
            }
        }
        let mut v = Vec::new();
        for i in 0..player_num {
            v.push(Player {
                gain: 0,
                money: 0,
                left: r_player_enum((i + 1).modulo(player_num)),
                // Fix: the second neighbour previously used `(i - 1).abs()`,
                // which maps seat 0 to Player2 — the same as its other
                // neighbour — instead of wrapping around to the last seat.
                // `(i - 1).modulo(player_num)` wraps correctly, assuming
                // `modulo` is a euclidean-modulo helper as its use with
                // `(i + 1)` above suggests — TODO confirm its definition.
                adjacent: (r_player_enum((i + 1).modulo(player_num)),
                           r_player_enum((i - 1).modulo(player_num))),
            })
        }
        Board { players: v }
    }
}
/// Per-player state.
pub struct Player {
    pub gain: i32,
    pub money: i32,
    /// The seat to this player's left (next seat, wrapping).
    pub left: PlayerEnum,
    /// Both neighbouring seats (next, previous).
    pub adjacent: (PlayerEnum, PlayerEnum),
}
|
use std::env::current_dir;
use std::path::PathBuf;
use anyhow::{Context, Result};
use dirs::home_dir;
use crate::cfg::Cfg;
/// Current working directory (where a local config may live).
type LocalDir = PathBuf;
/// User home directory (where the global config lives).
type GlobalDir = PathBuf;
/// Returns the pair (current working directory, user home directory).
///
/// # Errors
/// Fails when the current directory cannot be read or no home directory
/// can be determined.
pub fn reach_directories() -> Result<(LocalDir, GlobalDir)> {
    // Fixed broken-English error contexts ("fail to found ...").
    let local_dir = current_dir().context("failed to find current directory")?;
    let global_dir = home_dir().context("failed to find home directory")?;
    Ok((local_dir, global_dir))
}
/// Loads the configuration ("short.yaml"), resolving the local and global
/// directories first.
pub fn get_cfg() -> Result<Cfg> {
    let (local_dir, global_dir) = reach_directories()?;
    // Fixed broken-English error context ("fail to load ...").
    Cfg::load_local(global_dir, local_dir).context("failed to load cfg \"short.yaml\"")
}
/// Creates a fresh local configuration in the resolved directories.
pub fn create_cfg() -> Result<Cfg> {
    let (local_dir, global_dir) = reach_directories()?;
    // Fixed broken-English error context ("fail to create ...").
    Cfg::create_local(global_dir, local_dir).context("failed to create cfg")
}
|
use std::rc::Rc;
use crate::RrtHittable::Hittable;
use crate::RrtHittable::hit_record;
use crate::RrtRay::Ray;
/// A collection of hittable objects that is itself tested as one group.
#[derive(Clone)]
pub struct HittableList {
    hittables : Vec<Rc<dyn Hittable>>
}
impl HittableList {
    /// Creates an empty list.
    pub fn new() -> HittableList {
        HittableList {
            hittables : Vec::new()
        }
    }
    /*
     * @TODO check if it's an acceptable solution -> HittableList
     */
    /// Appends `hittable`, consuming and returning the list (builder style).
    pub fn add(mut self, hittable : Rc<dyn Hittable>) -> HittableList {
        self.hittables.push(hittable);
        self
    }
    // NOTE(review): takes `self` by value, so this consumes (and drops) the
    // list — the caller's value is gone and clearing has no observable
    // effect. A `&mut self` receiver is probably intended; changing it here
    // would alter the public interface, so it is only flagged.
    pub fn clear(mut self) {
        self.hittables.clear();
    }
    /// Tests `ray` against every object, tracking the closest hit in
    /// [t_min, t_max].
    ///
    /// NOTE(review): `rec` and `temp_rec` are passed by value. The
    /// assignment `rec = temp_rec` only mutates this function's local
    /// copy, so the caller never observes the hit record, and `temp_rec`
    /// can only be filled in by `object.hit` if `Hittable::hit` takes its
    /// record by mutable reference — confirm against the trait; `&mut
    /// hit_record` parameters are probably intended.
    pub fn hit(&self, ray : Ray, t_min : f64, t_max : f64, mut rec : hit_record) -> bool {
        let temp_rec : hit_record = hit_record::new();
        let mut hit_anything : bool = false;
        let mut closest_so_far : f64 = t_max;
        for object in self.hittables.iter() {
            if object.hit(ray, t_min, closest_so_far, temp_rec) {
                hit_anything = true;
                closest_so_far = temp_rec.t;
                rec = temp_rec;
            }
        }
        return hit_anything;
    }
}
use super::*;
use crate::construction::heuristics::InsertionContext;
use crate::solver::mutation::select_seed_job;
use crate::solver::RefinementContext;
/// A ruin strategy which removes random jobs from solution.
pub struct RandomJobRemoval {
    /// Specifies limitation for job removal (how many jobs one pass may
    /// remove; see `run`).
    limits: RuinLimits,
}
impl RandomJobRemoval {
    /// Creates a new instance of `RandomJobRemoval` with the given removal
    /// limits.
    pub fn new(limits: RuinLimits) -> Self {
        Self { limits }
    }
}
impl Default for RandomJobRemoval {
    /// Uses the default `RuinLimits`.
    fn default() -> Self {
        Self::new(RuinLimits::default())
    }
}
impl Ruin for RandomJobRemoval {
    /// Removes up to `limits.get_chunk_size(..)` randomly-seeded jobs from
    /// the solution, skipping locked jobs, and queues the removed jobs for
    /// re-insertion via `solution.required`.
    fn run(&self, _refinement_ctx: &RefinementContext, mut insertion_ctx: InsertionContext) -> InsertionContext {
        // Nothing to remove from an empty solution.
        if insertion_ctx.solution.routes.is_empty() {
            return insertion_ctx;
        }
        let affected = self.limits.get_chunk_size(&insertion_ctx);
        (0..affected).for_each(|_| {
            let solution = &mut insertion_ctx.solution;
            // Pick a random (route, job) seed; locked jobs stay in place.
            if let Some((route_index, job)) = select_seed_job(&solution.routes, &insertion_ctx.environment.random) {
                if !solution.locked.contains(&job) {
                    // Detach from its tour and mark for re-insertion.
                    solution.routes.get_mut(route_index).unwrap().route_mut().tour.remove(&job);
                    solution.required.push(job);
                }
            }
        });
        insertion_ctx
    }
}
|
// Copyright 2019, 2020 Wingchain
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#[macro_use]
extern crate crypto;
use ed25519_dalek::{Keypair as DalekKeyPair, PublicKey, SecretKey, Signature};
use rand::rngs::OsRng;
use sha2::Sha512;
use crypto::dsa::{CLength, Dsa, KeyPair as KeyPairT, Verifier as VerifierT};
use crypto::DsaLength;
/// Marker type implementing `Dsa` for the ed25519 signature scheme.
pub struct Ed25519;
/// Wrapper around an ed25519-dalek keypair.
#[derive(Debug)]
pub struct KeyPair(DalekKeyPair);
/// Wrapper around an ed25519 public key used for verification.
pub struct Verifier(PublicKey);
impl Dsa for Ed25519 {
    // All failures are collapsed into `()`; callers only learn that the
    // operation failed, not why.
    type Error = ();
    type KeyPair = KeyPair;
    type Verifier = Verifier;
    fn name(&self) -> String {
        "ed25519".to_string()
    }
    /// 32-byte secret key, 32-byte public key, 64-byte signature (see the
    /// fixed-size buffers used in the tests below).
    fn length(&self) -> DsaLength {
        DsaLength::DsaLength32_32_64
    }
    /// Generates a fresh keypair from the OS RNG.
    fn generate_key_pair(&self) -> Result<Self::KeyPair, Self::Error> {
        let mut csprng = OsRng::new().map_err(|_| ())?;
        let key_pair = KeyPair(DalekKeyPair::generate::<Sha512, _>(&mut csprng));
        Ok(key_pair)
    }
    /// Rebuilds a keypair deterministically from a 32-byte secret key.
    fn key_pair_from_secret_key(&self, secret_key: &[u8]) -> Result<Self::KeyPair, Self::Error> {
        let secret_key = SecretKey::from_bytes(secret_key).map_err(|_| ())?;
        // The public key is derived from the secret key, not stored.
        let public_key = PublicKey::from_secret::<Sha512>(&secret_key);
        let key_pair = KeyPair(DalekKeyPair {
            secret: secret_key,
            public: public_key,
        });
        Ok(key_pair)
    }
    /// Builds a verifier from a 32-byte public key.
    fn verifier_from_public_key(&self, public_key: &[u8]) -> Result<Self::Verifier, Self::Error> {
        let public_key = PublicKey::from_bytes(public_key).map_err(|_| ())?;
        Ok(Verifier(public_key))
    }
}
impl KeyPairT for KeyPair {
    // NOTE(review): all three methods use `copy_from_slice`, which panics
    // unless `out` has exactly the required length (32/32/64 bytes).
    fn public_key(&self, out: &mut [u8]) {
        let public = self.0.public.as_bytes();
        out.copy_from_slice(public);
    }
    fn secret_key(&self, out: &mut [u8]) {
        let secret = self.0.secret.as_bytes();
        out.copy_from_slice(secret);
    }
    fn sign(&self, message: &[u8], out: &mut [u8]) {
        let signature = self.0.sign::<Sha512>(message).to_bytes();
        out.copy_from_slice(&signature);
    }
}
impl VerifierT for Verifier {
    type Error = ();
    /// Verifies `signature` (64 raw bytes) over `message`; `Ok(())` on a
    /// valid signature.
    fn verify(&self, message: &[u8], signature: &[u8]) -> Result<(), Self::Error> {
        let signature = Signature::from_bytes(signature).map_err(|_| ())?;
        self.0.verify::<Sha512>(message, &signature).map_err(|_| ())
    }
}
// Presumably generates the custom-library/FFI glue for this DSA
// implementation — confirm against the macro's definition in `crypto`.
declare_dsa_custom_lib!(Ed25519);
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_ed25519_generate_key_pair() {
        assert!(Ed25519.generate_key_pair().is_ok());
    }
    #[test]
    fn test_ed25519_key_pair_from_secret_key() {
        let secret: [u8; 32] = [
            184, 80, 22, 77, 31, 238, 200, 105, 138, 204, 163, 41, 148, 124, 152, 133, 189, 29,
            148, 3, 77, 47, 187, 230, 8, 5, 152, 173, 190, 21, 178, 152,
        ];
        assert!(Ed25519.key_pair_from_secret_key(&secret).is_ok());
    }
    // Round-trip test with fixed vectors: derive keypair from a known
    // secret, check the derived public key, sign a message, check the
    // deterministic ed25519 signature bytes, then verify.
    #[test]
    fn test_ed25519_key_pair() {
        let secret: [u8; 32] = [
            184, 80, 22, 77, 31, 238, 200, 105, 138, 204, 163, 41, 148, 124, 152, 133, 189, 29,
            148, 3, 77, 47, 187, 230, 8, 5, 152, 173, 190, 21, 178, 152,
        ];
        let key_pair = Ed25519.key_pair_from_secret_key(&secret).unwrap();
        let mut public_key = [0u8; 32];
        key_pair.public_key(&mut public_key);
        assert_eq!(
            public_key.to_vec(),
            vec![
                137, 44, 137, 164, 205, 99, 29, 8, 218, 49, 70, 7, 34, 56, 20, 119, 86, 4, 83, 90,
                5, 245, 14, 149, 157, 33, 32, 157, 1, 116, 14, 186
            ]
        );
        let message: Vec<u8> = vec![97, 98, 99]; // "abc"
        let mut signature = [0u8; 64];
        key_pair.sign(&message, &mut signature);
        assert_eq!(
            signature.to_vec(),
            vec![
                82, 19, 26, 105, 235, 178, 54, 112, 61, 224, 195, 88, 150, 137, 32, 46, 235, 209,
                209, 108, 64, 153, 12, 58, 216, 179, 88, 38, 49, 167, 162, 103, 219, 116, 93, 187,
                145, 86, 216, 98, 97, 135, 228, 15, 66, 246, 207, 232, 132, 182, 211, 206, 12, 220,
                4, 96, 58, 254, 237, 8, 151, 3, 172, 14
            ]
        );
        let verifier = Ed25519.verifier_from_public_key(&public_key).unwrap();
        let result = verifier.verify(&message, &signature);
        assert!(result.is_ok());
    }
}
|
use std::ops::{Add, Mul};
#[cfg(test)]
mod tests {
    #[test]
    // list comprehension of sorts
    fn iterators() {
        // One mapping expressed three ways (fold, map/collect, plain loop);
        // all three must agree with the expected vector.
        let mapper = |v| ((v * 1235) + 2) / (4 * 16);
        let reducer = |mut acc: Vec<i32>, curr: i32| {
            acc.push(mapper(curr));
            acc
        };
        type V = Vec<i32>;
        let range = || 0..5;
        let exp: V = vec![0, 19, 38, 57, 77];
        let res3: V = {
            let mut vec: Vec<i32> = vec![];
            for i in range() {
                vec.push(mapper(i));
            }
            vec
        };
        let res: V = range().fold(vec![], reducer);
        let res2: V = range().map(mapper).collect();
        for case in &[res, res2, res3] {
            assert_eq!(case, &exp)
        }
    }
    #[test]
    fn pipe_() {
        // `times` is currently unused here; kept for the commented sketch.
        fn times(a: u32, b: u32) -> u32 {
            a * b
        }
        //
        // let add5 = add(5);
        // let sub3 = add(-3);
        // let double = mult(2);
        //
        fn add2(x: i32) -> i32 {
            x + 2
        }
        // `pipe!` is a macro defined elsewhere — presumably threads the
        // value through the listed functions; confirm its definition.
        assert_eq!(pipe!(10 => add2), 12)
    }
}
#[cfg(test)]
mod composition {
    /// Doubling exposed as an extension trait, to demonstrate method-call
    /// chaining ("composition") on a primitive type.
    trait Ops<T> {
        fn double(&self) -> T;
    }
    impl Ops<isize> for isize {
        fn double(&self) -> isize {
            2 * *self
        }
    }
    #[test]
    fn adds() {
        assert_eq!(23.double().double(), 92)
    }
}
fn main() {
    // Curried adder: `adder(x)` yields a closure that adds `x` to its
    // argument. NOTE(review): the binding is never used (dead code).
    let adder = |x: i32| move |y: i32| x + y;
}
/// Returns a closure that adds `x` to its argument.
///
/// The previous `<T: Add>` type parameter appeared in no argument or
/// return type, so it was uninferable: a plain call like `add(5)` failed
/// with "type annotations needed". It was unused, so it has been removed.
pub fn add(x: i32) -> impl Fn(i32) -> i32 {
    move |y| x + y
}
/// Returns a closure that multiplies its argument by `x`.
///
/// The previous `<T: Mul + Copy>` type parameter appeared in no argument
/// or return type, so it was uninferable and unused (plain `mult(2)` could
/// not compile); it has been removed.
pub fn mult(x: i32) -> impl Fn(i32) -> i32 {
    move |y| x * y
}
/// Threads `init` through every function in `funcs`, left to right, and
/// returns the final value (`pipe(x, &[f, g]) == g(f(x))`).
///
/// The previous body was a stub that ignored both arguments and returned
/// 0; the unused, uninferable `<T>` parameter has also been removed.
pub fn pipe(init: i32, funcs: &[fn(i32) -> i32]) -> i32 {
    funcs.iter().fold(init, |acc, f| f(acc))
}
pub trait OptionMutExt<T> {
    /// Replace the existing `Some` value with a new one.
    ///
    /// Returns the previous value if it was present, or `None` if no replacement was made.
    ///
    /// NOTE: `Option` has an inherent `replace` method; call this one via
    /// `OptionMutExt::replace(..)` to avoid the inherent method winning
    /// method resolution.
    fn replace(&mut self, val: T) -> Option<T>;
    /// Replace the existing `Some` value with the result of given closure.
    ///
    /// Returns the previous value if it was present, or `None` if no replacement was made.
    fn replace_with<F: FnOnce() -> T>(&mut self, f: F) -> Option<T>;
}
impl<T> OptionMutExt<T> for Option<T> {
    fn replace(&mut self, val: T) -> Option<T> {
        self.replace_with(move || val)
    }
    fn replace_with<F: FnOnce() -> T>(&mut self, f: F) -> Option<T> {
        // Only swap in a new value when there was an existing one; a `None`
        // stays `None` and the closure is never invoked.
        match self.take() {
            Some(old) => {
                *self = Some(f());
                Some(old)
            }
            None => None,
        }
    }
}
|
// src/code_test.rs
use super::code::*;
#[test]
fn test_make() {
    // Each case: (opcode, operands, expected encoded bytes). Wide operands
    // are encoded big-endian (65534 -> 255, 254).
    let tests = vec![
        (
            Opcode::OpConstant,
            vec![65534],
            vec![Opcode::OpConstant as u8, 255, 254],
        ),
        (Opcode::OpAdd, Vec::new(), vec![Opcode::OpAdd as u8]),
        (
            Opcode::OpGetLocal,
            vec![255],
            vec![Opcode::OpGetLocal as u8, 255],
        ),
        (
            Opcode::OpClosure,
            vec![65534, 255],
            vec![Opcode::OpClosure as u8, 255, 254, 255],
        ),
    ];
    for tt in tests.iter() {
        let instruction = make(tt.0.clone(), &tt.1);
        assert!(
            instruction.0.len() == tt.2.len(),
            "instruction has wrong length. want={}, got={}",
            tt.2.len(),
            instruction.0.len()
        );
        // Compare the encoded instruction byte-by-byte.
        for (i, b) in tt.2.iter().enumerate() {
            assert!(
                instruction.0[i] == tt.2[i],
                "wrong byte at pos {}. want={}, got={}",
                i,
                b,
                instruction.0[i]
            );
        }
    }
}
#[test]
fn test_instructions_string() {
    let instructions = vec![
        make(Opcode::OpAdd, &Vec::new()),
        make(Opcode::OpGetLocal, &vec![1]),
        make(Opcode::OpConstant, &vec![2]),
        make(Opcode::OpConstant, &vec![65535]),
        make(Opcode::OpClosure, &vec![65535, 255]),
    ];
    // Expected disassembly: byte offset, mnemonic, decoded operands.
    let expected = "0000 OpAdd
0001 OpGetLocal 1
0003 OpConstant 2
0006 OpConstant 65535
0009 OpClosure 65535 255
";
    // Concatenate the individual instructions into one stream first.
    let mut concatted = Instructions::new();
    for ins in instructions {
        concatted.0.extend_from_slice(&ins.0);
    }
    assert!(
        concatted.string() == expected,
        "instructions wrongly formatted.\nwant={:?}\ngot={:?}",
        expected,
        concatted
    );
}
/// Checks that `read_operands` decodes each opcode's operand bytes back to
/// the values `make` encoded, and reports the number of bytes consumed.
#[test]
fn test_read_operands() {
    // Each case: (opcode, operands, expected bytes read).
    let tests = [
        (Opcode::OpConstant, vec![65535], 2),
        (Opcode::OpGetLocal, vec![255], 1),
        (Opcode::OpClosure, vec![65535, 255], 3),
    ];
    for tt in tests.iter() {
        let instruction = make(tt.0.clone(), &tt.1);
        match lookup(tt.0.clone() as u8) {
            Ok(def) => {
                // Skip the opcode byte; decode only the operand bytes.
                let (operands_read, n) = read_operands(&def, &instruction.0[1..]);
                assert!(n == tt.2, "n wrong. want={}, got={}", tt.2, n);
                for (i, want) in tt.1.iter().enumerate() {
                    assert!(
                        operands_read[i] == *want,
                        // Fixed diagnostic (was: "operand wong. want={}, got{}").
                        "operand wrong. want={}, got={}",
                        want,
                        operands_read[i]
                    );
                }
            }
            Err(e) => {
                assert!(false, "definition not found: {:?}", e);
            }
        }
    }
}
|
use std::sync::{Mutex,RwLock,Arc,Barrier,Weak};
//use adminServer::AdminServer;
use log::Log;
use serverConfig::ServerConfig;
use gameState::GameState;
use storage::Storage;
use httpRequester::HTTPRequester;
use server::Server;
use map::Map;
/// Shared application state, handed around behind an `Arc`. Subsystems
/// created after startup live in `RwLock<Option<Arc<..>>>` slots.
pub struct AppData{
    pub log:Log,
    pub serverConfig:ServerConfig,
    // Presumably distinguishes editor mode from server mode — inferred
    // from the name; confirm against callers.
    pub isEditor:bool,
    pub gameState:RwLock<GameState>,
    pub storage:RwLock<Option<Arc<Storage>>>,
    pub httpRequester:RwLock<Option<Arc<HTTPRequester>>>,
    pub server: RwLock<Option<Arc<Server>>>,
    pub map: RwLock<Option<Arc<Map>>>,
    //pub adminServer:RwLock< Option< Arc<AdminServer> > >,
    //pub shouldStop:RwLock<bool>,
}
impl AppData{
    /// Builds the shared state with all optional subsystems unset and the
    /// game state set to `Initializing`.
    pub fn initialize( serverConfig:ServerConfig, log:Log, isEditor:bool ) -> Arc<AppData> {
        let appData=AppData{
            log:log,
            serverConfig:serverConfig,
            isEditor:isEditor,
            gameState:RwLock::new(GameState::Initializing),
            storage:RwLock::new(None),
            httpRequester:RwLock::new(None),
            server: RwLock::new(None),
            map: RwLock::new(None),
            //adminServer:RwLock::new(None),
            //shouldStop:RwLock::new(false),
        };
        Arc::new(appData)
    }
    /// Tears subsystems down in order: server, HTTP requester, storage.
    /// Each `Arc` is cloned out of its lock first, so the read lock is
    /// released before the subsystem's own shutdown runs.
    pub fn destroy( appData:Arc<AppData> ) {
        //==================Stop the server==================
        let server=(*appData.server.read().unwrap()).clone();
        match server{
            Some ( s ) => Server::stop(s),
            None=>{},
        }
        //==================Stop the httpRequester==================
        let httpRequester=(*appData.httpRequester.read().unwrap()).clone();
        match httpRequester{
            Some ( r ) => HTTPRequester::destroy(r),
            None=>{},
        }
        //==================Destroy storage==================
        let storage=(*appData.storage.read().unwrap()).clone();
        match storage{
            Some ( m ) => Storage::destroy(m),
            None=>{},
        }
    }
    /// Runs `f` with the HTTP requester while holding the read lock;
    /// panics if the requester has not been initialized.
    pub fn getHTTPRequesterAnd<T,F>(&self, f:F) -> T where F:FnOnce(&HTTPRequester) -> T {
        match *self.httpRequester.read().unwrap(){
            Some( ref httpRequester) => {
                f( httpRequester )
            },
            None=>panic!("No httpRequester"),
        }
    }
    /// Runs `f` with the server while holding the read lock; panics if the
    /// server has not been initialized.
    pub fn getServerAnd<T,F>(&self, f:F) -> T where F:FnOnce(&Server) -> T {
        match *self.server.read().unwrap(){
            Some( ref server) => {
                f( server )
            },
            None=>panic!("No server"),
        }
    }
}
|
use fdio::{fdio_sys, ioctl_raw};
use std::os::raw;
use failure::Error;
// PTY event bit flags; MASK (7) is the union of the three events below.
const PTY_EVENT_HANGUP: u8 = 1;
const PTY_EVENT_INTERRUPT: u8 = 2;
const PTY_EVENT_SUSPEND: u8 = 4;
const PTY_EVENT_MASK: u8 = 7;
/// C-layout argument struct: bits to clear and bits to set.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pty_clr_set_t {
    pub clr: u32,
    pub set: u32,
}
/// C-layout window dimensions returned by the PTY window-size ioctl.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct pty_window_size_t {
    pub width: u32,
    pub height: u32,
}
/// Queries the PTY for its current window size via
/// `IOCTL_PTY_GET_WINDOW_SIZE` on fd 0.
///
/// # Errors
/// Returns an error when the ioctl reports failure (negative return).
pub fn get_window_size() -> Result<pty_window_size_t, Error> {
    // The ioctl writes into this struct, so it must be mutable and the
    // out-pointer must come from a mutable borrow. The original code cast
    // `&window` (a shared borrow) to `*mut` and wrote through it, which is
    // undefined behaviour in Rust.
    let mut window = pty_window_size_t {
        width: 0,
        height: 0,
    };
    let success = unsafe {
        // SAFETY: the out-pointer is valid for writes of
        // `pty_window_size_t` and the matching size is passed alongside.
        ioctl_raw(0,
                  IOCTL_PTY_GET_WINDOW_SIZE,
                  ::std::ptr::null_mut() as *mut raw::c_void,
                  0,
                  &mut window as *mut _ as *mut raw::c_void,
                  ::std::mem::size_of::<pty_window_size_t>())
    };
    if success < 0 {
        Err(format_err!("get_window_size Ioctl failure"))
    } else {
        Ok(window)
    }
}
// Ioctl numbers built from (kind, family, op) via the fdio macro.
const IOCTL_PTY_GET_WINDOW_SIZE: raw::c_int = make_ioctl!(
    fdio_sys::IOCTL_KIND_DEFAULT,
    fdio_sys::IOCTL_FAMILY_PTY,
    0x01
);
// NOTE(review): currently unused in this file.
const IOCTL_PTY_SET_WINDOW_SIZE: raw::c_int = make_ioctl!(
    fdio_sys::IOCTL_KIND_DEFAULT,
    fdio_sys::IOCTL_FAMILY_PTY,
    0x20
);
|
use cpu::*;
use mem::Memory;
use hw::HW;
use hw::storage;
use hw::display;
use std::io::prelude::*;
/// Accessor for one byte of the BIOS Data Area, located at linear address
/// 0x400 + `idx`.
pub struct ByteBdaEntry
{
    idx: u8
}
impl ByteBdaEntry
{
    pub fn new(idx: u8) -> ByteBdaEntry
    {
        ByteBdaEntry { idx: idx }
    }
    /// Reads the byte at BDA offset `idx`.
    pub fn get(&self, mem: &Memory) -> u8
    {
        mem.read_u8(0x400 + self.idx as u32)
    }
    /// Writes `val` at BDA offset `idx`.
    pub fn set(&self, mem: &mut Memory, val: u8)
    {
        mem.write_u8(0x400 + self.idx as u32, val)
    }
}
/// Accessor for one 16-bit word of the BIOS Data Area, located at linear
/// address 0x400 + `idx`.
pub struct WordBdaEntry
{
    idx: u8
}
impl WordBdaEntry
{
    pub fn new(idx: u8) -> WordBdaEntry
    {
        WordBdaEntry { idx: idx }
    }
    /*
    pub fn get(&self, mem: &Memory) -> u16
    {
        mem.read_u16(0x400 + self.idx as u32)
    }*/
    /// Writes `val` at BDA offset `idx`.
    pub fn set(&self, mem: &mut Memory, val: u16)
    {
        mem.write_u16(0x400 + self.idx as u32, val)
    }
}
/* The BIOS should be mostly stateless and simply dispatch calls to the hardware
* In the BIOS call implementations in hardware emulation code, the state should
* reside in the emulated memory through the BDA. TODO: actually do this */
/// Coarse BIOS health; set to `Crashed` on an unhandled CPU exception
/// (interrupt 0 in `handle_interrupt`).
#[derive(Eq, PartialEq)]
pub enum BIOSState
{
    Ok,
    Crashed
}
/// Which drive the BIOS boots from (BIOS drive id 0x00 = floppy,
/// 0x80 = hard drive; see `boot`).
pub enum BootDrive
{
    Floppy,
    HardDrive
}
/// Emulated BIOS: dispatches interrupt/boot traps onto the hardware
/// emulation (see the module comment above about keeping it stateless).
pub struct BIOS
{
    pub state: BIOSState,
    boot_drive: BootDrive
}
/// Segment where the BIOS ROM stubs live.
const BIOS_SEGMENT: u16 = 0xf000;
/// Offset of the ROM configuration table inside the BIOS segment.
const ROM_CONF_TABLE_ADDR: u16 = 0xE6F5;
/// Equipment word returned by int 0x11 (per the comment there: 80x25
/// color, one floppy).
const EQUIPMENT_WORD: u16 = 0x21;
/// BDA address holding the equipment word.
const EQUIPMENT_WORD_ADDR: u32 = 0x410;
impl BIOS
{
    /// Creates a BIOS in the `Ok` state that will boot from `boot_drive`.
    pub fn new(boot_drive: BootDrive) -> BIOS
    {
        BIOS
        {
            state: BIOSState::Ok,
            boot_drive: boot_drive
        }
    }
    /// Entry point invoked when the CPU traps into the BIOS ROM: IP 0xfff0
    /// is the reset/boot vector, 0x00-0xff is an interrupt number (the IVT
    /// set up by `init_ivt` points vector N at BIOS_SEGMENT:N).
    pub fn cpu_trap(&mut self, cpu: &mut CPU, mem: &mut Memory, hw: &mut HW)
    {
        let ip = cpu.get_reg(WReg::IP);
        /* IP = 0xFFF0 for boot, interrupt number otherwise */
        match ip
        {
            0xfff0 => self.boot(cpu, mem, hw),
            0x0 ... 0xff => self.handle_interrupt(cpu, mem, hw, ip as u8),
            _ => panic!("Invalid IP value for BIOS call: {}", ip)
        }
    }
    /// Dispatches one BIOS interrupt (0x00-0xff) to the matching service,
    /// reading arguments from and writing results back to CPU registers
    /// and emulated memory. Unknown interrupts/services panic; errors are
    /// reported to the guest through the saved carry flag (`set_carry`).
    fn handle_interrupt(&mut self, cpu: &mut CPU, mem: &mut Memory, hw: &mut HW, interrupt_number: u8)
    {
        match interrupt_number
        {
            0x0 =>
            {
                /* CPU exception */
                bios_print!("Unhandled CPU exception");
                self.state = BIOSState::Crashed;
            }
            0x8 =>
            {}
            0x9 =>
            {
                bios_print!("Keyboard HW interrupt");
                hw.keyboard.bios_pump_keystrokes(mem);
            }
            0x10 =>
            {
                /* Video services */
                // TODO: move logs to display
                let ah = cpu.get_reg(BReg::AH);
                match ah
                {
                    0x0 =>
                    {
                        let mode = cpu.get_reg(BReg::AL);
                        bios_print!("Set video mode {}", mode);
                        hw.display.set_mode(mem, display::GraphicMode::from_bios(mode))
                    }
                    0x1 =>
                    {
                        bios_print!("UNIMPLEMENTED: Set cursor shape")
                    }
                    0x2 =>
                    {
                        let page = cpu.get_reg(BReg::BH);
                        let y = cpu.get_reg(BReg::DH);
                        let x = cpu.get_reg(BReg::DL);
                        bios_print!("Set cursor coords for page {}: {} {}", page, x, y);
                        hw.display.tty_setcoords(mem, page, x, y);
                        // Mirror the cursor position into the BDA.
                        mem.write_u8(0x450, x);
                        mem.write_u8(0x451, y);
                    }
                    0x3 =>
                    {
                        let mut page = cpu.get_reg(BReg::BH);
                        bios_print!("Get cursor position and size, page={}", page);
                        if page > 8
                        {
                            bios_print!("Warning: invalid page value; using current page instead");
                            page = hw.display.cur_page();
                        }
                        let (x, y) = hw.display.tty_coords(mem, page);
                        cpu.set_reg(BReg::CH, y * 8); // Start scan line
                        cpu.set_reg(BReg::CL, y * 8 + 7); // End scan line
                        cpu.set_reg(BReg::DH, y); // Row
                        cpu.set_reg(BReg::DL, x); // Col
                    }
                    0x5 =>
                    {
                        let page = cpu.get_reg(BReg::AL);
                        bios_print!("Select page {}", page);
                        hw.display.set_page(page);
                    }
                    0x6 =>
                    {
                        // AL = 0 means "blank the whole window" (25 rows).
                        let cnt = match cpu.get_reg(BReg::AL)
                        {
                            0 => 25,
                            x => x
                        };
                        let page = hw.display.cur_page();
                        let y1 = cpu.get_reg(BReg::CH);
                        let x1 = cpu.get_reg(BReg::CL);
                        let y2 = cpu.get_reg(BReg::DH);
                        let x2 = cpu.get_reg(BReg::DL);
                        let attr = cpu.get_reg(BReg::BH);
                        bios_print!("Scroll, page={}, cnt={}; {}/{} -> {}/{}", page, cnt, x1, y1, x2, y2);
                        for _ in 0 .. cnt
                        {
                            hw.display.tty_scroll(mem, page, x1, y1, x2, y2, attr);
                        }
                    }
                    0x8 =>
                    {
                        let page = cpu.get_reg(BReg::BH);
                        bios_print!("Read char at cursor, page={}", page);
                        let (chr, attr) = hw.display.tty_read_at_cur(mem, page);
                        cpu.set_reg(BReg::AH, attr);
                        cpu.set_reg(BReg::AL, chr);
                    }
                    0x9 =>
                    {
                        bios_print!("Write char+attr at pos");
                        let chr = cpu.get_reg(BReg::AL);
                        let attr = cpu.get_reg(BReg::BL);
                        let page = cpu.get_reg(BReg::BH);
                        let cnt = cpu.get_reg(WReg::CX);
                        hw.display.write_char_at_cur(mem, page, chr, attr, cnt);
                    }
                    0x0e =>
                    {
                        let char_to_print = cpu.get_reg(BReg::AL);
                        let page = cpu.get_reg(BReg::BH);
                        let foreground_color = cpu.get_reg(BReg::BL);
                        bios_print!("tty output to page {}; chr={:02x}", page, char_to_print);
                        hw.display.tty_output(mem, page, char_to_print, foreground_color)
                    },
                    0xf =>
                    {
                        let mode = hw.display.get_mode().to_bios();
                        let page = hw.display.cur_page();
                        let cols = hw.display.get_mode().cols() as u8;
                        bios_print!("Get video mode: page={}, mode={}, cols={}", page, mode, cols);
                        cpu.set_reg(BReg::AH, cols);
                        cpu.set_reg(BReg::AL, mode); // Video mode
                        cpu.set_reg(BReg::BH, page);
                    },
                    0xb =>
                    {
                        bios_print!("IMPLEMENT ME: AH={:02x}", ah);
                    }
                    0x11 | 0x1a | 0xef | 0xfa | 0x10 | 0xf0 | 0x30 | 0x6f | 0xfe | 0xcc =>
                    {
                        bios_print!("Not implemented: video service; AX={:04x}", cpu.get_reg(WReg::AX));
                        self.set_carry(cpu, mem);
                    }
                    0x12 =>
                    {
                        assert!(cpu.get_reg(BReg::BL) == 0x10);
                        bios_print!("BAD IMPL: get ega info, count={}, bl={}", cpu.get_reg(BReg::AL), cpu.get_reg(BReg::BL));
                    }
                    0x1b =>
                    {
                        bios_print!("Video: functionality/state information (not supported)");
                        cpu.set_reg(BReg::AL, 0); // Not 1B = unsupported
                        self.set_carry(cpu, mem);
                    }
                    _ => panic!("Unhandled video service (int 0x10): {:x}", ah)
                }
            }
            0x11 =>
            {
                /* Get equipment list
                 * Just return what we have: 80x25 color, a floppy disk and that's all */
                bios_print!("Get equipment list");
                let equipment_list_word = mem.read_u16(EQUIPMENT_WORD_ADDR);
                cpu.set_reg(WReg::AX, equipment_list_word);
            }
            0x12 =>
            {
                /* Get memory size */
                let memory_size_kb: u16 = 639;
                bios_print!("Get memory size = 0x{:x}kB", memory_size_kb);
                cpu.set_reg(WReg::AX, memory_size_kb);
                self.clear_carry(cpu, mem);
            }
            0x13 =>
            {
                /* Disk services */
                let ah = cpu.get_reg(BReg::AH);
                let drive = cpu.get_reg(BReg::DL);
                // DL selects the drive: 0x00 = floppy, 0x80 = first HDD.
                let target_storage = match drive
                {
                    0x0 => hw.floppy.as_mut(),
                    0x80 => hw.hdd.as_mut(),
                    _ => None
                };
                match target_storage
                {
                    None =>
                        match ah
                        {
                            0x15 => // Get disk type
                            { cpu.set_reg(BReg::AH, 0x0); /* No such drive */ }
                            _ => self.set_carry(cpu, mem)
                        },
                    Some(storage) =>
                        match ah
                        {
                            0x0 =>
                            {
                                let status = storage.reset(mem);
                                cpu.set_reg(BReg::AH, status.get_bios_code());
                                self.set_carry_value(cpu, mem, status != storage::Status::Success);
                            }
                            0x2 =>
                            {
                                let sector_count = cpu.get_reg(BReg::AL);
                                let cx = cpu.get_reg(WReg::CX);
                                // CHS decode: CH = cylinder low bits,
                                // CL bits 6-7 = cylinder bits 8-9.
                                let cylinder = ((cx & 0xff00)>>8) + ((cx & 0x00c0) << 2);
                                let sector = (cx & 0x003f) as u8;
                                let head = cpu.get_reg(BReg::DH);
                                let data_seg = cpu.get_reg(SegReg::ES);
                                let data_addr = cpu.get_reg(WReg::BX);
                                let (status, read) =
                                    storage.read_chs(mem, sector_count, cylinder, head, sector, data_seg, data_addr);
                                cpu.set_reg(BReg::AH, status.get_bios_code());
                                cpu.set_reg(BReg::AL, read);
                                self.set_carry_value(cpu, mem, status != storage::Status::Success);
                            }
                            0x3 =>
                            {
                                let sector_count = cpu.get_reg(BReg::AL);
                                let cx = cpu.get_reg(WReg::CX);
                                let cylinder = ((cx & 0xff00)>>8) + ((cx & 0x00c0) << 2);
                                let sector = (cx & 0x003f) as u8;
                                let head = cpu.get_reg(BReg::DH);
                                let data_seg = cpu.get_reg(SegReg::ES);
                                let data_addr = cpu.get_reg(WReg::BX);
                                let (status, written) =
                                    storage.write_chs(mem, sector_count, cylinder, head, sector, data_seg, data_addr);
                                cpu.set_reg(BReg::AH, status.get_bios_code());
                                cpu.set_reg(BReg::AL, written);
                                self.set_carry_value(cpu, mem, status != storage::Status::Success);
                            }
                            0x8 =>
                            {
                                /* Get drive parameters */
                                bios_print!("Get drive parameters for drive 0x{:02x}", drive);
                                cpu.set_reg(BReg::DL, 0x1); // Drive count
                                let drive_type = match drive
                                {
                                    0 => 4, // 1.44M floppy
                                    0x80 => 0,
                                    _ => unreachable!()
                                };
                                let cyls = storage.parameters.cylinders() as u16;
                                let heads_min1 = (storage.parameters.heads() - 1) as u8;
                                let sectors_per_track = storage.parameters.sectors_per_track() as u8;
                                cpu.set_reg(WReg::AX, 0);
                                cpu.set_reg(BReg::BL, drive_type);
                                cpu.set_reg(BReg::CH, cyls as u8);
                                cpu.set_reg(BReg::CL, sectors_per_track | (((cyls >> 8) as u8) << 6));
                                cpu.set_reg(BReg::DH, heads_min1);
                                self.clear_carry(cpu, mem);
                            }
                            0x15 =>
                            {
                                /* Get disk type */
                                bios_print!("Get disk type for drive {}", drive);
                                match drive
                                {
                                    0 => cpu.set_reg(BReg::AH, 2), // Floppy with change line support
                                    0x80 =>
                                    {
                                        cpu.set_reg(BReg::AH, 3); // Hard drive
                                        // Need to return sector count for hard drives
                                        let cyls = storage.parameters.cylinders();
                                        let heads = storage.parameters.heads();
                                        let sectors_per_track = storage.parameters.sectors_per_track();
                                        let sector_count = (cyls as u32) * (heads as u32) * (sectors_per_track as u32);
                                        cpu.set_reg(WReg::CX, (sector_count >> 16) as u16);
                                        cpu.set_reg(WReg::DX, sector_count as u16);
                                    }
                                    _ => unreachable!()
                                };
                                self.clear_carry(cpu, mem);
                            }
                            0x16 =>
                            {
                                /* Disk change is not supported */
                                bios_print!("Detect disk change for disk 0x{:02x}", drive);
                                self.clear_carry(cpu, mem);
                                cpu.set_reg(BReg::AH, 0);
                            }
                            _ => panic!("Unhandled disk service (int 0x13): {:x}", ah)
                        }
                }
            }
            0x14 =>
            {
                /* Serial */
                let ah = cpu.get_reg(BReg::AH);
                match ah
                {
                    0x0 =>
                    {
                        /* Initialize port */
                        bios_print!("Serial port initialize (?)");
                        cpu.set_reg(BReg::AH, 0x80); // timeout
                    }
                    _ => panic!("Unhandled serial service: {:x}", ah)
                }
            }
            0x15 =>
            {
                let ah = cpu.get_reg(BReg::AH);
                match ah
                {
                    0x41 =>
                    {
                        bios_print!("BAD IMPLEMENTATION: wait on external event");
                    }
                    0x88 =>
                    {
                        bios_print!("Get extended memory size");
                        /* This is not a 286+ machine */
                        self.set_carry(cpu, mem);
                        cpu.set_reg(BReg::AH, 0x86); // Unsupported function
                    }
                    0xC0 =>
                    {
                        /* Get configuration */
                        bios_print!("Get configuration");
                        cpu.set_reg(WReg::AX, 0);
                        cpu.set_reg(WReg::BX, ROM_CONF_TABLE_ADDR);
                        cpu.set_reg(SegReg::ES, BIOS_SEGMENT);
                        self.clear_carry(cpu, mem);
                    }
                    0xC1 =>
                    {
                        bios_print!("Get EBDA address (unsupported)");
                        self.set_carry(cpu, mem);
                    }
                    _ => panic!("Unhandled 0x15 irq (ah={:x})", ah)
                }
            }
            0x16 =>
            {
                /* Keyboard services */
                let ah = cpu.get_reg(BReg::AH);
                match ah
                {
                    0x0 =>
                    {
                        bios_print!("Wait for keystroke");
                        /* Disable keyboard interrupt as, if we pump a scancode,
                         * we will use it immediately and pop it from the IO queue*/
                        hw.keyboard.set_irq(false);
                        loop
                        {
                            hw.keyboard.bios_pump_keystrokes(mem);
                            if let Some(keystroke) = hw.keyboard.try_pop_keystroke()
                            {
                                cpu.set_reg(BReg::AH, keystroke.scancode);
                                cpu.set_reg(BReg::AL, keystroke.ascii);
                                break;
                            }
                            hw.wait_for_event(cpu);
                        }
                        hw.keyboard.set_irq(true);
                    }
                    0x1 =>
                    {
                        // bios_print!("Check for keystroke");
                        // ZF=1 means "no keystroke available".
                        match hw.keyboard.check_keystroke()
                        {
                            None => self.set_flag_value(cpu, mem, FLAG_Z, true),
                            Some(keystroke) =>
                            {
                                bios_print!("Check for keystroke and got a keystroke");
                                self.set_flag_value(cpu, mem, FLAG_Z, false);
                                cpu.set_reg(BReg::AH, keystroke.scancode); // BIOS scancode
                                cpu.set_reg(BReg::AL, keystroke.ascii); // ASCII scancode
                            }
                        }
                    }
                    0x2 =>
                    {
                        // bios_print!("Get shift flags");
                        cpu.set_reg(BReg::AL, hw.keyboard.get_shift_flags(mem));
                    }
                    0x92 | 0x55 | 0x03 | 0xff =>
                    {
                        bios_print!("Unimplemented keyboard service: AX={:04x}", cpu.get_reg(WReg::AX));
                        self.set_carry(cpu, mem);
                    }
                    _ => panic!("Unhandled keyboard service: {:x}", ah)
                }
            }
            0x17 =>
            {
                /* Printer services */
                let ah = cpu.get_reg(BReg::AH);
                match ah
                {
                    0x1 =>
                    {
                        /* Initialize port */
                        bios_print!("Initialize printer?");
                        cpu.set_reg(BReg::AH, 0b00001000); // IO error
                        self.set_carry(cpu, mem);
                    }
                    _ => panic!("Unhandled printer service: {:x}", ah)
                }
            }
            0x1a =>
            {
                /* Time services */
                let ah = cpu.get_reg(BReg::AH);
                match ah
                {
                    0x0 =>
                    {
                        /* Get system time */
                        // Tick count lives in the BDA; 0x470 is the
                        // midnight-rollover flag, cleared once read.
                        let lo = mem.read_u16(0x46c);
                        let hi = mem.read_u16(0x46e);
                        let mf = mem.read_u8(0x470);
                        if mf != 0
                        {
                            mem.write_u8(0x470, 0);
                        }
                        bios_print!("Get system time - {:04x}{:04x}", hi, lo);
                        cpu.set_reg(BReg::AL, mf);
                        cpu.set_reg(WReg::CX, hi);
                        cpu.set_reg(WReg::DX, lo);
                    }
                    0x1 =>
                    {
                        /* Set system time */
                        bios_print!("UNIMPLEMENTED: Set system time")
                    }
                    0x2 =>
                    {
                        /* Get real-time time */
                        bios_print!("BAD IMPLEMENTATION: Get real-time clock time");
                        self.clear_carry(cpu, mem);
                        cpu.set_reg(BReg::CH, 0); // Hours
                        cpu.set_reg(BReg::CL, 0); // Minutes
                        cpu.set_reg(BReg::DH, 0); // Seconds
                        cpu.set_reg(BReg::DL, 0); // Daylight saving time flag
                    }
                    0x4 =>
                    {
                        bios_print!("Unimplemented time service; ah = {:x}", ah);
                        self.set_carry(cpu, mem);
                    }
                    _ => panic!("Unhandled time service: {:x}", ah)
                }
            }
            _ => panic!("Unhandled interrupt: 0x{:x} (ah={:x})", interrupt_number, cpu.get_reg(BReg::AH))
        }
    }
    /// Power-on sequence: installs the IVT, initializes attached storage,
    /// writes the ROM configuration table, loads the boot drive's MBR at
    /// 0000:7c00 and plants a far jump to it at the reset vector.
    fn boot(&mut self, cpu: &mut CPU, mem: &mut Memory, hw: &mut HW)
    {
        bios_print!("Boot");
        self.init_ivt(mem);
        {
            // Let each present storage device install its own IVT entry.
            let mut storage_init =
                |storage_opt: &mut Option<storage::Storage>|
                {
                    storage_opt.as_mut().map(|storage|
                    {
                        let (irq, seg, addr) = storage.init(mem);
                        self.write_ivt_entry(mem, irq, seg, addr);
                    });
                };
            storage_init(&mut hw.floppy);
            storage_init(&mut hw.hdd);
        }
        self.init_romconf(mem);
        bios_print!("Loading MBR... ");
        let (boot_storage, bios_drive) =
            match self.boot_drive
            {
                BootDrive::Floppy => (hw.floppy.as_mut().expect("Floppy disk image not specified"), 0x0),
                BootDrive::HardDrive => (hw.hdd.as_mut().expect("Hard drive image not specified"), 0x80)
            };
        cpu.set_reg(BReg::DL, bios_drive); // DL should contain the BIOS id of the boot drive
        let (result, read) = boot_storage.read(mem, 0x0, 0x7c00, 512);
        if result != storage::Status::Success || read != 512
        {
            panic!("Unable to read MBR");
        }
        /* Write a "jmp far 0:0x7c00" instruction at our current location,
         * which should be 0xf000:0xfff0 */
        let far_jmp_7c00: [u8; 5] = [0xea, 0x00, 0x7c, 0x00, 0x00];
        for i in 0..far_jmp_7c00.len()
        {
            mem.write_u8(phys_addr(BIOS_SEGMENT, 0xfff0 + i as u16), far_jmp_7c00[i]);
        }
        // Sets 80x25 display mode
        hw.display.set_mode(mem, display::GraphicMode::T8025);
        bios_print!("All done; now running the MBR")
    }
    /// Writes IVT entry `number` at linear address number*4 as the
    /// real-mode (offset, segment) pair.
    fn write_ivt_entry(&self, mem: &mut Memory, number: u8, seg: u16, addr: u16)
    {
        const IVT_OFFSET: u32 = 0;
        mem.write_u16(IVT_OFFSET + (number as u32) * 4, addr);
        mem.write_u16(IVT_OFFSET + (number as u32) * 4 + 2, seg);
    }
/*fn read_ivt_entry(&self, mem: &Memory, number: u8) -> (u16, u16)
{
const IVT_OFFSET: u32 = 0;
(
mem.read_u16(IVT_OFFSET + (number as u32) * 4 + 2), // Segment
mem.read_u16(IVT_OFFSET + (number as u32) * 4) // Address
)
}*/
fn init_ivt(&mut self, mem: &mut Memory)
{
const IRET: u8 = 0b11001111;
for irq in 0..0xff
{
/* CS = BIOS_SEGMENT; ip = #IRQ */
self.write_ivt_entry(mem, irq, BIOS_SEGMENT, irq as u16);
mem.write_u8(phys_addr(BIOS_SEGMENT, irq as u16), IRET); // Interrupt handler in ROM
}
}
    /// Fills in the ROM configuration table (returned by int 0x15/AH=0xC0)
    /// and stores the equipment word into the BDA.
    fn init_romconf(&self, mem: &mut Memory)
    {
        bios_print!("Setting rom configuration...");
        mem.write_u16(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 0), 8); // Table size - this entry (2bytes)
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 2), 0xFC); // Model: Linux DOSEMU (should be fine?)
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 3), 0); // Submodel
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 4), 0); // BIOS revision
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 5), 0b00000000); // Feature byte 1
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 6), 0b00000000); // Feature byte 2
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 7), 0b00000000); // Feature byte 3
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 8), 0b00000000); // Feature byte 4
        mem.write_u8(phys_addr(BIOS_SEGMENT, ROM_CONF_TABLE_ADDR + 9), 0b00000000); // Feature byte 5
        mem.write_u16(EQUIPMENT_WORD_ADDR, EQUIPMENT_WORD); // TODO: phys_addr + BDA_SEG?
    }
/* TODO: should be moved out of impl? */
/// Sets or clears `flag_mask` in the FLAGS image that the interrupt pushed on
/// the guest stack. We cannot change CPU flags directly — IRET will restore
/// the saved copy — so we patch the saved copy at SS:SP+4 instead (the FLAGS
/// word sits above the pushed CS:IP return address).
fn set_flag_value(&self, cpu: &CPU, mem: &mut Memory, flag_mask: u16, set: bool)
{
    let flags_addr = phys_addr(cpu.get_reg(SegReg::SS), cpu.get_reg(WReg::SP) + 4);
    let saved_flags = mem.read_u16(flags_addr);
    let updated_flags = if set {
        saved_flags | flag_mask
    } else {
        saved_flags & not(flag_mask)
    };
    mem.write_u16(flags_addr, updated_flags);
}
/// Sets or clears the carry flag in the saved FLAGS image (see set_flag_value).
fn set_carry_value(&self, cpu: &CPU, mem: &mut Memory, set: bool)
{
self.set_flag_value(cpu, mem, FLAG_C, set);
}
/// Sets the carry flag — the conventional BIOS "call failed" indicator.
fn set_carry(&self, cpu: &CPU, mem: &mut Memory)
{
self.set_carry_value(cpu, mem, true);
}
/// Clears the carry flag — the conventional BIOS "call succeeded" indicator.
fn clear_carry(&self, cpu: &CPU, mem: &mut Memory)
{
self.set_carry_value(cpu, mem, false);
}
} |
// svd2rust-generated reader/writer wrappers and field type aliases for the
// MTLTXQUR (MTL Tx queue underflow) register. Do not edit by hand.
#[doc = "Register `MTLTXQUR` reader"]
pub type R = crate::R<MTLTXQUR_SPEC>;
#[doc = "Register `MTLTXQUR` writer"]
pub type W = crate::W<MTLTXQUR_SPEC>;
#[doc = "Field `UFFRMCNT` reader - Underflow Packet Counter This field indicates the number of packets aborted by the controller because of Tx queue Underflow. This counter is incremented each time the MAC aborts outgoing packet because of underflow. The counter is cleared when this register is read.\n\nThe field is **cleared** (set to zero) following a read operation."]
pub type UFFRMCNT_R = crate::FieldReader<u16>;
#[doc = "Field `UFFRMCNT` writer - Underflow Packet Counter This field indicates the number of packets aborted by the controller because of Tx queue Underflow. This counter is incremented each time the MAC aborts outgoing packet because of underflow. The counter is cleared when this register is read."]
pub type UFFRMCNT_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 11, O, u16>;
#[doc = "Field `UFCNTOVF` reader - Overflow Bit for Underflow Packet Counter This bit is set every time the Tx queue Underflow Packet Counter field overflows, that is, it has crossed the maximum count. In such a scenario, the overflow packet counter is reset to all-zeros and this bit indicates that the rollover happened.\n\nThe field is **cleared** (set to zero) following a read operation."]
pub type UFCNTOVF_R = crate::BitReader;
#[doc = "Field `UFCNTOVF` writer - Overflow Bit for Underflow Packet Counter This bit is set every time the Tx queue Underflow Packet Counter field overflows, that is, it has crossed the maximum count. In such a scenario, the overflow packet counter is reset to all-zeros and this bit indicates that the rollover happened."]
pub type UFCNTOVF_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Field accessors: UFFRMCNT occupies bits 0..=10 (mask 0x07FF),
// UFCNTOVF is bit 11.
impl R {
#[doc = "Bits 0:10 - Underflow Packet Counter This field indicates the number of packets aborted by the controller because of Tx queue Underflow. This counter is incremented each time the MAC aborts outgoing packet because of underflow. The counter is cleared when this register is read."]
#[inline(always)]
pub fn uffrmcnt(&self) -> UFFRMCNT_R {
UFFRMCNT_R::new((self.bits & 0x07ff) as u16)
}
#[doc = "Bit 11 - Overflow Bit for Underflow Packet Counter This bit is set every time the Tx queue Underflow Packet Counter field overflows, that is, it has crossed the maximum count. In such a scenario, the overflow packet counter is reset to all-zeros and this bit indicates that the rollover happened."]
#[inline(always)]
pub fn ufcntovf(&self) -> UFCNTOVF_R {
UFCNTOVF_R::new(((self.bits >> 11) & 1) != 0)
}
}
// Writer proxies for the same two fields, plus raw-bits access.
impl W {
#[doc = "Bits 0:10 - Underflow Packet Counter This field indicates the number of packets aborted by the controller because of Tx queue Underflow. This counter is incremented each time the MAC aborts outgoing packet because of underflow. The counter is cleared when this register is read."]
#[inline(always)]
#[must_use]
pub fn uffrmcnt(&mut self) -> UFFRMCNT_W<MTLTXQUR_SPEC, 0> {
UFFRMCNT_W::new(self)
}
#[doc = "Bit 11 - Overflow Bit for Underflow Packet Counter This bit is set every time the Tx queue Underflow Packet Counter field overflows, that is, it has crossed the maximum count. In such a scenario, the overflow packet counter is reset to all-zeros and this bit indicates that the rollover happened."]
#[inline(always)]
#[must_use]
pub fn ufcntovf(&mut self) -> UFCNTOVF_W<MTLTXQUR_SPEC, 11> {
UFCNTOVF_W::new(self)
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
// `unsafe`: bypasses the typed field writers; presumably the caller must
// ensure the value is valid for this register — per svd2rust convention.
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
}
#[doc = "Tx queue underflow register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`mtltxqur::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`mtltxqur::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
// Marker type tying the 32-bit backing store, readability/writability and
// reset value (0) of MTLTXQUR together for the generic Reg machinery.
pub struct MTLTXQUR_SPEC;
impl crate::RegisterSpec for MTLTXQUR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [`mtltxqur::R`](R) reader structure"]
impl crate::Readable for MTLTXQUR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`mtltxqur::W`](W) writer structure"]
impl crate::Writable for MTLTXQUR_SPEC {
const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets MTLTXQUR to value 0"]
impl crate::Resettable for MTLTXQUR_SPEC {
const RESET_VALUE: Self::Ux = 0;
}
|
// http://codekata.com/kata/kata04-data-munging/
//
// Kata04: Data Munging
//
// Martin Fowler gave me a hard time for Kata02, complaining that it was yet
// another single-function, academic exercise. Which, or course, it was. So this
// week let’s mix things up a bit.
//
// Here’s an exercise in three parts to do with real world data. Try hard not to
// read ahead—do each part in turn.
//
// Part One: Weather Data
//
// In weather.dat you’ll find daily weather data for Morristown, NJ for June 2002.
// Download this text file, then write a program to output the day number (column one)
// with the smallest temperature spread (the maximum temperature is the second column,
// the minimum the third column).
//
#![feature(globs)]
use std::num;
use std::io::File;
use std::fmt;
// NOTE(review): this file is pre-1.0 Rust (`int`, `fmt::Show`,
// `#![feature(globs)]`, `std::num::from_str_radix`) and only builds on an
// old nightly toolchain.
/// One day's weather record: date, max/min temperatures, and their spread.
pub struct DailyTempSpread {
date: int,
max: int,
min: int,
temp_spread: int
}
impl DailyTempSpread {
/// Builds a record, computing `temp_spread = max - min` up front.
fn new(day: int, max: int, min: int) -> DailyTempSpread {
DailyTempSpread { date: day,
max: max,
min: min,
temp_spread: max - min
}
}
}
// Ordering and equality are defined purely on `temp_spread`, so sorting a
// Vec<DailyTempSpread> orders days by how large their temperature swing was.
impl std::cmp::PartialOrd for DailyTempSpread {
fn partial_cmp(&self, other: &DailyTempSpread) -> Option<Ordering> {
self.temp_spread.partial_cmp(&other.temp_spread)
}
}
// `fmt::Show` is the pre-1.0 ancestor of today's `fmt::Debug`/`Display`.
impl fmt::Show for DailyTempSpread {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "({}, {}, {}, {})",
self.date, self.max, self.min, self.temp_spread)
}
}
impl Ord for DailyTempSpread {
fn cmp(&self, other: &DailyTempSpread) -> Ordering {
self.temp_spread.cmp(&other.temp_spread)
}
}
impl Eq for DailyTempSpread {}
// Two records with equal spreads compare equal even if dates differ.
impl PartialEq for DailyTempSpread {
fn eq(&self, other: &DailyTempSpread) -> bool {
self.temp_spread == other.temp_spread
}
}
/// Strips a trailing `*` footnote marker from a data-file token (e.g. "97*").
/// The `if` is belt-and-braces: `trim_right_chars('*')` is already a no-op
/// when the string has no trailing asterisk.
fn sanitize<'a>(unsanitized: &'a str) -> &'a str {
if unsanitized.ends_with("*") {
return unsanitized.trim_right_chars('*')
} else {
return unsanitized
}
}
/// Parses one line of weather.dat into a record, or None for blank/header
/// lines (detected by the 4th character not being a digit — the day number is
/// right-aligned in column 4).
/// NOTE(review): lines shorter than 4 chars or with fewer than 3 columns
/// would panic via `char_at`/`unwrap` — presumably the data file guarantees
/// the format; confirm before reuse.
pub fn parse_line<'a>(line: &'a str) -> Option<DailyTempSpread> {
if line.is_empty() || !line.char_at(3).is_digit() {
None
} else {
let l: Vec<&str> = line.words().collect();
let date: int = num::from_str_radix(sanitize(l[0]), 10).unwrap();
let max: int = num::from_str_radix(sanitize(l[1]), 10).unwrap();
let min: int = num::from_str_radix(sanitize(l[2]), 10).unwrap();
Some(DailyTempSpread::new( date, max, min ))
}
}
/// Sorts ascending by spread and pops the last element, i.e. returns the day
/// with the LARGEST temperature spread. Panics on an empty list.
/// NOTE(review): the kata statement at the top of this file asks for the
/// SMALLEST spread — confirm which behavior is actually intended.
fn find_highest_spread(mut days_list: Vec<DailyTempSpread>) -> DailyTempSpread {
days_list.sort();
days_list.pop().unwrap()
}
/// Reads assets/weather.dat, parses each data line, and reports the day with
/// the largest temperature spread (see NOTE on find_highest_spread).
fn main() {
let path = Path::new("assets/weather.dat");
let mut file = File::open(&path);
let data = file.read_to_end().unwrap();
let data_string = String::from_utf8(data);
let mut days: Vec<DailyTempSpread> = Vec::new();
// Header/blank lines yield None from parse_line and are skipped.
for line in data_string.unwrap().as_slice().lines() {
match parse_line(line) {
Some(x) => days.push(x),
None => ()
}
}
let highest: DailyTempSpread = find_highest_spread(days);
println!("June {}: {} degrees", highest.date, highest.temp_spread);
}
// NOTE(review): missing `#[cfg(test)]`, so this module is compiled into the
// normal binary as well as the test harness.
mod tests {
#[test]
fn test_sanitize() {
assert_eq!("32".as_slice(), super::sanitize("32*".as_slice()));
assert_eq!("32".as_slice(), super::sanitize("32".as_slice()));
}
#[test]
fn test_parse_line() {
let line = " 4 77 59 68 51.1 0.00 110 9.1 130 12 8.6 62 40 1021.1";
let day = super::DailyTempSpread::new( 4, 77, 59);
assert_eq!(Some(day), super::parse_line(line));
assert_eq!(None, super::parse_line("".as_slice()));
// `97*` exercises the footnote-stripping path in sanitize().
let tricky_line = " 26 97* 64";
let day2 = super::DailyTempSpread::new(26, 97, 64);
assert_eq!(Some(day2), super::parse_line(tricky_line));
let starts_with_word = " mo 82.9 60.5 71.7 16 58.8 0.00 6.9 5.3";
assert_eq!(None, super::parse_line(starts_with_word));
}
}
|
// svd2rust-generated (pre-0.15 API style) accessors for register CMP1_SW.
// Reader/writer wrappers plus the reset value (0).
#[doc = "Reader of register CMP1_SW"]
pub type R = crate::R<u32, super::CMP1_SW>;
#[doc = "Writer for register CMP1_SW"]
pub type W = crate::W<u32, super::CMP1_SW>;
#[doc = "Register CMP1_SW `reset()`'s with value 0"]
impl crate::ResetValue for super::CMP1_SW {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
// Per-field single-bit write proxies. Each `bit()` performs a read-modify-
// write of one bit of the staged register value: clear the bit with the
// inverted mask, then OR in the (masked) new value at the field's offset.
// Bit positions: IP1=0, AP1=1, BP1=2, IN1=4, AN1=5, BN1=6, VN1=7.
#[doc = "Reader of field `CMP1_IP1`"]
pub type CMP1_IP1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_IP1`"]
pub struct CMP1_IP1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_IP1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Reader of field `CMP1_AP1`"]
pub type CMP1_AP1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_AP1`"]
pub struct CMP1_AP1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_AP1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Reader of field `CMP1_BP1`"]
pub type CMP1_BP1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_BP1`"]
pub struct CMP1_BP1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_BP1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
// Note: bit 3 is skipped in this register's layout.
#[doc = "Reader of field `CMP1_IN1`"]
pub type CMP1_IN1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_IN1`"]
pub struct CMP1_IN1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_IN1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `CMP1_AN1`"]
pub type CMP1_AN1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_AN1`"]
pub struct CMP1_AN1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_AN1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `CMP1_BN1`"]
pub type CMP1_BN1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_BN1`"]
pub struct CMP1_BN1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_BN1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `CMP1_VN1`"]
pub type CMP1_VN1_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CMP1_VN1`"]
pub struct CMP1_VN1_W<'a> {
w: &'a mut W,
}
impl<'a> CMP1_VN1_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
// Read accessors: each extracts one bit of the cached register value.
impl R {
#[doc = "Bit 0 - Comparator 1 positive terminal isolation switch to GPIO"]
#[inline(always)]
pub fn cmp1_ip1(&self) -> CMP1_IP1_R {
CMP1_IP1_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Comparator 1 positive terminal switch to amuxbusA"]
#[inline(always)]
pub fn cmp1_ap1(&self) -> CMP1_AP1_R {
CMP1_AP1_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Comparator 1 positive terminal switch to amuxbusB"]
#[inline(always)]
pub fn cmp1_bp1(&self) -> CMP1_BP1_R {
CMP1_BP1_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 4 - Comparator 1 negative terminal isolation switch to GPIO"]
#[inline(always)]
pub fn cmp1_in1(&self) -> CMP1_IN1_R {
CMP1_IN1_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Comparator 1 negative terminal switch to amuxbusA"]
#[inline(always)]
pub fn cmp1_an1(&self) -> CMP1_AN1_R {
CMP1_AN1_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Comparator 1 negative terminal switch to amuxbusB"]
#[inline(always)]
pub fn cmp1_bn1(&self) -> CMP1_BN1_R {
CMP1_BN1_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Comparator 1 negative terminal switch to local Vref (LPREF_EN must be set)"]
#[inline(always)]
pub fn cmp1_vn1(&self) -> CMP1_VN1_R {
CMP1_VN1_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
// Write accessors: each returns the matching single-bit write proxy.
impl W {
#[doc = "Bit 0 - Comparator 1 positive terminal isolation switch to GPIO"]
#[inline(always)]
pub fn cmp1_ip1(&mut self) -> CMP1_IP1_W {
CMP1_IP1_W { w: self }
}
#[doc = "Bit 1 - Comparator 1 positive terminal switch to amuxbusA"]
#[inline(always)]
pub fn cmp1_ap1(&mut self) -> CMP1_AP1_W {
CMP1_AP1_W { w: self }
}
#[doc = "Bit 2 - Comparator 1 positive terminal switch to amuxbusB"]
#[inline(always)]
pub fn cmp1_bp1(&mut self) -> CMP1_BP1_W {
CMP1_BP1_W { w: self }
}
#[doc = "Bit 4 - Comparator 1 negative terminal isolation switch to GPIO"]
#[inline(always)]
pub fn cmp1_in1(&mut self) -> CMP1_IN1_W {
CMP1_IN1_W { w: self }
}
#[doc = "Bit 5 - Comparator 1 negative terminal switch to amuxbusA"]
#[inline(always)]
pub fn cmp1_an1(&mut self) -> CMP1_AN1_W {
CMP1_AN1_W { w: self }
}
#[doc = "Bit 6 - Comparator 1 negative terminal switch to amuxbusB"]
#[inline(always)]
pub fn cmp1_bn1(&mut self) -> CMP1_BN1_W {
CMP1_BN1_W { w: self }
}
#[doc = "Bit 7 - Comparator 1 negative terminal switch to local Vref (LPREF_EN must be set)"]
#[inline(always)]
pub fn cmp1_vn1(&mut self) -> CMP1_VN1_W {
CMP1_VN1_W { w: self }
}
}
|
//ported from https://github.com/masahi/ocaml_practice/blob/master/mooc/klotski.ml
use std::cmp::Ordering;
use std::collections::BTreeSet;
use std::ops::{Deref, DerefMut};
/// Returns the index of the first element satisfying `pred`, or `None`.
/// Takes `&[T]` rather than `&Vec<T>` (the idiomatic slice parameter);
/// existing `&Vec<T>` arguments still coerce, so callers are unaffected.
fn find_index<T>(pred: impl FnMut(&T) -> bool, vec: &[T]) -> Option<usize> {
    vec.iter().position(pred)
}
/* fn flat_map<T, F>(rel: F) -> impl Fn(&Vec<T>) -> Vec<T>
where
F: Fn(&T) -> Vec<T> + Copy,
{
move |conf_list| conf_list.iter().flat_map(rel).collect()
}
fn solve_slow<T, F, P>(r: F, p: P, a: T) -> T
where
F: Fn(&T) -> Vec<T> + Copy,
P: FnMut(&T) -> bool + Copy,
{
fn iter<T, F, P>(mut configs: Vec<T>, r: F, p: P) -> T
where
F: Fn(&T) -> Vec<T> + Copy,
P: FnMut(&T) -> bool + Copy,
{
match find_index(p, &configs) {
Some(ind) => configs.remove(ind),
None => iter(flat_map(r)(&configs), r, p),
}
}
iter(vec![a], r, p)
}
*/
type Set<T> = BTreeSet<T>;
/// Expands one BFS layer: applies `r` to every frontier element in order and
/// returns the grown seen-set together with the list of configurations that
/// were not seen before (each appended exactly once, in discovery order).
/// (`BTreeSet<T>` is spelled out; it is the same type as the `Set<T>` alias.)
fn expand_frontiers<T, F>(r: F, (s, l): (BTreeSet<T>, Vec<T>)) -> (BTreeSet<T>, Vec<T>)
where
    F: Fn(&T) -> Vec<T>,
    T: Clone + Ord,
{
    let mut seen = s;
    let mut frontiers = Vec::new();
    for x in l {
        for elt in r(&x) {
            // `insert` returns true only for genuinely new elements.
            if seen.insert(elt.clone()) {
                frontiers.push(elt);
            }
        }
    }
    (seen, frontiers)
}
fn solve<T, F, P>(r: F, p: P, a: T) -> T
where
F: Fn(&T) -> Vec<T> + Copy,
P: FnMut(&T) -> bool + Copy,
T: Clone + Ord,
{
fn iter<T, F, P>(r: F, p: P, s: Set<T>, mut l: Vec<T>, round: usize) -> T
where
F: Fn(&T) -> Vec<T> + Copy,
P: FnMut(&T) -> bool + Copy,
T: Clone + Ord,
{
match find_index(p, &l) {
Some(ind) => l.remove(ind),
None => {
let (s, l) = expand_frontiers(r, (s, l));
println!("Round {}, frontier size {}", round, l.len());
iter(r, p, s, l, round + 1)
}
}
}
let mut init_set = Set::new();
init_set.insert(a.clone());
iter(r, p, init_set, vec![a], 0)
}
/// Like `solve`, but returns the whole path of configurations from `a` to the
/// first solution instead of just the final configuration.
fn solve_path<T, F, P>(r: F, mut p: P, a: T) -> Vec<T>
where
    F: Fn(&T) -> Vec<T> + Copy,
    P: FnMut(&T) -> bool + Copy,
    T: Clone + Ord,
{
    // A path of configurations. Crucially, Ord/Eq below compare paths by
    // their LAST element only, so `solve`'s seen-set keeps exactly one
    // representative path per reached configuration.
    #[derive(PartialEq, Eq, Clone)]
    struct SolutionPath<T>(Vec<T>);
    impl<T: Ord> SolutionPath<T> {
        fn new() -> SolutionPath<T> {
            SolutionPath(vec![])
        }
        fn push(&mut self, elem: T) {
            self.0.push(elem)
        }
        fn last(&self) -> Option<&T> {
            self.0.last()
        }
    }
    impl<T: Ord> Ord for SolutionPath<T> {
        fn cmp(&self, other: &Self) -> Ordering {
            match (self.last(), other.last()) {
                (None, None) => Ordering::Equal,
                (Some(_), None) => Ordering::Greater,
                (None, Some(_)) => Ordering::Less,
                (Some(x), Some(y)) => x.cmp(y),
            }
        }
    }
    impl<T: Ord> PartialOrd for SolutionPath<T> {
        fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
            Some(self.cmp(other))
        }
    }
    // Lift the configuration relation to paths: extend the path by every
    // successor of its last configuration.
    let path_rel = move |path: &SolutionPath<T>| match path.last() {
        None => vec![],
        Some(last) => {
            let new_confs = r(last);
            new_confs
                .into_iter()
                .map(|conf| {
                    let mut p = path.clone();
                    p.push(conf);
                    p
                })
                .collect()
        }
    };
    // A path is a solution when its last configuration is.
    let path_prop = move |path: &SolutionPath<T>| match path.last() {
        None => false,
        Some(last) => p(last),
    };
    let mut init = SolutionPath::new();
    init.push(a);
    let sol = solve(path_rel, path_prop, init);
    sol.0
}
/// Abstract sliding-block puzzle over configurations `Conf` and moves `Move`.
trait Puzzle<Conf, Move> {
/// Configuration reached by playing `m` in `c`.
fn apply_move(&self, c: &Conf, m: &Move) -> Conf;
/// Every legal move from `c`.
fn possible_move(&self, c: &Conf) -> Vec<Move>;
/// True when `c` is a goal configuration.
fn is_final(&self, c: &Conf) -> bool;
}
//fn solve_puzzle<Conf, Move, P>(puzzle: P, init_conf: Conf) -> Vec<Conf>
/// Runs BFS over a `Puzzle`'s move relation and returns the first final
/// configuration reached from `init_conf`.
fn solve_puzzle<Conf, Move, P>(puzzle: P, init_conf: Conf) -> Conf
where
    Conf: Clone + Ord,
    P: Puzzle<Conf, Move> + Copy,
{
    // Successor relation: apply every legal move to the configuration.
    let rel = move |conf: &Conf| {
        let moves = puzzle.possible_move(conf);
        moves
            .into_iter()
            .map(|mv| puzzle.apply_move(conf, &mv))
            .collect()
    };
    solve(rel, |c| puzzle.is_final(c), init_conf)
}
/// Kind of a Klotski piece. Footprints (see `move_piece`): S is 2x2,
/// H is 1x2 horizontal, V is 2x1 vertical, C a single cell, X the empty
/// cell marker.
#[derive(Copy, Clone, PartialEq, Eq)]
enum PieceKind {
    S,
    H,
    V,
    C,
    X,
}
/// A concrete piece: its kind plus an index distinguishing same-kind pieces.
#[derive(Copy, Clone, PartialEq, Eq)]
struct Piece {
    kind: PieceKind,
    index: u8,
}
impl Piece {
    /// Renders the piece as "(K, i)" for board printing.
    fn to_string(&self) -> String {
        format!(
            "({}, {})",
            match self.kind {
                PieceKind::S => "S",
                PieceKind::H => "H",
                PieceKind::V => "V",
                PieceKind::C => "C",
                PieceKind::X => "X",
            },
            self.index
        )
    }
}
// Named pieces used to lay out boards. `const` (not `static`) is the
// idiomatic choice for small `Copy` values: they are inlined at use sites
// and no fixed address is ever taken. All uses here are by value/equality,
// so this is a drop-in replacement.
/// The empty-cell marker.
const X: Piece = Piece {
    kind: PieceKind::X,
    index: 0,
};
/// The 2x2 block that must reach the exit.
const S: Piece = Piece {
    kind: PieceKind::S,
    index: 0,
};
const H: Piece = Piece {
    kind: PieceKind::H,
    index: 0,
};
const C0: Piece = Piece {
    kind: PieceKind::C,
    index: 0,
};
const C1: Piece = Piece {
    kind: PieceKind::C,
    index: 1,
};
const C2: Piece = Piece {
    kind: PieceKind::C,
    index: 2,
};
const C3: Piece = Piece {
    kind: PieceKind::C,
    index: 3,
};
const V0: Piece = Piece {
    kind: PieceKind::V,
    index: 0,
};
const V1: Piece = Piece {
    kind: PieceKind::V,
    index: 1,
};
const V2: Piece = Piece {
    kind: PieceKind::V,
    index: 2,
};
const V3: Piece = Piece {
    kind: PieceKind::V,
    index: 3,
};
impl Ord for Piece {
fn cmp(
&self,
Piece {
kind: k2,
index: ind2,
}: &Self,
) -> Ordering {
let kind_to_int = |k| match k {
PieceKind::S => 5,
PieceKind::H => 4,
PieceKind::C => 3,
PieceKind::V => 2,
PieceKind::X => 1,
};
if self.kind == *k2 {
self.index.cmp(&ind2)
} else {
kind_to_int(self.kind).cmp(&kind_to_int(*k2))
}
}
}
impl PartialOrd for Piece {
// Delegates to the total order defined by `Ord`.
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
/// The 5x4 Klotski board: a grid of pieces in row-major order.
#[derive(Eq, PartialEq, Clone, Copy)]
struct Board([[Piece; 4]; 5]);
impl Ord for Board {
    /// Lexicographic, row-major comparison — exactly what comparing the
    /// underlying nested arrays element-wise does, so delegate to them.
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0)
    }
}
impl PartialOrd for Board {
// Delegates to the total order defined by `Ord`.
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
// Deref/DerefMut let a Board be indexed like the underlying 5x4 array
// (e.g. `board[i][j]`), which the move/printing code relies on.
impl Deref for Board {
type Target = [[Piece; 4]; 5];
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for Board {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
/// Row/column delta applied to a position (components are -1, 0 or 1).
#[derive(Clone, Copy, PartialEq)]
struct Direction(i8, i8);
/// (row, column) coordinate on the 5x4 board.
#[derive(Clone, Copy, PartialEq)]
struct Pos(u8, u8);
/// One legal step of a piece: the cells it newly covers and the cells it
/// frees (cells it keeps covering appear in neither list).
#[derive(Clone, PartialEq)]
struct Move {
piece: Piece,
next_occupied_pos: Vec<(u8, u8)>,
next_vacant_pos: Vec<(u8, u8)>,
}
/// Zero-sized marker implementing `Puzzle` for the classic Klotski rules.
#[derive(Copy, Clone)]
struct Klotski;
/// Tries to move `piece` (whose top-left cell is at `pos`) one step in `dir`.
/// Returns the resulting `Move` if every newly covered cell is in bounds and
/// currently empty, `None` otherwise.
fn move_piece(board: &Board, piece: Piece, pos: Pos, dir: Direction) -> Option<Move> {
// Set difference on position lists: elements of v1 not present in v2.
let vec_diff = |v1: &Vec<(u8, u8)>, v2: &Vec<(u8, u8)>| {
v1.iter()
.filter(|elt| v2.iter().find(|el2| el2 == elt).is_none())
.cloned()
.collect()
};
// Footprint of the piece when its top-left cell is (i, j).
let occupied_pos = |i, j| match piece.kind {
PieceKind::S => vec![(i, j), (i + 1, j), (i, j + 1), (i + 1, j + 1)],
PieceKind::H => vec![(i, j), (i, j + 1)],
PieceKind::V => vec![(i, j), (i + 1, j)],
PieceKind::C => vec![(i, j)],
_ => unreachable!(),
};
// Cells newly covered by the move, and cells left behind.
let diff_occupied_pos = |i, j| {
let current = occupied_pos(i, j);
let next = occupied_pos((i as i8 + dir.0) as u8, (j as i8 + dir.1) as u8);
let next_occupied_pos = vec_diff(&next, &current);
let next_vacant_pos = vec_diff(&current, &next);
(next_occupied_pos, next_vacant_pos)
};
// Every newly covered cell must be inside the 5x4 board and empty.
let can_move = |next_occupied: &Vec<(u8, u8)>| {
let in_bound = |i, j| i < 5 && j < 4;
next_occupied
.iter()
.all(|(i, j)| in_bound(*i, *j) && board[*i as usize][*j as usize] == X)
};
// Guards the u8 coordinate arithmetic against underflow when moving
// up/left from row/column 0.
let is_dir_safe = |i, j| !((i == 0 && dir.0 == -1) || (j == 0 && dir.1 == -1));
if is_dir_safe(pos.0, pos.1) {
let (next_occupied_pos, next_vacant_pos) = diff_occupied_pos(pos.0, pos.1);
if can_move(&next_occupied_pos) {
Some(Move {
piece,
next_occupied_pos,
next_vacant_pos,
})
} else {
None
}
} else {
None
}
}
fn get_piece_positions(board: &Board) -> Vec<(Piece, Pos)> {
let mut pairs = Vec::new();
let mut seen = Set::new();
for i in 0..5 {
for j in 0..4 {
let piece = board[i][j];
if piece != X && !seen.contains(&piece) {
pairs.push((piece, Pos(i as u8, j as u8)));
seen.insert(piece);
}
}
}
pairs
}
impl Puzzle<Board, Move> for Klotski {
    /// Copies the board, stamps `piece` onto its newly occupied cells and
    /// clears the cells it vacated.
    fn apply_move(
        &self,
        board: &Board,
        Move {
            piece,
            next_occupied_pos,
            next_vacant_pos,
        }: &Move,
    ) -> Board {
        let mut board_copy = board.clone();
        next_occupied_pos
            .iter()
            .for_each(|(i, j)| board_copy[*i as usize][*j as usize] = *piece);
        next_vacant_pos
            .iter()
            .for_each(|(i, j)| board_copy[*i as usize][*j as usize] = X);
        board_copy
    }
    /// Enumerates every legal single-step move: for each piece on the board,
    /// try all four directions and keep those `move_piece` accepts.
    fn possible_move(&self, b: &Board) -> Vec<Move> {
        let get_moves = |(p, pos)| -> Vec<Move> {
            let directions = vec![
                Direction(0, 1),
                Direction(0, -1),
                Direction(1, 0),
                Direction(-1, 0),
            ];
            directions
                .into_iter()
                .filter_map(|dir| move_piece(b, p, pos, dir))
                .collect()
        };
        get_piece_positions(b)
            .into_iter()
            .flat_map(get_moves)
            .collect()
    }
    /// Solved when the 2x2 `S` block occupies rows 3-4, columns 1-2.
    fn is_final(&self, b: &Board) -> bool {
        // BUG FIX: the original tested `b[3][2]` twice and never `b[4][2]`,
        // so a board with only three quarters of the S block in place was
        // accepted as final.
        b[3][1] == S && b[3][2] == S && b[4][1] == S && b[4][2] == S
    }
}
//fn solve_klotski(initial_board: Board) -> Vec<Board> {
/// Runs the generic BFS solver on the Klotski rules and returns the first
/// final board found.
fn solve_klotski(initial_board: Board) -> Board {
solve_puzzle(Klotski, initial_board)
}
/// Prints the board as five rows of "(K, i)" cells, followed by a blank line.
fn print_board(board: &Board) {
    for row in board.iter() {
        for piece in row.iter() {
            print!("{} ", piece.to_string());
        }
        println!();
    }
    println!();
}
/// Solves the simpler demo layout and prints the resulting final board.
/// `initial_board` (the classic layout) and `trivial_board` are alternative
/// inputs kept for experimentation; they are currently unused.
fn main() {
let initial_board_simpler = [
[C2, S, S, C1],
[C0, S, S, C3],
[V1, V2, V3, V0],
[V1, V2, V3, V0],
[X, X, X, X],
];
let initial_board = [
[V0, S, S, V1],
[V0, S, S, V1],
[V2, H, H, V3],
[V2, C0, C1, V3],
[C2, X, X, C3],
];
let trivial_board = [
[X, S, S, X],
[X, S, S, X],
[X, X, X, X],
[X, X, X, X],
[X, X, X, X],
];
let sol = solve_klotski(Board(initial_board_simpler));
//sol.iter().for_each(print_board)
print_board(&sol);
}
|
use js_sys::Promise;
use wasm_bindgen::JsValue;
use web_sys::console;
use web_sys::HtmlMediaElement;
use yew::prelude::*;
/// Yew component: a clickable thumbnail that plays its associated audio clip.
pub(crate) struct Thumb {
// Handle to the rendered <audio> element, resolved on demand in update().
audio_ref: NodeRef,
}
impl Component for Thumb {
type Message = Msg;
type Properties = Props;
fn create(_ctx: &Context<Self>) -> Self {
Self {
audio_ref: NodeRef::default(),
}
}
// PlayRequest starts playback on the referenced HtmlMediaElement and maps
// the returned promise to Success/Failure once it settles; Success/Failure
// themselves trigger no re-render (the `_ => false` arm).
// NOTE(review): both `unwrap()`s panic if the node is not mounted or
// play() rejects synchronously — presumably acceptable in this app.
fn update(&mut self, ctx: &Context<Self>, msg: Self::Message) -> bool {
match msg {
Msg::PlayRequest => {
let promise = self
.audio_ref
.cast::<HtmlMediaElement>()
.unwrap()
.play()
.unwrap();
ctx.link().send_future(async {
match play_the_thing(promise).await {
Ok(_) => Msg::Success,
Err(_) => Msg::Failure,
}
});
true
}
_ => false,
}
}
// Renders the hidden <audio> (mp3 + ogg sources) and the thumbnail image
// whose click sends PlayRequest.
fn view(&self, ctx: &Context<Self>) -> Html {
let onclick = ctx.link().callback(|_| {
console::debug_1(&JsValue::from_str("hi"));
Msg::PlayRequest
});
html! {
<td>
<audio id={ ctx.props().name.clone() } preload="auto" ref={self.audio_ref.clone()}>
<source src={ ctx.props().audio_file_path(AudioFileType::Mp3) } type="audio/mpeg" />
<source src={ ctx.props().audio_file_path(AudioFileType::Ogg) } type="audio/ogg" />
<p>
{ "can't put it in the pizza. you need an " }
<a href="http://thebrowsereview.com/html5/html5-audio-tag-and-format-support/">
{ "html5 browser" }
</a>
</p>
</audio>
<img src={ctx.props().img_file_path()} onclick={onclick} />
<p>{ &ctx.props().text }</p>
</td>
}
}
}
/// Awaits the JS promise returned by HtmlMediaElement::play(), converting it
/// into a Rust future result.
async fn play_the_thing(promise: Promise) -> Result<JsValue, JsValue> {
wasm_bindgen_futures::JsFuture::from(promise).await
}
/// Component messages: a click requests playback; Success/Failure report the
/// outcome of the play() promise.
pub(crate) enum Msg {
PlayRequest,
Success,
Failure,
}
/// Component properties: display `text` and the base `name` used to derive
/// image and audio file paths.
#[derive(PartialEq, Eq, Properties)]
pub(crate) struct Props {
#[prop_or_default]
pub(crate) text: String,
#[prop_or_default]
pub(crate) name: String,
}
impl Props {
    /// Path of the thumbnail image derived from the component name.
    fn img_file_path(&self) -> String {
        format!("thumbs/{}.jpg", self.name)
    }
    /// Path of the audio file for the requested container format.
    fn audio_file_path(&self, file_type: AudioFileType) -> String {
        let extension = match file_type {
            AudioFileType::Ogg => "ogg",
            AudioFileType::Mp3 => "mp3",
        };
        format!("audio/{}.{}", self.name, extension)
    }
}
/// Audio container formats offered as <source> alternatives.
#[derive(Clone, Copy)]
enum AudioFileType {
Ogg,
Mp3,
}
|
use crate::ast_transform::Transformer;
use crate::scm::Scm;
use crate::sexpr::{Sexpr, TrackedSexpr};
use crate::source::SourceLocation;
use crate::syntax::Reify;
/// AST node for a constant (self-evaluating) s-expression, together with the
/// source span it was read from.
#[derive(Clone)]
pub struct Constant {
pub value: Sexpr,
pub span: SourceLocation,
}
impl_sourced!(Constant);
impl Constant {
pub fn new(value: Sexpr, span: SourceLocation) -> Self {
Constant { value, span }
}
// Constants have no children, so the default transform is the identity.
pub fn default_transform(self, _visitor: &mut impl Transformer) -> Self {
self
}
}
impl std::fmt::Debug for Constant {
// Syntactic closures are rendered as Constant(<inner sexpr>) to make the
// wrapping visible; everything else prints its value directly.
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.value {
Sexpr::SyntacticClosure(sc) => write!(f, "Constant(<{}>)", sc.sexpr().sexpr),
x => write!(f, "Constant({})", x),
}
}
}
// Conversion keeping the original source span.
impl From<TrackedSexpr> for Constant {
fn from(sexpr: TrackedSexpr) -> Self {
Constant {
value: sexpr.sexpr,
span: sexpr.src,
}
}
}
// Conversion for values with no source location.
impl From<Sexpr> for Constant {
fn from(sexpr: Sexpr) -> Self {
Constant {
value: sexpr,
span: SourceLocation::NoSource,
}
}
}
impl Reify for Constant {
// Symbols are wrapped in (quote ...) so the reified form reads back as
// data rather than a variable reference; other values are self-evaluating.
fn reify(&self) -> Scm {
let x: Scm = (&self.value).into();
match x {
Scm::Symbol(_) => Scm::list(vec![Scm::symbol("quote"), x]),
_ => x,
}
}
}
|
/// Total surface area of an h x w x l box: twice the sum of the three
/// distinct face areas.
fn surface_area(h: usize, w: usize, l: usize) -> usize {
    2 * (h * w + h * l + w * l)
}
/// Area of the smallest face of the box.
fn smallest_side_area(h: usize, w: usize, l: usize) -> usize {
    (h * w).min(h * l).min(w * l)
}
/// Perimeter of the smallest face; min distributes over the common factor 2.
fn smallest_side_perimeter(h: usize, w: usize, l: usize) -> usize {
    2 * ((h + w).min(h + l).min(w + l))
}
/// Volume of the box.
fn volume(h: usize, w: usize, l: usize) -> usize {
    [h, w, l].iter().product()
}
/// Ribbon needed for one present: the smallest face's perimeter (to wrap)
/// plus the volume (for the bow).
fn ribbon(h: usize, w: usize, l: usize) -> usize {
    smallest_side_perimeter(h, w, l) + volume(h, w, l)
}
/// Wrapping paper needed: the full surface area plus slack equal to the
/// smallest face's area.
fn paper(h: usize, w: usize, l: usize) -> usize {
    smallest_side_area(h, w, l) + surface_area(h, w, l)
}
/// Sums `material(h, w, l)` over every well-formed "HxWxL" line.
/// Takes `&[String]` (idiomatic slice; `&Vec<String>` still coerces).
/// Lines that do not yield at least three dimensions are skipped — the
/// original indexed `dims[2]` and would panic on e.g. "1x2"; unparseable
/// numbers still default to 0 as before.
fn total(presents: &[String], material: fn(usize, usize, usize) -> usize) -> usize {
    presents
        .iter()
        .filter_map(|present| {
            let dims: Vec<usize> = present
                .split('x')
                .map(|x| x.parse().unwrap_or_default())
                .collect();
            // First three dimensions, as in the original; extras ignored.
            match dims.as_slice() {
                [h, w, l, ..] => Some(material(*h, *w, *l)),
                _ => None,
            }
        })
        .sum()
}
/// Collects every line of stdin into a Vec, panicking on a read error.
fn readlines() -> Vec<String> {
    use std::io::prelude::*;
    let stdin = std::io::stdin();
    stdin.lock().lines().map(Result::unwrap).collect()
}
fn main() {
let input = readlines();
println!("Part 1: {}", total(&input, paper));
println!("Part 1: {}", total(&input, ribbon));
} |
use crate::hex::coordinates::{
direction::{HexagonalDirection, NUM_DIRECTIONS},
HexagonalVector,
};
/// Iterator over the cells of a hexagonal ring at distance `edge_length`
/// (the radius) from a center. A radius of 0 yields exactly one cell
/// (see `size_hint` and the direction-skipping logic in `next`).
pub struct RingIter<V: HexagonalVector + HexagonalDirection> {
// Number of cells per ring edge (== the ring radius).
edge_length: usize,
// Index of the direction currently being walked; >= NUM_DIRECTIONS
// means the iterator is exhausted.
direction: usize,
// The cell that will be yielded next.
next: V,
// 1-based position along the current edge.
edge_index: usize,
}
impl<V: HexagonalVector + HexagonalDirection> RingIter<V> {
/// Starts the walk at `center + direction(4) * radius`, then proceeds
/// edge by edge through all six directions.
pub fn new(radius: usize, center: V) -> Self {
Self {
edge_length: radius,
direction: 0,
next: center + V::direction(4) * radius as isize,
edge_index: 1,
}
}
/// Returns the upcoming cell without advancing, or `None` when exhausted.
pub fn peek(&mut self) -> Option<&V> {
if self.direction < NUM_DIRECTIONS {
Some(&self.next)
} else {
None
}
}
}
impl<V: HexagonalDirection> Iterator for RingIter<V> {
type Item = V;
fn next(&mut self) -> Option<Self::Item> {
let edge_length = self.edge_length;
let direction = self.direction;
if direction < NUM_DIRECTIONS {
// Yield the current cell and pre-compute its successor along the
// current direction.
let next = self.next;
self.next = next.neighbor(direction);
let ei = self.edge_index;
if ei < edge_length {
self.edge_index = ei + 1;
} else {
// Edge finished: turn to the next of the six directions.
self.edge_index = 1;
self.direction = direction + 1;
// Degenerate radius-0 ring: every edge is empty, so skip all
// remaining directions — exactly one cell is yielded in total.
while self.direction < NUM_DIRECTIONS && edge_length == 0 {
self.direction += 1;
}
}
Some(next)
} else {
None
}
}
// Exact size: 6 cells per radius step, or the single center cell.
fn size_hint(&self) -> (usize, Option<usize>) {
let el = self.edge_length;
if el > 0 {
let length = el * 6;
(length, Some(length))
} else {
(1, Some(1))
}
}
}
/// Like `RingIter`, but steps between centers of "big cells" (hexagonal
/// clusters of radius `cell_radius`) instead of between adjacent cells:
/// each step is `direction(d) * (cell_radius + 1) + direction(d+1) * cell_radius`.
pub struct BigRingIter<V: HexagonalDirection> {
edge_length: usize,
direction: usize,
// Combined step vector for the current direction (recomputed on turns).
direction_vector: V,
next: V,
edge_index: usize,
cell_radius: usize,
}
impl<V: HexagonalDirection> BigRingIter<V> {
pub fn new(cell_radius: usize, radius: usize, center: V) -> Self {
let direction_vector =
V::direction(0) * (cell_radius as isize + 1) + V::direction(1) * cell_radius as isize;
// Start offset mirrors RingIter's direction-4 start, scaled to big cells.
let next = center
+ (V::direction(4) * (cell_radius as isize + 1)
+ V::direction(5) * cell_radius as isize)
* radius as isize;
Self {
edge_length: radius,
direction: 0,
direction_vector,
next,
edge_index: 1,
cell_radius,
}
}
/// Returns the upcoming center without advancing, or `None` when exhausted.
// NOTE(review): uses literal `6` where RingIter uses NUM_DIRECTIONS —
// presumably equal; consider using the constant for consistency.
pub fn peek(&mut self) -> Option<&V> {
if self.direction < 6 {
Some(&self.next)
} else {
None
}
}
}
impl<V: HexagonalDirection> Iterator for BigRingIter<V> {
type Item = V;
// Same edge-walking scheme as RingIter::next, but advancing by the
// composite big-cell step vector.
// NOTE(review): mixes literal `6` with NUM_DIRECTIONS — presumably equal;
// consider normalizing to the constant.
fn next(&mut self) -> Option<Self::Item> {
let edge_length = self.edge_length;
let direction = self.direction;
if direction < 6 {
let next = self.next;
self.next = next + self.direction_vector;
let ei = self.edge_index;
if ei < edge_length {
self.edge_index = ei + 1;
} else {
self.edge_index = 1;
self.direction = direction + 1;
// Degenerate radius-0 ring: skip all remaining directions so
// exactly one center is yielded.
while self.direction < NUM_DIRECTIONS && edge_length == 0 {
self.direction += 1;
}
// Recompute the step vector for the new direction.
if self.direction < 6 {
self.direction_vector = V::direction(self.direction)
* (self.cell_radius as isize + 1)
+ V::direction((self.direction + 1) % NUM_DIRECTIONS)
* self.cell_radius as isize;
}
}
Some(next)
} else {
None
}
}
// Exact size: 6 centers per radius step, or the single start center.
fn size_hint(&self) -> (usize, Option<usize>) {
let el = self.edge_length;
if el > 0 {
let length = el * 6;
(length, Some(length))
} else {
(1, Some(1))
}
}
}
|
//! Link: https://adventofcode.com/2019/day/4
//! Day 4: Secure Container
//!
//! You arrive at the Venus fuel depot only to discover it's protected by a password.
//! The Elves had written the password on a sticky note, but someone threw it out.
/// Parses the puzzle input "low-high" into a `(low, high)` pair.
///
/// The raw input is trimmed first: with the original `filter_map(Result::ok)`
/// approach a trailing newline made the second bound unparseable, so it was
/// silently dropped and the function panicked with an opaque index error.
/// Malformed bounds now fail loudly with a descriptive message.
#[aoc_generator(day4)]
fn input_generator(input: &str) -> (u32, u32) {
    let mut bounds = input.trim().split('-').map(|part| {
        part.trim()
            .parse::<u32>()
            .unwrap_or_else(|e| panic!("invalid bound {:?}: {}", part, e))
    });
    let from = bounds.next().expect("missing lower bound");
    let to = bounds.next().expect("missing upper bound");
    (from, to)
}
/// True if any two consecutive characters of `s` are equal
/// (e.g. the "22" in "122345").
fn has_adjacent(s: &str) -> bool {
    let chars: Vec<char> = s.chars().collect();
    chars.windows(2).any(|pair| pair[0] == pair[1])
}
/// True if the characters of `s` never decrease from left to right
/// (e.g. "111123" or "135679").
fn increased_or_same(s: &str) -> bool {
    let chars: Vec<char> = s.chars().collect();
    chars.windows(2).all(|pair| pair[0] <= pair[1])
}
// However, they do remember a few key facts about the password:
//
// It is a six-digit number.
// The value is within the range given in your puzzle input.
// Two adjacent digits are the same (like 22 in 122345).
// Going from left to right, the digits never decrease;
// they only ever increase or stay the same (like 111123 or 135679).
/// Counts valid part-one passwords: non-decreasing digits with at least one
/// adjacent equal pair.
#[aoc(day4, part1, Filter)]
fn day4_part1_filter(input: &(u32, u32)) -> usize {
    let (from, to) = *input;
    // `..=`: the puzzle range is inclusive of both bounds ("the value is
    // within the range given in your puzzle input"); `from..to` silently
    // skipped the upper bound.
    (from..=to)
        .map(|n| n.to_string())
        .filter(|s| has_adjacent(s))
        .filter(|s| increased_or_same(s))
        .count()
}
/// True if `s` contains a pair of equal adjacent characters that is NOT part
/// of a larger run (e.g. the "22" in "112233", but not the "444" in "123444").
///
/// The original `0..s.len() - 3` underflowed (and panicked) for inputs
/// shorter than 3 bytes; short inputs are now handled explicitly.
fn has_adjacent_part2(s: &str) -> bool {
    let s = s.as_bytes();
    match s.len() {
        0 | 1 => false,
        // Exactly two characters: the pair can't be part of a larger run.
        2 => s[0] == s[1],
        n => {
            // A pair at the very start / very end only needs one differing
            // neighbor on its inner side.
            (s[0] == s[1] && s[1] != s[2])
                || (s[n - 1] == s[n - 2] && s[n - 2] != s[n - 3])
                // Interior pairs (i+1, i+2) must differ from both neighbors.
                || (0..n.saturating_sub(3))
                    .any(|i| s[i + 1] == s[i + 2] && s[i] != s[i + 1] && s[i + 2] != s[i + 3])
        }
    }
}
// An Elf just remembered one more important detail:
// the two adjacent matching digits are not part of a larger group of matching digits.
/// Counts valid part-two passwords: non-decreasing digits with at least one
/// adjacent equal pair that is not part of a larger run.
#[aoc(day4, part2, Filter)]
fn day4_part2_filter(input: &(u32, u32)) -> usize {
    let (from, to) = *input;
    // `..=`: the puzzle range is inclusive of both bounds; `from..to`
    // silently skipped the upper bound.
    (from..=to)
        .map(|n| n.to_string())
        .filter(|s| has_adjacent_part2(s))
        .filter(|s| increased_or_same(s))
        .count()
}
#[cfg(test)]
mod tests {
    use super::*;
    // A candidate password is valid for part one if it has an adjacent pair
    // and its digits never decrease.
    fn test_part1(input: &str) -> bool {
        has_adjacent(input) && increased_or_same(input)
    }
    // Part two additionally requires the pair not to be part of a larger run.
    fn test_part2(input: &str) -> bool {
        has_adjacent_part2(input) && increased_or_same(input)
    }
    #[test]
    fn day4_example1() {
        // "111111" meets part one's criteria but fails part two's, because
        // its only pairs belong to a run of six equal digits.
        let input = "111111";
        assert_eq!(true, test_part1(input));
        assert_eq!(false, test_part2(input));
        assert_eq!(input_generator("111111-111111"), (111111, 111111));
    }
    #[test]
    fn day4_input_alyti() {
        // Regression test against one user's actual puzzle input; the
        // expected counts are the accepted answers for that input.
        let input = input_generator("138241-674034");
        assert_eq!(input, (138241, 674034));
        assert_eq!(day4_part1_filter(&input), 1890);
        assert_eq!(day4_part2_filter(&input), 1277);
    }
}
use nom;
// Convenience alias for the error type nom (v4-era API) produces for parsers
// over `CompleteStr` input.
type NomError<'a> = nom::Err<nom::types::CompleteStr<'a>>;
/// Error returned when parsing fails; wraps just the nom `ErrorKind`.
/// `Display`/`Fail` are derived via the `failure` crate.
#[derive(Fail, Debug)]
#[fail(display = "Parse error: {:?}", _0)]
pub struct ParseError(nom::ErrorKind);
impl<'a> From<NomError<'a>> for ParseError {
    // Converts the borrowed nom error into an owned ParseError by keeping
    // only the error kind, dropping the input slice and its lifetime.
    fn from(e: NomError<'a>) -> Self {
        let kind = e.into_error_kind();
        ParseError(kind)
    }
}
|
use super::*;
// Select the backing storage for `i8x32` from the best SIMD feature known at
// compile time: one 256-bit AVX2 register, two 128-bit SSE2 registers, two
// wasm `v128`s, or a plain 32-byte array as the portable fallback.
pick! {
  if #[cfg(target_feature="avx2")] {
    #[derive(Default, Clone, Copy, PartialEq, Eq)]
    #[repr(C, align(32))]
    pub struct i8x32 { avx: m256i }
  } else if #[cfg(target_feature="sse2")] {
    #[derive(Default, Clone, Copy, PartialEq, Eq)]
    #[repr(C, align(32))]
    pub struct i8x32 { sse0: m128i, sse1: m128i }
  } else if #[cfg(target_feature="simd128")] {
    use core::arch::wasm32::*;
    #[derive(Clone, Copy)]
    #[repr(C, align(32))]
    pub struct i8x32 { simd0: v128, simd1: v128 }
    impl Default for i8x32 {
      fn default() -> Self {
        Self::splat(0)
      }
    }
    impl PartialEq for i8x32 {
      // Equal iff no bit differs in either half: XOR each half with its
      // counterpart, OR the two results, and require every lane to be zero.
      fn eq(&self, other: &Self) -> bool {
        !v128_any_true(v128_or(v128_xor(self.simd0, other.simd0), v128_xor(self.simd1, other.simd1)))
      }
    }
    impl Eq for i8x32 { }
  } else {
    #[derive(Default, Clone, Copy, PartialEq, Eq)]
    #[repr(C, align(32))]
    pub struct i8x32 { arr: [i8;32] }
  }
}
int_uint_consts!(i8, 32, i8x32, i8x32, i8a32, const_i8_as_i8x32, 256);
// SAFETY: every backing representation is exactly 32 bytes of plain integer
// data with no padding and no invalid bit patterns, so the all-zero pattern
// is valid (Zeroable) and any byte pattern is a valid value (Pod).
unsafe impl Zeroable for i8x32 {}
unsafe impl Pod for i8x32 {}
impl Add for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Lanewise addition; overflow wraps in every backend.
  fn add(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx: add_i8_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: add_i8_m128i(self.sse0, rhs.sse0), sse1: add_i8_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_add(self.simd0, rhs.simd0), simd1: i8x16_add(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: `wrapping_add` matches the SIMD wrap-on-overflow
        // semantics.
        Self { arr: [
          self.arr[0].wrapping_add(rhs.arr[0]),
          self.arr[1].wrapping_add(rhs.arr[1]),
          self.arr[2].wrapping_add(rhs.arr[2]),
          self.arr[3].wrapping_add(rhs.arr[3]),
          self.arr[4].wrapping_add(rhs.arr[4]),
          self.arr[5].wrapping_add(rhs.arr[5]),
          self.arr[6].wrapping_add(rhs.arr[6]),
          self.arr[7].wrapping_add(rhs.arr[7]),
          self.arr[8].wrapping_add(rhs.arr[8]),
          self.arr[9].wrapping_add(rhs.arr[9]),
          self.arr[10].wrapping_add(rhs.arr[10]),
          self.arr[11].wrapping_add(rhs.arr[11]),
          self.arr[12].wrapping_add(rhs.arr[12]),
          self.arr[13].wrapping_add(rhs.arr[13]),
          self.arr[14].wrapping_add(rhs.arr[14]),
          self.arr[15].wrapping_add(rhs.arr[15]),
          self.arr[16].wrapping_add(rhs.arr[16]),
          self.arr[17].wrapping_add(rhs.arr[17]),
          self.arr[18].wrapping_add(rhs.arr[18]),
          self.arr[19].wrapping_add(rhs.arr[19]),
          self.arr[20].wrapping_add(rhs.arr[20]),
          self.arr[21].wrapping_add(rhs.arr[21]),
          self.arr[22].wrapping_add(rhs.arr[22]),
          self.arr[23].wrapping_add(rhs.arr[23]),
          self.arr[24].wrapping_add(rhs.arr[24]),
          self.arr[25].wrapping_add(rhs.arr[25]),
          self.arr[26].wrapping_add(rhs.arr[26]),
          self.arr[27].wrapping_add(rhs.arr[27]),
          self.arr[28].wrapping_add(rhs.arr[28]),
          self.arr[29].wrapping_add(rhs.arr[29]),
          self.arr[30].wrapping_add(rhs.arr[30]),
          self.arr[31].wrapping_add(rhs.arr[31]),
        ]}
      }
    }
  }
}
impl Sub for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Lanewise subtraction; overflow wraps in every backend.
  fn sub(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx: sub_i8_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: sub_i8_m128i(self.sse0, rhs.sse0), sse1: sub_i8_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_sub(self.simd0, rhs.simd0), simd1: i8x16_sub(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: `wrapping_sub` matches the SIMD wrap-on-overflow
        // semantics.
        Self { arr: [
          self.arr[0].wrapping_sub(rhs.arr[0]),
          self.arr[1].wrapping_sub(rhs.arr[1]),
          self.arr[2].wrapping_sub(rhs.arr[2]),
          self.arr[3].wrapping_sub(rhs.arr[3]),
          self.arr[4].wrapping_sub(rhs.arr[4]),
          self.arr[5].wrapping_sub(rhs.arr[5]),
          self.arr[6].wrapping_sub(rhs.arr[6]),
          self.arr[7].wrapping_sub(rhs.arr[7]),
          self.arr[8].wrapping_sub(rhs.arr[8]),
          self.arr[9].wrapping_sub(rhs.arr[9]),
          self.arr[10].wrapping_sub(rhs.arr[10]),
          self.arr[11].wrapping_sub(rhs.arr[11]),
          self.arr[12].wrapping_sub(rhs.arr[12]),
          self.arr[13].wrapping_sub(rhs.arr[13]),
          self.arr[14].wrapping_sub(rhs.arr[14]),
          self.arr[15].wrapping_sub(rhs.arr[15]),
          self.arr[16].wrapping_sub(rhs.arr[16]),
          self.arr[17].wrapping_sub(rhs.arr[17]),
          self.arr[18].wrapping_sub(rhs.arr[18]),
          self.arr[19].wrapping_sub(rhs.arr[19]),
          self.arr[20].wrapping_sub(rhs.arr[20]),
          self.arr[21].wrapping_sub(rhs.arr[21]),
          self.arr[22].wrapping_sub(rhs.arr[22]),
          self.arr[23].wrapping_sub(rhs.arr[23]),
          self.arr[24].wrapping_sub(rhs.arr[24]),
          self.arr[25].wrapping_sub(rhs.arr[25]),
          self.arr[26].wrapping_sub(rhs.arr[26]),
          self.arr[27].wrapping_sub(rhs.arr[27]),
          self.arr[28].wrapping_sub(rhs.arr[28]),
          self.arr[29].wrapping_sub(rhs.arr[29]),
          self.arr[30].wrapping_sub(rhs.arr[30]),
          self.arr[31].wrapping_sub(rhs.arr[31]),
        ]}
      }
    }
  }
}
// Scalar <-> vector operator forms: splat the scalar across all 32 lanes and
// reuse the vector-vector implementation.
impl Add<i8> for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  fn add(self, rhs: i8) -> Self::Output {
    self.add(Self::splat(rhs))
  }
}
impl Sub<i8> for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  fn sub(self, rhs: i8) -> Self::Output {
    self.sub(Self::splat(rhs))
  }
}
impl Add<i8x32> for i8 {
  type Output = i8x32;
  #[inline]
  #[must_use]
  fn add(self, rhs: i8x32) -> Self::Output {
    i8x32::splat(self).add(rhs)
  }
}
impl Sub<i8x32> for i8 {
  type Output = i8x32;
  #[inline]
  #[must_use]
  fn sub(self, rhs: i8x32) -> Self::Output {
    i8x32::splat(self).sub(rhs)
  }
}
impl BitAnd for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Bitwise AND across all 256 bits.
  fn bitand(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx : bitand_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: bitand_m128i(self.sse0, rhs.sse0), sse1: bitand_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: v128_and(self.simd0, rhs.simd0), simd1: v128_and(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: per-lane AND is identical to whole-register AND.
        Self { arr: [
          self.arr[0].bitand(rhs.arr[0]),
          self.arr[1].bitand(rhs.arr[1]),
          self.arr[2].bitand(rhs.arr[2]),
          self.arr[3].bitand(rhs.arr[3]),
          self.arr[4].bitand(rhs.arr[4]),
          self.arr[5].bitand(rhs.arr[5]),
          self.arr[6].bitand(rhs.arr[6]),
          self.arr[7].bitand(rhs.arr[7]),
          self.arr[8].bitand(rhs.arr[8]),
          self.arr[9].bitand(rhs.arr[9]),
          self.arr[10].bitand(rhs.arr[10]),
          self.arr[11].bitand(rhs.arr[11]),
          self.arr[12].bitand(rhs.arr[12]),
          self.arr[13].bitand(rhs.arr[13]),
          self.arr[14].bitand(rhs.arr[14]),
          self.arr[15].bitand(rhs.arr[15]),
          self.arr[16].bitand(rhs.arr[16]),
          self.arr[17].bitand(rhs.arr[17]),
          self.arr[18].bitand(rhs.arr[18]),
          self.arr[19].bitand(rhs.arr[19]),
          self.arr[20].bitand(rhs.arr[20]),
          self.arr[21].bitand(rhs.arr[21]),
          self.arr[22].bitand(rhs.arr[22]),
          self.arr[23].bitand(rhs.arr[23]),
          self.arr[24].bitand(rhs.arr[24]),
          self.arr[25].bitand(rhs.arr[25]),
          self.arr[26].bitand(rhs.arr[26]),
          self.arr[27].bitand(rhs.arr[27]),
          self.arr[28].bitand(rhs.arr[28]),
          self.arr[29].bitand(rhs.arr[29]),
          self.arr[30].bitand(rhs.arr[30]),
          self.arr[31].bitand(rhs.arr[31]),
        ]}
      }
    }
  }
}
impl BitOr for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Bitwise OR across all 256 bits.
  fn bitor(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx : bitor_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: bitor_m128i(self.sse0, rhs.sse0), sse1: bitor_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: v128_or(self.simd0, rhs.simd0), simd1: v128_or(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: per-lane OR is identical to whole-register OR.
        Self { arr: [
          self.arr[0].bitor(rhs.arr[0]),
          self.arr[1].bitor(rhs.arr[1]),
          self.arr[2].bitor(rhs.arr[2]),
          self.arr[3].bitor(rhs.arr[3]),
          self.arr[4].bitor(rhs.arr[4]),
          self.arr[5].bitor(rhs.arr[5]),
          self.arr[6].bitor(rhs.arr[6]),
          self.arr[7].bitor(rhs.arr[7]),
          self.arr[8].bitor(rhs.arr[8]),
          self.arr[9].bitor(rhs.arr[9]),
          self.arr[10].bitor(rhs.arr[10]),
          self.arr[11].bitor(rhs.arr[11]),
          self.arr[12].bitor(rhs.arr[12]),
          self.arr[13].bitor(rhs.arr[13]),
          self.arr[14].bitor(rhs.arr[14]),
          self.arr[15].bitor(rhs.arr[15]),
          self.arr[16].bitor(rhs.arr[16]),
          self.arr[17].bitor(rhs.arr[17]),
          self.arr[18].bitor(rhs.arr[18]),
          self.arr[19].bitor(rhs.arr[19]),
          self.arr[20].bitor(rhs.arr[20]),
          self.arr[21].bitor(rhs.arr[21]),
          self.arr[22].bitor(rhs.arr[22]),
          self.arr[23].bitor(rhs.arr[23]),
          self.arr[24].bitor(rhs.arr[24]),
          self.arr[25].bitor(rhs.arr[25]),
          self.arr[26].bitor(rhs.arr[26]),
          self.arr[27].bitor(rhs.arr[27]),
          self.arr[28].bitor(rhs.arr[28]),
          self.arr[29].bitor(rhs.arr[29]),
          self.arr[30].bitor(rhs.arr[30]),
          self.arr[31].bitor(rhs.arr[31]),
        ]}
      }
    }
  }
}
impl BitXor for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Bitwise XOR across all 256 bits.
  fn bitxor(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx : bitxor_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: bitxor_m128i(self.sse0, rhs.sse0), sse1: bitxor_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: v128_xor(self.simd0, rhs.simd0), simd1: v128_xor(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: per-lane XOR is identical to whole-register XOR.
        Self { arr: [
          self.arr[0].bitxor(rhs.arr[0]),
          self.arr[1].bitxor(rhs.arr[1]),
          self.arr[2].bitxor(rhs.arr[2]),
          self.arr[3].bitxor(rhs.arr[3]),
          self.arr[4].bitxor(rhs.arr[4]),
          self.arr[5].bitxor(rhs.arr[5]),
          self.arr[6].bitxor(rhs.arr[6]),
          self.arr[7].bitxor(rhs.arr[7]),
          self.arr[8].bitxor(rhs.arr[8]),
          self.arr[9].bitxor(rhs.arr[9]),
          self.arr[10].bitxor(rhs.arr[10]),
          self.arr[11].bitxor(rhs.arr[11]),
          self.arr[12].bitxor(rhs.arr[12]),
          self.arr[13].bitxor(rhs.arr[13]),
          self.arr[14].bitxor(rhs.arr[14]),
          self.arr[15].bitxor(rhs.arr[15]),
          self.arr[16].bitxor(rhs.arr[16]),
          self.arr[17].bitxor(rhs.arr[17]),
          self.arr[18].bitxor(rhs.arr[18]),
          self.arr[19].bitxor(rhs.arr[19]),
          self.arr[20].bitxor(rhs.arr[20]),
          self.arr[21].bitxor(rhs.arr[21]),
          self.arr[22].bitxor(rhs.arr[22]),
          self.arr[23].bitxor(rhs.arr[23]),
          self.arr[24].bitxor(rhs.arr[24]),
          self.arr[25].bitxor(rhs.arr[25]),
          self.arr[26].bitxor(rhs.arr[26]),
          self.arr[27].bitxor(rhs.arr[27]),
          self.arr[28].bitxor(rhs.arr[28]),
          self.arr[29].bitxor(rhs.arr[29]),
          self.arr[30].bitxor(rhs.arr[30]),
          self.arr[31].bitxor(rhs.arr[31]),
        ]}
      }
    }
  }
}
impl CmpEq for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Lanewise equality: each lane becomes all-ones (-1) where equal, else 0.
  fn cmp_eq(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx : cmp_eq_mask_i8_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: cmp_eq_mask_i8_m128i(self.sse0, rhs.sse0), sse1: cmp_eq_mask_i8_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_eq(self.simd0, rhs.simd0), simd1: i8x16_eq(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: -1 is the all-ones i8 bit pattern, matching the
        // SIMD mask convention.
        Self { arr: [
          if self.arr[0] == rhs.arr[0] { -1 } else { 0 },
          if self.arr[1] == rhs.arr[1] { -1 } else { 0 },
          if self.arr[2] == rhs.arr[2] { -1 } else { 0 },
          if self.arr[3] == rhs.arr[3] { -1 } else { 0 },
          if self.arr[4] == rhs.arr[4] { -1 } else { 0 },
          if self.arr[5] == rhs.arr[5] { -1 } else { 0 },
          if self.arr[6] == rhs.arr[6] { -1 } else { 0 },
          if self.arr[7] == rhs.arr[7] { -1 } else { 0 },
          if self.arr[8] == rhs.arr[8] { -1 } else { 0 },
          if self.arr[9] == rhs.arr[9] { -1 } else { 0 },
          if self.arr[10] == rhs.arr[10] { -1 } else { 0 },
          if self.arr[11] == rhs.arr[11] { -1 } else { 0 },
          if self.arr[12] == rhs.arr[12] { -1 } else { 0 },
          if self.arr[13] == rhs.arr[13] { -1 } else { 0 },
          if self.arr[14] == rhs.arr[14] { -1 } else { 0 },
          if self.arr[15] == rhs.arr[15] { -1 } else { 0 },
          if self.arr[16] == rhs.arr[16] { -1 } else { 0 },
          if self.arr[17] == rhs.arr[17] { -1 } else { 0 },
          if self.arr[18] == rhs.arr[18] { -1 } else { 0 },
          if self.arr[19] == rhs.arr[19] { -1 } else { 0 },
          if self.arr[20] == rhs.arr[20] { -1 } else { 0 },
          if self.arr[21] == rhs.arr[21] { -1 } else { 0 },
          if self.arr[22] == rhs.arr[22] { -1 } else { 0 },
          if self.arr[23] == rhs.arr[23] { -1 } else { 0 },
          if self.arr[24] == rhs.arr[24] { -1 } else { 0 },
          if self.arr[25] == rhs.arr[25] { -1 } else { 0 },
          if self.arr[26] == rhs.arr[26] { -1 } else { 0 },
          if self.arr[27] == rhs.arr[27] { -1 } else { 0 },
          if self.arr[28] == rhs.arr[28] { -1 } else { 0 },
          if self.arr[29] == rhs.arr[29] { -1 } else { 0 },
          if self.arr[30] == rhs.arr[30] { -1 } else { 0 },
          if self.arr[31] == rhs.arr[31] { -1 } else { 0 },
        ]}
      }
    }
  }
}
impl CmpGt for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Lanewise signed greater-than: all-ones (-1) where `self > rhs`, else 0.
  fn cmp_gt(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx : cmp_gt_mask_i8_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: cmp_gt_mask_i8_m128i(self.sse0, rhs.sse0), sse1: cmp_gt_mask_i8_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_gt(self.simd0, rhs.simd0), simd1: i8x16_gt(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: -1 is the all-ones i8 bit pattern, matching the
        // SIMD mask convention.
        Self { arr: [
          if self.arr[0] > rhs.arr[0] { -1 } else { 0 },
          if self.arr[1] > rhs.arr[1] { -1 } else { 0 },
          if self.arr[2] > rhs.arr[2] { -1 } else { 0 },
          if self.arr[3] > rhs.arr[3] { -1 } else { 0 },
          if self.arr[4] > rhs.arr[4] { -1 } else { 0 },
          if self.arr[5] > rhs.arr[5] { -1 } else { 0 },
          if self.arr[6] > rhs.arr[6] { -1 } else { 0 },
          if self.arr[7] > rhs.arr[7] { -1 } else { 0 },
          if self.arr[8] > rhs.arr[8] { -1 } else { 0 },
          if self.arr[9] > rhs.arr[9] { -1 } else { 0 },
          if self.arr[10] > rhs.arr[10] { -1 } else { 0 },
          if self.arr[11] > rhs.arr[11] { -1 } else { 0 },
          if self.arr[12] > rhs.arr[12] { -1 } else { 0 },
          if self.arr[13] > rhs.arr[13] { -1 } else { 0 },
          if self.arr[14] > rhs.arr[14] { -1 } else { 0 },
          if self.arr[15] > rhs.arr[15] { -1 } else { 0 },
          if self.arr[16] > rhs.arr[16] { -1 } else { 0 },
          if self.arr[17] > rhs.arr[17] { -1 } else { 0 },
          if self.arr[18] > rhs.arr[18] { -1 } else { 0 },
          if self.arr[19] > rhs.arr[19] { -1 } else { 0 },
          if self.arr[20] > rhs.arr[20] { -1 } else { 0 },
          if self.arr[21] > rhs.arr[21] { -1 } else { 0 },
          if self.arr[22] > rhs.arr[22] { -1 } else { 0 },
          if self.arr[23] > rhs.arr[23] { -1 } else { 0 },
          if self.arr[24] > rhs.arr[24] { -1 } else { 0 },
          if self.arr[25] > rhs.arr[25] { -1 } else { 0 },
          if self.arr[26] > rhs.arr[26] { -1 } else { 0 },
          if self.arr[27] > rhs.arr[27] { -1 } else { 0 },
          if self.arr[28] > rhs.arr[28] { -1 } else { 0 },
          if self.arr[29] > rhs.arr[29] { -1 } else { 0 },
          if self.arr[30] > rhs.arr[30] { -1 } else { 0 },
          if self.arr[31] > rhs.arr[31] { -1 } else { 0 },
        ]}
      }
    }
  }
}
impl CmpLt for i8x32 {
  type Output = Self;
  #[inline]
  #[must_use]
  /// Lanewise signed less-than: all-ones (-1) where `self < rhs`, else 0.
  fn cmp_lt(self, rhs: Self) -> Self::Output {
    pick! {
      if #[cfg(target_feature="avx2")] {
        // AVX2 has no direct lt: the gt and eq masks are disjoint, so
        // `gt ^ eq == gt | eq == ge`, and lt is its complement.
        Self { avx : !(cmp_gt_mask_i8_m256i(self.avx,rhs.avx) ^ cmp_eq_mask_i8_m256i(self.avx,rhs.avx)) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: cmp_lt_mask_i8_m128i(self.sse0, rhs.sse0), sse1: cmp_lt_mask_i8_m128i(self.sse1, rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_lt(self.simd0, rhs.simd0), simd1: i8x16_lt(self.simd1, rhs.simd1) }
      } else {
        // Scalar fallback: -1 is the all-ones i8 bit pattern, matching the
        // SIMD mask convention.
        Self { arr: [
          if self.arr[0] < rhs.arr[0] { -1 } else { 0 },
          if self.arr[1] < rhs.arr[1] { -1 } else { 0 },
          if self.arr[2] < rhs.arr[2] { -1 } else { 0 },
          if self.arr[3] < rhs.arr[3] { -1 } else { 0 },
          if self.arr[4] < rhs.arr[4] { -1 } else { 0 },
          if self.arr[5] < rhs.arr[5] { -1 } else { 0 },
          if self.arr[6] < rhs.arr[6] { -1 } else { 0 },
          if self.arr[7] < rhs.arr[7] { -1 } else { 0 },
          if self.arr[8] < rhs.arr[8] { -1 } else { 0 },
          if self.arr[9] < rhs.arr[9] { -1 } else { 0 },
          if self.arr[10] < rhs.arr[10] { -1 } else { 0 },
          if self.arr[11] < rhs.arr[11] { -1 } else { 0 },
          if self.arr[12] < rhs.arr[12] { -1 } else { 0 },
          if self.arr[13] < rhs.arr[13] { -1 } else { 0 },
          if self.arr[14] < rhs.arr[14] { -1 } else { 0 },
          if self.arr[15] < rhs.arr[15] { -1 } else { 0 },
          if self.arr[16] < rhs.arr[16] { -1 } else { 0 },
          if self.arr[17] < rhs.arr[17] { -1 } else { 0 },
          if self.arr[18] < rhs.arr[18] { -1 } else { 0 },
          if self.arr[19] < rhs.arr[19] { -1 } else { 0 },
          if self.arr[20] < rhs.arr[20] { -1 } else { 0 },
          if self.arr[21] < rhs.arr[21] { -1 } else { 0 },
          if self.arr[22] < rhs.arr[22] { -1 } else { 0 },
          if self.arr[23] < rhs.arr[23] { -1 } else { 0 },
          if self.arr[24] < rhs.arr[24] { -1 } else { 0 },
          if self.arr[25] < rhs.arr[25] { -1 } else { 0 },
          if self.arr[26] < rhs.arr[26] { -1 } else { 0 },
          if self.arr[27] < rhs.arr[27] { -1 } else { 0 },
          if self.arr[28] < rhs.arr[28] { -1 } else { 0 },
          if self.arr[29] < rhs.arr[29] { -1 } else { 0 },
          if self.arr[30] < rhs.arr[30] { -1 } else { 0 },
          if self.arr[31] < rhs.arr[31] { -1 } else { 0 },
        ]}
      }
    }
  }
}
impl i8x32 {
  #[inline]
  #[must_use]
  /// Lanewise select: where `self`'s mask bits are set, take the bit from
  /// `t`, otherwise from `f`. Intended for masks as produced by the `cmp_*`
  /// operations (each lane all-ones or all-zeros).
  pub fn blend(self, t: Self, f: Self) -> Self {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx: blend_varying_i8_m256i(f.avx, t.avx, self.avx) }
      } else if #[cfg(target_feature="sse4.1")] {
        Self { sse0: blend_varying_i8_m128i(f.sse0, t.sse0, self.sse0), sse1: blend_varying_i8_m128i(f.sse1, t.sse1, self.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: v128_bitselect(t.simd0, f.simd0, self.simd0), simd1: v128_bitselect(t.simd1, f.simd1, self.simd1) }
      } else {
        generic_bit_blend(self, t, f)
      }
    }
  }
  #[inline]
  #[must_use]
  /// Lanewise absolute value. Wraps for `i8::MIN` in the scalar fallback;
  /// NOTE(review): the SIMD `abs` intrinsics presumably share that
  /// `abs(-128) == -128` behavior — confirm against the intrinsic docs.
  pub fn abs(self) -> Self {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx: abs_i8_m256i(self.avx) }
      } else if #[cfg(target_feature="ssse3")] {
        Self { sse0: abs_i8_m128i(self.sse0), sse1: abs_i8_m128i(self.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_abs(self.simd0), simd1: i8x16_abs(self.simd1) }
      } else {
        let arr: [i8; 32] = cast(self);
        cast([
          arr[0].wrapping_abs(),
          arr[1].wrapping_abs(),
          arr[2].wrapping_abs(),
          arr[3].wrapping_abs(),
          arr[4].wrapping_abs(),
          arr[5].wrapping_abs(),
          arr[6].wrapping_abs(),
          arr[7].wrapping_abs(),
          arr[8].wrapping_abs(),
          arr[9].wrapping_abs(),
          arr[10].wrapping_abs(),
          arr[11].wrapping_abs(),
          arr[12].wrapping_abs(),
          arr[13].wrapping_abs(),
          arr[14].wrapping_abs(),
          arr[15].wrapping_abs(),
          arr[16].wrapping_abs(),
          arr[17].wrapping_abs(),
          arr[18].wrapping_abs(),
          arr[19].wrapping_abs(),
          arr[20].wrapping_abs(),
          arr[21].wrapping_abs(),
          arr[22].wrapping_abs(),
          arr[23].wrapping_abs(),
          arr[24].wrapping_abs(),
          arr[25].wrapping_abs(),
          arr[26].wrapping_abs(),
          arr[27].wrapping_abs(),
          arr[28].wrapping_abs(),
          arr[29].wrapping_abs(),
          arr[30].wrapping_abs(),
          arr[31].wrapping_abs(),
        ])
      }
    }
  }
  #[inline]
  #[must_use]
  /// Lanewise signed maximum. The fallback builds a `self < rhs` mask and
  /// blends `rhs` where it is true.
  pub fn max(self, rhs: Self) -> Self {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx: max_i8_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse4.1")] {
        Self { sse0: max_i8_m128i(self.sse0,rhs.sse0), sse1: max_i8_m128i(self.sse1,rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_max(self.simd0,rhs.simd0), simd1: i8x16_max(self.simd1,rhs.simd1) }
      } else {
        self.cmp_lt(rhs).blend(rhs, self)
      }
    }
  }
  #[inline]
  #[must_use]
  /// Lanewise signed minimum. The fallback reuses the same mask as `max`
  /// with the blend arguments swapped.
  pub fn min(self, rhs: Self) -> Self {
    pick! {
      if #[cfg(target_feature="avx2")] {
        Self { avx: min_i8_m256i(self.avx,rhs.avx) }
      } else if #[cfg(target_feature="sse4.1")] {
        Self { sse0: min_i8_m128i(self.sse0,rhs.sse0), sse1: min_i8_m128i(self.sse1,rhs.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: i8x16_min(self.simd0,rhs.simd0), simd1: i8x16_min(self.simd1,rhs.simd1) }
      } else {
        self.cmp_lt(rhs).blend(self, rhs)
      }
    }
  }
  /// Copies the 32 lanes out into a plain array (bit-for-bit reinterpret of
  /// whichever backing storage is active).
  pub fn to_array(self) -> [i8; 32] {
    cast(self)
  }
}
|
// Auto-generated (svd2rust) reader/writer type aliases for the RGCFR (DMA
// request generator clear-flag) register and its four single-bit CSOFx
// fields. Edit the SVD and regenerate rather than changing this by hand.
#[doc = "Register `RGCFR` reader"]
pub type R = crate::R<RGCFR_SPEC>;
#[doc = "Register `RGCFR` writer"]
pub type W = crate::W<RGCFR_SPEC>;
#[doc = "Field `CSOF0` reader - Generator Clear Overrun Flag 0"]
pub type CSOF0_R = crate::BitReader;
#[doc = "Field `CSOF0` writer - Generator Clear Overrun Flag 0"]
pub type CSOF0_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSOF1` reader - Generator Clear Overrun Flag 1"]
pub type CSOF1_R = crate::BitReader;
#[doc = "Field `CSOF1` writer - Generator Clear Overrun Flag 1"]
pub type CSOF1_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSOF2` reader - Generator Clear Overrun Flag 2"]
pub type CSOF2_R = crate::BitReader;
#[doc = "Field `CSOF2` writer - Generator Clear Overrun Flag 2"]
pub type CSOF2_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `CSOF3` reader - Generator Clear Overrun Flag 3"]
pub type CSOF3_R = crate::BitReader;
#[doc = "Field `CSOF3` writer - Generator Clear Overrun Flag 3"]
pub type CSOF3_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read accessors: each extracts one CSOFx bit (bit n) from the raw register
// value captured by `read()`.
impl R {
    #[doc = "Bit 0 - Generator Clear Overrun Flag 0"]
    #[inline(always)]
    pub fn csof0(&self) -> CSOF0_R {
        CSOF0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Generator Clear Overrun Flag 1"]
    #[inline(always)]
    pub fn csof1(&self) -> CSOF1_R {
        CSOF1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Generator Clear Overrun Flag 2"]
    #[inline(always)]
    pub fn csof2(&self) -> CSOF2_R {
        CSOF2_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - Generator Clear Overrun Flag 3"]
    #[inline(always)]
    pub fn csof3(&self) -> CSOF3_R {
        CSOF3_R::new(((self.bits >> 3) & 1) != 0)
    }
}
// Write accessors: each returns a field-writer proxy targeting one CSOFx bit
// (the const generic is the bit offset).
impl W {
    #[doc = "Bit 0 - Generator Clear Overrun Flag 0"]
    #[inline(always)]
    #[must_use]
    pub fn csof0(&mut self) -> CSOF0_W<RGCFR_SPEC, 0> {
        CSOF0_W::new(self)
    }
    #[doc = "Bit 1 - Generator Clear Overrun Flag 1"]
    #[inline(always)]
    #[must_use]
    pub fn csof1(&mut self) -> CSOF1_W<RGCFR_SPEC, 1> {
        CSOF1_W::new(self)
    }
    #[doc = "Bit 2 - Generator Clear Overrun Flag 2"]
    #[inline(always)]
    #[must_use]
    pub fn csof2(&mut self) -> CSOF2_W<RGCFR_SPEC, 2> {
        CSOF2_W::new(self)
    }
    #[doc = "Bit 3 - Generator Clear Overrun Flag 3"]
    #[inline(always)]
    #[must_use]
    pub fn csof3(&mut self) -> CSOF3_W<RGCFR_SPEC, 3> {
        CSOF3_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe because raw writes bypass the per-field writers; the caller must
    // ensure the value is valid for this register.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DMA Request Generator Clear Flag Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rgcfr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rgcfr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RGCFR_SPEC;
impl crate::RegisterSpec for RGCFR_SPEC {
    // 32-bit register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`rgcfr::R`](R) reader structure"]
impl crate::Readable for RGCFR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rgcfr::W`](W) writer structure"]
impl crate::Writable for RGCFR_SPEC {
    // No bits are marked as write-1/write-0-to-modify in the generated spec.
    // NOTE(review): "clear flag" bits are often write-1-to-clear in hardware;
    // confirm these bitmaps against the device SVD if modify() misbehaves.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RGCFR to value 0"]
impl crate::Resettable for RGCFR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Copyright 2020 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use crate::hashtypes::*;
// decodes binary encoded data into its separate entities
pub struct BytesDecoder<'a> {
    // remaining undecoded bytes; shrinks from the front as items are decoded
    data: &'a [u8],
}
impl BytesDecoder<'_> {
    // constructs a decoder over the given byte buffer
    pub fn new(data: &[u8]) -> BytesDecoder {
        BytesDecoder { data }
    }
    // decodes an ScAddress from the byte buffer
    pub fn address(&mut self) -> ScAddress {
        ScAddress::from_bytes(self.bytes())
    }
    // decodes an ScAgentId from the byte buffer
    pub fn agent_id(&mut self) -> ScAgentId {
        ScAgentId::from_bytes(self.bytes())
    }
    // decodes the next substring of bytes from the byte buffer;
    // the substring is prefixed with its leb128-encoded length
    pub fn bytes(&mut self) -> &[u8] {
        let size = self.int();
        // reject a negative length explicitly instead of letting the cast to
        // usize wrap to a huge value (the wrapped value happened to fail the
        // length check, but only by accident)
        if size < 0 || self.data.len() < size as usize {
            panic!("Cannot decode bytes");
        }
        let (value, remainder) = self.data.split_at(size as usize);
        self.data = remainder;
        value
    }
    // decodes an ScChainId from the byte buffer
    pub fn chain_id(&mut self) -> ScChainId {
        ScChainId::from_bytes(self.bytes())
    }
    // decodes an ScColor from the byte buffer
    pub fn color(&mut self) -> ScColor {
        ScColor::from_bytes(self.bytes())
    }
    // decodes an ScContractId from the byte buffer
    pub fn contract_id(&mut self) -> ScContractId {
        ScContractId::from_bytes(self.bytes())
    }
    // decodes an ScHash from the byte buffer
    pub fn hash(&mut self) -> ScHash {
        ScHash::from_bytes(self.bytes())
    }
    // decodes an ScHname from the byte buffer
    pub fn hname(&mut self) -> ScHname {
        ScHname::from_bytes(self.bytes())
    }
    // decodes an int64 from the byte buffer
    // note that ints are encoded using leb128 encoding
    pub fn int(&mut self) -> i64 {
        // leb128 decoder
        let mut val = 0_i64;
        let mut s = 0;
        loop {
            // fail with a clear message on truncated input instead of an
            // index-out-of-bounds panic
            if self.data.is_empty() {
                panic!("Cannot decode int");
            }
            let mut b = self.data[0] as i8;
            self.data = &self.data[1..];
            val |= ((b & 0x7f) as i64) << s;
            // a clear high bit (b >= 0 as i8) marks the final group
            if b >= 0 {
                if ((val >> s) as i8) & 0x7f != b & 0x7f {
                    panic!("Integer too large");
                }
                // extend int7 sign to int8
                if (b & 0x40) != 0 {
                    b |= -0x80
                }
                // extend int8 sign to int64
                return val | ((b as i64) << s);
            }
            s += 7;
            if s >= 64 {
                panic!("integer representation too long");
            }
        }
    }
    // decodes an UTF-8 text string from the byte buffer
    pub fn string(&mut self) -> String {
        String::from_utf8_lossy(self.bytes()).to_string()
    }
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// encodes entities into a binary data buffer
pub struct BytesEncoder {
    // accumulated encoded bytes; retrieve a copy via data()
    data: Vec<u8>,
}
impl BytesEncoder {
    // constructs an encoder with an empty buffer
    pub fn new() -> BytesEncoder {
        BytesEncoder { data: Vec::new() }
    }
    // encodes an ScAddress into the byte buffer
    pub fn address(&mut self, value: &ScAddress) -> &BytesEncoder {
        self.bytes(value.to_bytes());
        self
    }
    // encodes an ScAgentId into the byte buffer
    pub fn agent_id(&mut self, value: &ScAgentId) -> &BytesEncoder {
        self.bytes(value.to_bytes());
        self
    }
    // encodes a substring of bytes into the byte buffer,
    // prefixed with its leb128-encoded length
    pub fn bytes(&mut self, value: &[u8]) -> &BytesEncoder {
        self.int(value.len() as i64);
        self.data.extend_from_slice(value);
        self
    }
    // encodes an ScChainId into the byte buffer
    pub fn chain_id(&mut self, value: &ScChainId) -> &BytesEncoder {
        self.bytes(value.to_bytes());
        self
    }
    // encodes an ScColor into the byte buffer
    pub fn color(&mut self, value: &ScColor) -> &BytesEncoder {
        self.bytes(value.to_bytes());
        self
    }
    // encodes an ScContractId into the byte buffer
    pub fn contract_id(&mut self, value: &ScContractId) -> &BytesEncoder {
        self.bytes(value.to_bytes());
        self
    }
    // retrieve a copy of the encoded byte buffer
    pub fn data(&self) -> Vec<u8> {
        self.data.clone()
    }
    // encodes an ScHash into the byte buffer
    pub fn hash(&mut self, value: &ScHash) -> &BytesEncoder {
        self.bytes(value.to_bytes());
        self
    }
    // encodes an ScHname into the byte buffer
    pub fn hname(&mut self, value: &ScHname) -> &BytesEncoder {
        self.bytes(&value.to_bytes());
        self
    }
    // encodes an int64 into the byte buffer
    // note that ints are encoded using leb128 encoding
    pub fn int(&mut self, mut val: i64) -> &BytesEncoder {
        // leb128 encoder: emit 7 bits per byte, high bit set on all but the
        // last byte
        loop {
            let b = val as u8;
            // sign bit of this 7-bit group; decides whether sign extension
            // of the remaining value matches and encoding can stop
            let s = b & 0x40;
            // arithmetic shift: preserves the sign of `val`
            val >>= 7;
            if (val == 0 && s == 0) || (val == -1 && s != 0) {
                self.data.push(b & 0x7f);
                return self;
            }
            self.data.push(b | 0x80)
        }
    }
    // encodes an UTF-8 text string into the byte buffer
    pub fn string(&mut self, value: &str) -> &BytesEncoder {
        self.bytes(value.as_bytes());
        self
    }
}
|
#![feature(arbitrary_self_types, futures_api, pin)]
use std::future::{Future, FutureObj};
use std::mem::PinMut;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll, Waker};
use std::thread;
use std::time::Duration;
/// State shared between the future and the timer thread
struct SharedState {
    /// Waker from the most recent `poll()`; refreshed on every poll because
    /// the task's waker may change between polls.
    waker: Option<Waker>,
    /// Set when the inner future completes; tells the timer thread to exit.
    completed: bool,
}
impl SharedState {
fn new() -> Self {
SharedState {
waker: None,
completed: false,
}
}
}
/// Wrapper that periodically wakes a future
///
/// This can be useful for futures that have a ready condition that needs to be checked
/// periodically because no other event would otherwise trigger a wake.
pub struct WakeInterval<'a, T> {
    first: bool, // `true` on the first call to `poll()`; gates spawning the timer thread
    interval: Duration, // time to wait between wakes
    future: FutureObj<'a, T>, // inner future that we want to wake and poll
    shared_state: Arc<Mutex<SharedState>>, // shared with the timer thread
}
impl<'a, T> WakeInterval<'a, T> {
    /// Wraps `future` so that the task is re-woken every `interval`.
    /// The timer thread is spawned lazily on the first `poll()`, not here.
    pub fn new(interval: Duration, future: FutureObj<'a, T>) -> Self {
        WakeInterval {
            first: true,
            interval,
            future,
            shared_state: Arc::new(Mutex::new(SharedState::new())),
        }
    }
}
// NOTE(review): this targets the pre-stabilization futures API
// (`PinMut`/`FutureObj`, feature-gated above); it will not compile on a
// modern toolchain without porting to `Pin<&mut Self>` / `std::task`.
impl<'a, T> Future for WakeInterval<'a, T> {
    type Output = T;
    fn poll(mut self: PinMut<Self>, cx: &mut Context) -> Poll<<Self as Future>::Output> {
        // The waker may change between calls to `poll()` so we must update the shared_state
        {
            let mut shared_state = self.shared_state.lock().unwrap();
            shared_state.waker = Some(cx.waker().clone());
        }
        // Creates a new thread that will act as a timer to wake the task
        if self.first {
            self.first = false;
            // These will be moved into the thread
            // NOTE(review): `Duration` is `Copy`, so the `.clone()` is redundant
            let duration = self.interval.clone();
            let shared_state = self.shared_state.clone();
            thread::spawn(move || loop {
                thread::sleep(duration);
                let shared_state = shared_state.lock().unwrap();
                // Exit the timer thread once the future has completed
                if shared_state.completed {
                    return;
                }
                if let Some(ref waker) = shared_state.waker {
                    waker.wake();
                }
            });
        }
        // Poll the inner future
        match PinMut::new(&mut self.future).poll(cx) {
            Poll::Ready(val) => {
                // Signal the looping thread that we are done
                let mut shared_state = self.shared_state.lock().unwrap();
                shared_state.completed = true;
                Poll::Ready(val)
            }
            Poll::Pending => Poll::Pending,
        }
    }
}
|
#[derive(Clone, Copy, PartialEq)]
enum Cell {
    /// '.' in the input — floor; never changes state.
    Floor,
    /// 'L' in the input — an empty seat.
    Empty,
    /// '#' in the input — an occupied seat.
    Occupied,
}
/// Runs the part-one seating simulation (adjacent-neighbour rule) until no
/// cell changes, then counts the occupied seats.
pub fn solve_part_one(input: &str) -> usize {
    let mut grid = process(input);
    loop {
        let mut changed = false;
        let mut next = grid.clone();
        for (y, row) in grid.iter().enumerate() {
            for x in 0..row.len() {
                let state = new_state(&grid, x, y);
                if state != grid[y][x] {
                    changed = true;
                }
                next[y][x] = state;
            }
        }
        grid = next;
        if !changed {
            break;
        }
    }
    count_occupied(&grid)
}
/// Parses the puzzle input into a grid of cells, one row per line.
/// Panics on any character other than '.', 'L', or '#'.
fn process(input: &str) -> Vec<Vec<Cell>> {
    let parse_cell = |c: char| match c {
        '.' => Cell::Floor,
        'L' => Cell::Empty,
        '#' => Cell::Occupied,
        _ => panic!("unrecognised char in input"),
    };
    input
        .lines()
        .map(|line| line.chars().map(parse_cell).collect())
        .collect()
}
/// Computes the next state of the cell at `(x, y)` under the part-one rule:
/// an empty seat with no occupied neighbours fills; an occupied seat with
/// four or more occupied neighbours empties; floor never changes.
///
/// Takes `&[Vec<Cell>]` rather than `&Vec<Vec<Cell>>` (clippy `ptr_arg`);
/// callers passing `&grid` are unaffected by deref coercion.
fn new_state(grid: &[Vec<Cell>], x: usize, y: usize) -> Cell {
    // Clamp the 3x3 neighbourhood to the grid bounds (half-open ranges).
    let x_min = x.saturating_sub(1);
    let x_max = (x + 2).min(grid[0].len());
    let y_min = y.saturating_sub(1);
    let y_max = (y + 2).min(grid.len());
    let mut occupied = 0;
    for i in y_min..y_max {
        for j in x_min..x_max {
            // Count neighbours only; skip the cell itself.
            if (i, j) != (y, x) && grid[i][j] == Cell::Occupied {
                occupied += 1;
            }
        }
    }
    match grid[y][x] {
        Cell::Empty if occupied == 0 => Cell::Occupied,
        Cell::Occupied if occupied >= 4 => Cell::Empty,
        current => current,
    }
}
/// Counts the occupied seats in the grid.
fn count_occupied(grid: &Vec<Vec<Cell>>) -> usize {
    grid.iter()
        .flatten()
        .filter(|&&cell| cell == Cell::Occupied)
        .count()
}
/// Runs the line-of-sight seating simulation until stable, then returns the
/// number of occupied seats (part two).
pub fn solve(input: &str) -> usize {
    let mut grid = process(input);
    loop {
        let mut changed = false;
        let mut next = grid.clone();
        for y in 0..grid.len() {
            for x in 0..grid[y].len() {
                let cell = new_state_sightlines(&grid, x, y);
                if cell != grid[y][x] {
                    changed = true;
                }
                next[y][x] = cell;
            }
        }
        grid = next;
        if !changed {
            break;
        }
    }
    count_occupied(&grid)
}
/// Computes the next state of the cell at (x, y) by scanning each of the
/// eight directions for the first visible seat (part two rules).
fn new_state_sightlines(grid: &Vec<Vec<Cell>>, x: usize, y: usize) -> Cell {
    let height = grid.len() as isize;
    let width = grid[0].len() as isize;
    let mut occupied = 0;
    for dy in -1isize..=1 {
        for dx in -1isize..=1 {
            if dy == 0 && dx == 0 {
                continue;
            }
            // Walk outwards until we fall off the grid or hit a seat.
            let (mut sy, mut sx) = (y as isize + dy, x as isize + dx);
            loop {
                if sy < 0 || sy >= height || sx < 0 || sx >= width {
                    break;
                }
                match grid[sy as usize][sx as usize] {
                    Cell::Empty => break,
                    Cell::Occupied => {
                        occupied += 1;
                        break;
                    }
                    Cell::Floor => {
                        sy += dy;
                        sx += dx;
                    }
                }
            }
        }
    }
    match grid[y][x] {
        Cell::Empty if occupied == 0 => Cell::Occupied,
        Cell::Occupied if occupied >= 5 => Cell::Empty,
        other => other,
    }
}
/// Debug helper: renders the grid to stdout using the puzzle's characters.
fn print_grid(grid: &Vec<Vec<Cell>>) -> () {
    for row in grid {
        let line: String = row
            .iter()
            .map(|cell| match cell {
                Cell::Floor => '.',
                Cell::Empty => 'L',
                Cell::Occupied => '#',
            })
            .collect();
        print!("{}\n", line);
    }
    print!("\n");
}
|
extern crate iref;
use iref::Iri;
/// Parses an example IRI and prints each of its components in turn.
fn main() -> Result<(), iref::Error> {
    let iri = Iri::new("https://www.rust-lang.org/foo/bar?query#frag")?;
    let scheme = iri.scheme();
    println!("scheme: {}", scheme);
    let authority = iri.authority().unwrap();
    println!("authority: {}", authority);
    let path = iri.path();
    println!("path: {}", path);
    let query = iri.query().unwrap();
    println!("query: {}", query);
    let fragment = iri.fragment().unwrap();
    println!("fragment: {}", fragment);
    Ok(())
}
|
pub mod file;
pub mod hash;
pub mod storage;
|
// This file is part of Substrate.
// Copyright (C) 2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::state_holder;
use sc_executor_common::error::WasmError;
use sp_wasm_interface::{Function, Value, ValueType};
use std::any::Any;
use wasmtime::{
Extern, ExternType, Func, FuncType, ImportType, Limits, Memory, MemoryType, Module, Store,
Trap, Val,
};
/// The set of externs resolved for a module's imports, ready to be handed to
/// wasmtime instantiation.
pub struct Imports {
    /// Contains the index into `externs` where the memory import is stored if any. `None` if there
    /// is none.
    pub memory_import_index: Option<usize>,
    /// Resolved externs, in the same order the module declares its imports.
    pub externs: Vec<Extern>,
}
/// Goes over all imports of a module and prepares a vector of `Extern`s that can be used for
/// instantiation of the module. Returns an error if there are imports that cannot be satisfied.
pub fn resolve_imports(
    store: &Store,
    module: &Module,
    host_functions: &[&'static dyn Function],
    heap_pages: u32,
    allow_missing_func_imports: bool,
) -> Result<Imports, WasmError> {
    let mut externs = Vec::new();
    let mut memory_import_index = None;
    for import_ty in module.imports() {
        // Only the "env" module is provided by the host.
        if import_ty.module() != "env" {
            return Err(WasmError::Other(format!(
                "host doesn't provide any imports from non-env module: {}:{}",
                import_ty.module(),
                import_ty.name()
            )));
        }
        let resolved = if import_ty.name() == "memory" {
            // Remember where the memory extern lands so the instance can find it.
            memory_import_index = Some(externs.len());
            resolve_memory_import(store, &import_ty, heap_pages)?
        } else {
            resolve_func_import(store, &import_ty, host_functions, allow_missing_func_imports)?
        };
        externs.push(resolved);
    }
    Ok(Imports { memory_import_index, externs })
}
/// Resolves the special `env:memory` import by creating a wasmtime `Memory`
/// whose initial size is the requested minimum plus `heap_pages`.
///
/// Errors if the import is not of memory type, or if the incremented initial
/// size exceeds the maximum the module requested.
fn resolve_memory_import(
    store: &Store,
    import_ty: &ImportType,
    heap_pages: u32,
) -> Result<Extern, WasmError> {
    let requested_memory_ty = match import_ty.ty() {
        ExternType::Memory(memory_ty) => memory_ty,
        _ =>
            return Err(WasmError::Other(format!(
                "this import must be of memory type: {}:{}",
                import_ty.module(),
                import_ty.name()
            ))),
    };
    // Increment the min (a.k.a initial) number of pages by `heap_pages` and check if it exceeds the
    // maximum specified by the import.
    let initial = requested_memory_ty.limits().min().saturating_add(heap_pages);
    if let Some(max) = requested_memory_ty.limits().max() {
        if initial > max {
            // FIX: a trailing `\` in a string literal swallows the newline AND the
            // following indentation, so the message previously rendered as
            // "...maximum requestedby the runtime...". Keep the space before the
            // continuation so the words stay separated.
            return Err(WasmError::Other(format!(
                "incremented number of pages by heap_pages (total={}) is more than maximum \
                 requested by the runtime wasm module {}",
                initial,
                max,
            )))
        }
    }
    let memory_ty = MemoryType::new(Limits::new(initial, requested_memory_ty.limits().max()));
    let memory = Memory::new(store, memory_ty);
    Ok(Extern::Memory(memory))
}
/// Resolves a function import against the host-provided functions.
///
/// When `allow_missing_func_imports` is true, unknown imports are satisfied
/// with a stub that traps when called; otherwise they are an error.
fn resolve_func_import(
    store: &Store,
    import_ty: &ImportType,
    host_functions: &[&'static dyn Function],
    allow_missing_func_imports: bool,
) -> Result<Extern, WasmError> {
    let func_ty = if let ExternType::Func(func_ty) = import_ty.ty() {
        func_ty
    } else {
        return Err(WasmError::Other(format!(
            "host doesn't provide any non function imports besides 'memory': {}:{}",
            import_ty.module(),
            import_ty.name()
        )));
    };
    let found = host_functions.iter().find(|f| f.name() == import_ty.name());
    let host_func = match found {
        Some(host_func) => *host_func,
        None if allow_missing_func_imports =>
            return Ok(MissingHostFuncHandler::new(import_ty).into_extern(store, &func_ty)),
        None =>
            return Err(WasmError::Other(format!(
                "host doesn't provide such function: {}:{}",
                import_ty.module(),
                import_ty.name()
            ))),
    };
    // The host function must agree with the signature the module expects.
    if !signature_matches(&func_ty, &wasmtime_func_sig(host_func)) {
        return Err(WasmError::Other(format!(
            "signature mismatch for: {}:{}",
            import_ty.module(),
            import_ty.name()
        )));
    }
    Ok(HostFuncHandler::new(host_func).into_extern(store))
}
/// Returns `true` if `lhs` and `rhs` represent the same signature.
fn signature_matches(lhs: &wasmtime::FuncType, rhs: &wasmtime::FuncType) -> bool {
    let params_match = lhs.params() == rhs.params();
    let results_match = lhs.results() == rhs.results();
    params_match && results_match
}
/// This structure implements `Callable` and acts as a bridge between wasmtime and
/// substrate host functions.
struct HostFuncHandler {
    /// The substrate host function this handler forwards wasmtime calls to.
    host_func: &'static dyn Function,
}
/// Bridges a wasmtime call onto a substrate host function: converts the
/// wasmtime values, runs the function inside the stored host context, and
/// converts the result (or a panic) back into wasmtime's world as a trap.
fn call_static(
    static_func: &'static dyn Function,
    wasmtime_params: &[Val],
    wasmtime_results: &mut [Val],
) -> Result<(), wasmtime::Trap> {
    let unwind_result = state_holder::with_context(|host_ctx| {
        let mut host_ctx = host_ctx.expect(
            "host functions can be called only from wasm instance;
            wasm instance is always called initializing context;
            therefore host_ctx cannot be None;
            qed
            ",
        );
        // `into_value` panics if it encounters a value that doesn't fit into the values
        // available in substrate.
        //
        // This, however, cannot happen since the signature of this function is created from
        // a `dyn Function` signature of which cannot have a non substrate value by definition.
        let mut params = wasmtime_params.iter().cloned().map(into_value);
        // Catch panics so a misbehaving host function becomes a trap rather
        // than unwinding across the wasmtime boundary.
        std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
            static_func.execute(&mut host_ctx, &mut params)
        }))
    });
    let execution_result = match unwind_result {
        Ok(execution_result) => execution_result,
        // A panic is reported as a trap carrying the stringified payload.
        Err(err) => return Err(Trap::new(stringify_panic_payload(err))),
    };
    match execution_result {
        Ok(Some(ret_val)) => {
            debug_assert!(
                wasmtime_results.len() == 1,
                "wasmtime function signature, therefore the number of results, should always \
                correspond to the number of results returned by the host function",
            );
            wasmtime_results[0] = into_wasmtime_val(ret_val);
            Ok(())
        },
        Ok(None) => {
            debug_assert!(
                wasmtime_results.len() == 0,
                "wasmtime function signature, therefore the number of results, should always \
                correspond to the number of results returned by the host function",
            );
            Ok(())
        },
        Err(msg) => Err(Trap::new(msg)),
    }
}
impl HostFuncHandler {
fn new(host_func: &'static dyn Function) -> Self {
Self { host_func }
}
fn into_extern(self, store: &Store) -> Extern {
let host_func = self.host_func;
let func_ty = wasmtime_func_sig(self.host_func);
let func = Func::new(store, func_ty, move |_, params, result| {
call_static(host_func, params, result)
});
Extern::Func(func)
}
}
/// A `Callable` handler for missing functions.
struct MissingHostFuncHandler {
    /// Module name of the unresolved import (used in the trap message).
    module: String,
    /// Function name of the unresolved import (used in the trap message).
    name: String,
}
impl MissingHostFuncHandler {
fn new(import_ty: &ImportType) -> Self {
Self { module: import_ty.module().to_string(), name: import_ty.name().to_string() }
}
fn into_extern(self, store: &Store, func_ty: &FuncType) -> Extern {
let Self { module, name } = self;
let func = Func::new(store, func_ty.clone(), move |_, _, _| {
Err(Trap::new(format!("call to a missing function {}:{}", module, name)))
});
Extern::Func(func)
}
}
/// Builds the wasmtime signature corresponding to a substrate host function.
fn wasmtime_func_sig(func: &dyn Function) -> wasmtime::FuncType {
    // Compute the signature once instead of once per field (the previous
    // version called `func.signature()` twice).
    let signature = func.signature();
    let params = signature
        .args
        .iter()
        .cloned()
        .map(into_wasmtime_val_type)
        .collect::<Vec<_>>()
        .into_boxed_slice();
    let results = signature
        .return_value
        .iter()
        .cloned()
        .map(into_wasmtime_val_type)
        .collect::<Vec<_>>()
        .into_boxed_slice();
    wasmtime::FuncType::new(params, results)
}
/// Converts a substrate runtime interface `ValueType` into the corresponding
/// wasmtime value type. The mapping is total: every substrate value type has
/// a wasmtime counterpart.
fn into_wasmtime_val_type(val_ty: ValueType) -> wasmtime::ValType {
    match val_ty {
        ValueType::I32 => wasmtime::ValType::I32,
        ValueType::I64 => wasmtime::ValType::I64,
        ValueType::F32 => wasmtime::ValType::F32,
        ValueType::F64 => wasmtime::ValType::F64,
    }
}
/// Converts a `Val` into a substrate runtime interface `Value`.
///
/// Panics if the given value doesn't have a corresponding variant in `Value`.
pub fn into_value(val: Val) -> Value {
    match val {
        Val::I32(v) => Value::I32(v),
        Val::I64(v) => Value::I64(v),
        // Floats are carried as raw bit patterns on both sides.
        Val::F32(f_bits) => Value::F32(f_bits),
        Val::F64(f_bits) => Value::F64(f_bits),
        // wasmtime-only value kinds that substrate does not model.
        _ => panic!("Given value type is unsupported by substrate"),
    }
}
/// Converts a substrate runtime interface `Value` into a wasmtime `Val`.
/// Infallible: every substrate value has a wasmtime representation.
pub fn into_wasmtime_val(value: Value) -> wasmtime::Val {
    match value {
        Value::I32(v) => Val::I32(v),
        Value::I64(v) => Val::I64(v),
        // Floats are carried as raw bit patterns on both sides.
        Value::F32(f_bits) => Val::F32(f_bits),
        Value::F64(f_bits) => Val::F64(f_bits),
    }
}
/// Attempt to convert a opaque panic payload to a string.
///
/// Panic payloads are usually either a `&'static str` (from `panic!("...")`)
/// or a `String` (from `panic!("{}", ..)`); anything else is reported as an
/// opaque box.
fn stringify_panic_payload(payload: Box<dyn Any + Send + 'static>) -> String {
    // `downcast` consumes the box; on failure it hands it back for the next try.
    let payload = match payload.downcast::<&'static str>() {
        Ok(msg) => return (*msg).to_string(),
        Err(other) => other,
    };
    match payload.downcast::<String>() {
        Ok(msg) => *msg,
        // At least we tried...
        Err(_) => "Box<Any>".to_string(),
    }
}
|
// Copyright 2020 <盏一 w@hidva.com>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::datums::Datums;
use crate::kbensure;
use crate::utils::fmgr::FmgrInfo;
use crate::utils::WorkerState;
use std::mem::{align_of, size_of};
use std::rc::Rc;
/// Implements an element-wise, overflow-checked binary operator over two
/// `Datums` columns, writing the result into `$ret`.
///
/// Handles all four operand-shape combinations:
/// - single op single: produces a single value (or a single null);
/// - single op vector / vector op single: broadcasts the single side;
/// - vector op vector: pairwise, lengths must match.
///
/// Nulls propagate (any null operand yields a null result); arithmetic
/// overflow raises ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE.
macro_rules! typbinop {
    ($ret: ident, $left: ident, $right: ident, $optyp: ty, $binop: ident) => {
        let retdatum = Rc::make_mut($ret);
        if $left.is_single() && $right.is_single() {
            if $left.is_single_null() || $right.is_single_null() {
                retdatum.set_single_null();
                return Ok(());
            }
            let (retval, of) = $left
                .get_single_fixedlen::<$optyp>()
                .$binop($right.get_single_fixedlen::<$optyp>());
            kbensure!(
                !of,
                ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE,
                "integer out of range"
            );
            retdatum.set_single_fixedlen(retval);
            return Ok(());
        }
        if $left.is_single() {
            retdatum.resize_fixedlen($right.len(), size_of::<$optyp>(), align_of::<$optyp>());
            if $left.is_single_null() {
                retdatum.set_null_all();
                return Ok(());
            }
            retdatum.set_notnull_all();
            let li32: $optyp = $left.get_single_fixedlen();
            for idx in 0..$right.len() as isize {
                if $right.is_null_at(idx) {
                    retdatum.set_null_at(idx);
                } else {
                    let (reti32, of) = li32.$binop($right.get_fixedlen_at(idx));
                    kbensure!(
                        !of,
                        ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE,
                        "integer out of range"
                    );
                    retdatum.set_fixedlen_at(idx, reti32);
                }
            }
            return Ok(());
        }
        if $right.is_single() {
            retdatum.resize_fixedlen($left.len(), size_of::<$optyp>(), align_of::<$optyp>());
            if $right.is_single_null() {
                retdatum.set_null_all();
                return Ok(());
            }
            retdatum.set_notnull_all();
            let li32 = $right.get_single_fixedlen();
            for idx in 0..$left.len() as isize {
                if $left.is_null_at(idx) {
                    retdatum.set_null_at(idx);
                } else {
                    let (reti32, of) = $left.get_fixedlen_at::<$optyp>(idx).$binop(li32);
                    kbensure!(
                        !of,
                        ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE,
                        "integer out of range"
                    );
                    retdatum.set_fixedlen_at(idx, reti32);
                }
            }
            return Ok(());
        }
        debug_assert_eq!($left.len(), $right.len());
        retdatum.resize_fixedlen($left.len(), size_of::<$optyp>(), align_of::<$optyp>());
        retdatum.set_null_or($left, $right);
        for idx in 0..$left.len() as isize {
            if !retdatum.is_null_at(idx) {
                let (retval, of) = $left
                    .get_fixedlen_at::<$optyp>(idx)
                    .$binop($right.get_fixedlen_at::<$optyp>(idx));
                kbensure!(
                    !of,
                    ERRCODE_NUMERIC_VALUE_OUT_OF_RANGE,
                    "integer out of range"
                );
                // BUG FIX: this previously called `set_single_fixedlen(retval)`,
                // which overwrote the single-value slot instead of storing the
                // element at `idx` as the broadcast branches above do.
                retdatum.set_fixedlen_at(idx, retval);
            }
        }
    };
}
/// Specialization of `typbinop!` for `i32` operands.
macro_rules! i32binop {
    ($ret: ident, $left: ident, $right: ident, $binop: ident) => {
        typbinop!($ret, $left, $right, i32, $binop)
    };
}
/// `int4 + int4`, with overflow checking.
pub fn int4pl(
    _flinfo: &FmgrInfo,
    ret: &mut Rc<Datums>,
    args: &[Rc<Datums>],
    _state: &WorkerState,
) -> anyhow::Result<()> {
    let (left, right) = (&args[0], &args[1]);
    i32binop!(ret, left, right, overflowing_add);
    Ok(())
}
/// Formats int4 values as text (output function).
pub fn int4out(
    _flinfo: &FmgrInfo,
    ret: &mut Rc<Datums>,
    args: &[Rc<Datums>],
    _state: &WorkerState,
) -> anyhow::Result<()> {
    let arg = &args[0];
    let retdatum = Rc::make_mut(ret);
    if arg.is_single() {
        if arg.is_single_null() {
            retdatum.set_single_null();
        } else {
            let text = arg.get_single_fixedlen::<i32>().to_string();
            retdatum.set_single_varchar(text.as_bytes());
        }
        return Ok(());
    }
    retdatum.resize_varlen(arg.len());
    retdatum.set_null_to(arg);
    for idx in 0..arg.len() as isize {
        if arg.is_null_at(idx) {
            retdatum.set_empty_at(idx);
        } else {
            let text = arg.get_fixedlen_at::<i32>(idx).to_string();
            retdatum.set_varchar_at(idx, text.as_bytes());
        }
    }
    Ok(())
}
/// Parses text into int4 values (input function).
///
/// A parse failure on any element surfaces as `Err` via `?`.
pub fn int4in(
    _flinfo: &FmgrInfo,
    ret: &mut Rc<Datums>,
    args: &[Rc<Datums>],
    _state: &WorkerState,
) -> anyhow::Result<()> {
    let retdatum = Rc::make_mut(ret);
    let arg = &args[0];
    if arg.is_single() {
        if arg.is_single_null() {
            retdatum.set_single_null();
        } else {
            let inarg = arg.get_single_varchar();
            let out: i32 = inarg.parse()?;
            retdatum.set_single_fixedlen(out);
        }
        return Ok(());
    }
    retdatum.resize_fixedlen(arg.len(), size_of::<i32>(), align_of::<i32>());
    // Null slots are marked via set_null_to and simply skipped below.
    // NOTE(review): unlike int4out there is no per-slot `set_empty_at` for
    // null entries — presumably fixed-length slots need no clearing; confirm.
    retdatum.set_null_to(arg);
    for idx in 0..arg.len() as isize {
        if !arg.is_null_at(idx) {
            let instr = arg.get_varchar_at(idx);
            let out: i32 = instr.parse()?;
            retdatum.set_fixedlen_at(idx, out);
        }
    }
    return Ok(());
}
/// `int4 - int4`, with overflow checking.
pub fn int4mi(
    _flinfo: &FmgrInfo,
    ret: &mut Rc<Datums>,
    args: &[Rc<Datums>],
    _state: &WorkerState,
) -> anyhow::Result<()> {
    let (left, right) = (&args[0], &args[1]);
    i32binop!(ret, left, right, overflowing_sub);
    Ok(())
}
/// `int4 / int4`, with overflow checking.
///
/// NOTE(review): `overflowing_div` panics on a zero divisor, so a division by
/// zero aborts instead of raising a SQL-style error — confirm whether callers
/// guarantee non-zero divisors or whether an explicit check is needed.
pub fn int4div(
    _flinfo: &FmgrInfo,
    ret: &mut Rc<Datums>,
    args: &[Rc<Datums>],
    _state: &WorkerState,
) -> anyhow::Result<()> {
    let left = &args[0];
    let right = &args[1];
    i32binop!(ret, left, right, overflowing_div);
    return Ok(());
}
/// `int4 * int4`, with overflow checking.
pub fn int4mul(
    _flinfo: &FmgrInfo,
    ret: &mut Rc<Datums>,
    args: &[Rc<Datums>],
    _state: &WorkerState,
) -> anyhow::Result<()> {
    let (left, right) = (&args[0], &args[1]);
    i32binop!(ret, left, right, overflowing_mul);
    Ok(())
}
|
extern crate protoc_rust_grpc;
/// Build script: generates gRPC stubs from `occlum_exec.proto` into `src/`
/// and tells cargo where to find the occlum-pal shared library.
fn main() {
    protoc_rust_grpc::Codegen::new()
        .out_dir("src")
        .input("occlum_exec.proto")
        .rust_protobuf(true)
        .run()
        .expect("protoc-rust-grpc");
    let link_flags = [
        "cargo:rustc-link-search=native=../../build/lib",
        "cargo:rustc-link-lib=dylib=occlum-pal",
    ];
    for flag in link_flags.iter() {
        println!("{}", flag);
    }
}
|
//! Concrete implementations for the traits in [crate::lp_format]
use std::fmt;
use std::fmt::Formatter;
use crate::lp_format::{AsVariable, Constraint, LpObjective, LpProblem, WriteToLpFileFormat};
/// A string that is a valid expression in the .lp format for the solver you are using.
/// The text is written into the output verbatim; no validation happens here.
pub struct StrExpression(pub String);
/// A variable to optimize, together with its integrality and bounds.
pub struct Variable {
    /// The variable name should be unique in the problem and have a name accepted by the solver
    pub name: String,
    /// Whether the variable is restricted to only integer values
    pub is_integer: bool,
    /// -INFINITY if there is no lower bound
    pub lower_bound: f64,
    /// INFINITY if there is no upper bound
    pub upper_bound: f64,
}
impl WriteToLpFileFormat for StrExpression {
    /// Emits the stored expression text verbatim.
    fn to_lp_file_format(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
// Plain field accessors satisfying the solver-facing `AsVariable` trait.
impl AsVariable for Variable {
    fn name(&self) -> &str {
        &self.name
    }
    fn is_integer(&self) -> bool {
        self.is_integer
    }
    fn lower_bound(&self) -> f64 {
        self.lower_bound
    }
    fn upper_bound(&self) -> f64 {
        self.upper_bound
    }
}
/// A concrete linear problem
pub struct Problem<EXPR = StrExpression, VAR = Variable> {
    /// Problem name. "lp_solvers_problem" by default.
    pub name: String,
    /// Whether to maximize or minimize the objective
    pub sense: LpObjective,
    /// Target objective function
    pub objective: EXPR,
    /// Variables of the problem
    pub variables: Vec<VAR>,
    /// List of constraints to apply
    pub constraints: Vec<Constraint<EXPR>>,
}
// Any `Problem` whose borrowed expression/variable types satisfy the LP
// traits is itself an `LpProblem`; `'a` ties the returned iterators and
// references to the borrow of `self`.
impl<'a, EXPR: 'a, VAR: 'a> LpProblem<'a> for Problem<EXPR, VAR>
where
    &'a VAR: AsVariable,
    &'a EXPR: WriteToLpFileFormat,
{
    type Variable = &'a VAR;
    type Expression = &'a EXPR;
    type ConstraintIterator = Box<dyn Iterator<Item = Constraint<&'a EXPR>> + 'a>;
    type VariableIterator = std::slice::Iter<'a, VAR>;
    fn name(&self) -> &str {
        &self.name
    }
    fn variables(&'a self) -> Self::VariableIterator {
        self.variables.iter()
    }
    fn objective(&'a self) -> Self::Expression {
        &self.objective
    }
    fn sense(&self) -> LpObjective {
        self.sense
    }
    // Borrows each stored constraint, copying the operator and right-hand side.
    fn constraints(&'a self) -> Self::ConstraintIterator {
        Box::new(
            self.constraints
                .iter()
                .map(|Constraint { lhs, operator, rhs }| Constraint {
                    lhs,
                    operator: *operator,
                    rhs: *rhs,
                }),
        )
    }
}
|
use base64;
use std::fs::File;
use std::io::prelude::*;
use std::io::BufReader;
use transpose;
/// Reads the base64-encoded ciphertext from `6.txt`, breaks the repeating-key
/// XOR, and prints the recovered plaintext.
fn main() {
    let input = File::open("6.txt").unwrap();
    let b64: String = BufReader::new(input)
        .lines()
        .map(|line| line.unwrap())
        .collect::<Vec<_>>()
        .join("");
    let ciphertext = base64::decode(&b64).unwrap();
    let plaintext = decrypt(&ciphertext);
    println!("{}", String::from_utf8(plaintext).unwrap());
}
/// Breaks repeating-key XOR (cryptopals set 1, challenge 6) and returns the
/// best-scoring plaintext.
fn decrypt(file: &[u8]) -> Vec<u8> {
    // Let KEYSIZE be the guessed length of the key; try values from 2 to (say) 40.
    // (The upper bound is exclusive, so keysizes 2..=39 are tried.)
    let mut keysizes_to_try: Vec<_> = (2usize..40usize)
        .map(|keysize| {
            // For each KEYSIZE, sum the normalized edit distance between
            // consecutive pairs of KEYSIZE-byte chunks; the real key length
            // tends to minimize it. (The previous `file.clone()` only cloned
            // the `&[u8]` reference — a no-op — and was dropped.)
            let distance = file
                .chunks_exact(keysize)
                .collect::<Vec<_>>()
                .chunks_exact(2)
                .map(|pair| hamming(pair[0], pair[1]) / keysize as u32)
                .sum::<u32>();
            (distance, keysize)
        })
        .collect();
    keysizes_to_try.sort();
    // The KEYSIZE with the smallest normalized edit distance is probably the key.
    // Proceed with the 4 most promising keysizes and keep the best result.
    keysizes_to_try
        .iter()
        .take(4)
        .map(|&(_distance, keysize)| {
            // Transpose the ciphertext so every byte encrypted with the same
            // key byte lands in the same row, e.g. for keysize 4:
            //   [0, 1, 2, 3, 4, 5, ...]  ->  [0, 4, 8, ... | 1, 5, 9, ... | ...]
            let height = file.len() / keysize; // truncates any trailing partial block
            let mut transposed_chunks = vec![0; keysize * height];
            transpose::transpose(
                &file[0..keysize * height],
                &mut transposed_chunks,
                keysize,
                height,
            );
            // Solve each row as single-byte XOR; the per-row keys concatenate
            // into the repeating key.
            let solved_chunks = find_the_xor(transposed_chunks.chunks(height));
            let key: Vec<u8> = solved_chunks.iter().map(|(_, _, k)| *k).collect();
            let score: i64 = solved_chunks.iter().map(|(score, _, _)| *score).sum();
            (score, repeating_xor(file, &key))
        })
        .max()
        .unwrap()
        .1
}
/// XORs `one` against `two` repeated cyclically; the result has `one`'s length
/// (empty if `two` is empty, since the cycled key stream is then empty).
fn repeating_xor(one: &[u8], two: &[u8]) -> Vec<u8> {
    let key_stream = two.iter().cycle();
    one.iter().zip(key_stream).map(|(a, b)| a ^ b).collect()
}
/// Number of differing bits between two equal-length byte slices.
///
/// Panics if the slices differ in length.
fn hamming(one: &[u8], two: &[u8]) -> u32 {
    assert_eq!(one.len(), two.len());
    let mut bits = 0u32;
    for (a, b) in one.iter().zip(two.iter()) {
        bits += (a ^ b).count_ones();
    }
    bits
}
/// English-likeness score for a single ASCII byte; higher is more English.
/// Uppercase letters are folded to lowercase before scoring.
fn score_ascii_byte(c: u8) -> i64 {
    let lower = c.to_ascii_lowercase();
    match lower as char {
        'e' => 12,
        't' | 'a' | 'o' => 8,
        'i' | 'n' => 7,
        's' | 'h' | 'r' => 6,
        'd' | 'l' => 4,
        'c' | 'u' => 3,
        'm' | 'w' | 'f' | 'g' | 'y' | 'p' => 2,
        'b' | 'v' | 'k' | ' ' => 1,
        'j' | 'x' | 'q' | 'z' | '\n' => 0,
        _ => -2,
    }
}
/// Sums the per-byte English-likeness scores of the whole buffer.
fn score_ascii(string: Vec<u8>) -> i64 {
    let mut total = 0;
    for byte in string {
        total += score_ascii_byte(byte);
    }
    total
}
/// For each input slice, finds the single-byte XOR key whose decryption looks
/// most like English. Returns `(score, decrypted text, key)` per slice;
/// candidates that are not ASCII score as `(0, "Not Found", 0)`.
fn find_the_xor<'a>(strings: impl Iterator<Item = &'a [u8]>) -> Vec<(i64, String, u8)> {
    strings
        .map(|string| {
            // Try every possible key byte. The previous `0..u8::MAX` range was
            // exclusive and silently never tried key 255.
            (0u8..=std::u8::MAX)
                .map(|key| {
                    let candidate: Vec<u8> = string.iter().map(|byte| byte ^ key).collect();
                    if let Ok(text) = String::from_utf8(candidate.clone()) {
                        if text.is_ascii() {
                            return (score_ascii(candidate), text, key);
                        }
                    }
                    (0, "Not Found".into(), 0u8)
                })
                .max()
                .unwrap()
        })
        .collect()
}
// Known-answer test taken from the cryptopals challenge statement.
#[test]
fn test_hamming() {
    assert_eq!(hamming(b"this is a test", b"wokka wokka!!!"), 37)
}
|
#![recursion_limit = "1024"]
#[macro_use]
extern crate error_chain;
extern crate rss;
extern crate reqwest;
extern crate lettre;
extern crate chrono;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
#[macro_use]
extern crate tera;
use std::io::Read;
use std::io::Write;
use std::fs::File;
use rss::Channel;
use rss::Item;
use lettre::email::EmailBuilder;
use lettre::transport::smtp::SmtpTransportBuilder;
use lettre::transport::smtp::authentication::Mechanism;
use lettre::transport::EmailTransport;
use tera::Tera;
use chrono::DateTime;
use chrono::Local;
/// Error types generated by `error_chain`, with conversions from every
/// library error this binary can hit.
mod errors {
    error_chain! {
        foreign_links {
            Io(::std::io::Error);
            Http(::reqwest::Error);
            Rss(::rss::Error);
            Json(::serde_json::Error);
            Render(::tera::Error);
            Mail(::lettre::email::error::Error);
            // NOTE(review): variant name is misspelled ("Tranport"); renaming
            // would change the error API used elsewhere, so it is kept as-is.
            Tranport(::lettre::transport::smtp::error::Error);
        }
    }
}
use errors::*;
/// Serializable mirror of an RSS item, used as template context for the
/// notification e-mail.
#[derive(Serialize, Debug)]
struct SerItem {
    title: Option<String>,
    link: Option<String>,
    description: Option<String>,
    author: Option<String>,
    // Reformatted by `convert_pub_date` when the feed date parses.
    pub_date: Option<String>,
}
/// Runtime configuration, loaded from `bbsmon.json` by `load_config`.
#[derive(Deserialize, Debug)]
struct Config {
    // Path of the locally cached feed snapshot.
    local_rss: String,
    // URL of the remote feed to poll.
    remote_rss: String,
    // E-mail subject line.
    subject: String,
    // Sender address; also used as the SMTP credential login.
    from: String,
    // Recipient address.
    to: String,
    // SMTP password paired with `from`.
    password: String,
    // SMTP server host name (port 25 is used).
    server: String,
}
/// An RSS feed held both as raw XML (for byte-exact persistence) and in
/// parsed form (for item comparison).
struct RssContext {
    // Raw XML exactly as read; written back verbatim by `to_file`.
    raw: String,
    // Parsed form of `raw`.
    channel: Channel,
}
impl RssContext {
    /// Fetches and parses an RSS feed over HTTP.
    pub fn from_url(url: &str) -> Result<RssContext> {
        let resp = reqwest::get(url)?;
        RssContext::from_reader(resp)
    }

    /// Loads and parses an RSS feed from a local file.
    pub fn from_file(filename: &str) -> Result<RssContext> {
        RssContext::from_reader(File::open(filename)?)
    }

    /// Writes the raw feed XML back to disk unchanged.
    pub fn to_file(&self, filename: &str) -> Result<()> {
        let mut writer = File::create(filename)?;
        writer.write_all(self.raw.as_bytes())?;
        Ok(())
    }

    /// Returns the items present in `ctx_a` but not in `ctx_b`.
    pub fn diff(ctx_a: &RssContext, ctx_b: &RssContext) -> Vec<Item> {
        ctx_a
            .channel
            .items
            .iter()
            .filter(|item| !ctx_b.channel.items.contains(*item))
            .cloned()
            .collect()
    }

    /// Reads raw XML from `reader` and keeps both the raw text and the
    /// parsed channel.
    fn from_reader<R: Read>(mut reader: R) -> Result<RssContext> {
        let mut body = String::new();
        reader.read_to_string(&mut body)?;
        let channel: rss::Channel = body.parse()?;
        Ok(RssContext { raw: body, channel: channel })
    }
}
/// Re-formats an RFC 2822 feed date into local "%Y-%m-%d %H:%M:%S" time.
/// Absent or unparseable dates are passed through unchanged.
fn convert_pub_date(old: &Option<String>) -> Option<String> {
    match old {
        Some(date_str) => match DateTime::parse_from_rfc2822(date_str) {
            Ok(date) => Some(
                date.with_timezone(&Local)
                    .format("%Y-%m-%d %H:%M:%S")
                    .to_string(),
            ),
            Err(_) => old.clone(),
        },
        None => None,
    }
}
/// Converts RSS items into their serializable template representation.
fn convert_to_ser_items(items: &Vec<Item>) -> Vec<SerItem> {
    items
        .iter()
        .map(|item| SerItem {
            title: item.title.clone(),
            link: item.link.clone(),
            description: item.description.clone(),
            author: item.author.clone(),
            pub_date: convert_pub_date(&item.pub_date),
        })
        .collect()
}
/// Loads the JSON configuration from `filename`.
///
/// Both I/O failures and malformed JSON bubble up as errors.
fn load_config(filename: &str) -> Result<Config> {
    // `fs::read_to_string` replaces the manual open + read_to_string dance.
    let content = ::std::fs::read_to_string(filename)?;
    let config: Config = serde_json::from_str(&content)?;
    Ok(config)
}
/// Downloads the remote feed, diffs it against the locally cached copy, and
/// returns the new items (possibly empty) with the freshly fetched context.
fn fetch_diff_items(local: &str, remote: &str) -> Result<(Vec<SerItem>, RssContext)> {
    let new_ctx = RssContext::from_url(remote)?;
    let old_ctx = RssContext::from_file(local)?;
    let new_items = RssContext::diff(&new_ctx, &old_ctx);
    // `convert_to_ser_items` yields an empty Vec for an empty diff, so the
    // previous explicit empty-check produced the same result in both branches.
    Ok((convert_to_ser_items(&new_items), new_ctx))
}
/// Renders the new items into the e-mail body using the Tera template.
fn render(templates: &str, tmpl_file: &str, items: &Vec<SerItem>) -> Result<String> {
    let tera = compile_templates!(templates);
    let mut tctx = tera::Context::new();
    tctx.add("items", &items);
    Ok(tera.render(tmpl_file, tctx)?)
}
/// Sends the rendered HTML notification over SMTP (port 25, PLAIN auth);
/// `from` doubles as the SMTP login.
fn send_mail(c: &Config, content: &String) -> Result<()> {
    let email = EmailBuilder::new()
        .subject(&c.subject)
        .from(c.from.as_str())
        .to((c.to.as_str(), "BBS Notification Receiver"))
        // HTML body so the rendered template markup displays in the client.
        .header(("Content-Type", "text/html; charset=UTF-8"))
        .body(content)
        .build()?;
    let mut sender = SmtpTransportBuilder::new((c.server.as_str(), 25))?
        .credentials(&c.from, &c.password)
        .smtp_utf8(true)
        .authentication_mechanism(Mechanism::Plain)
        .build();
    sender.send(email)?;
    return Ok(());
}
/// Fetches the feed, mails any new items, and persists the new snapshot only
/// after the mail was sent successfully (so failures retry next run).
fn run() -> Result<()> {
    let config = load_config("bbsmon.json")?;
    let (items, new_ctx) = fetch_diff_items(&config.local_rss, &config.remote_rss)?;
    // `is_empty` replaces the `len() <= 0` comparison (lengths are unsigned,
    // so `< 0` could never happen anyway).
    if items.is_empty() {
        println!("new and old rss are same.");
        return Ok(());
    }
    let content = render("templates/**/*", "mail.html", &items)?;
    send_mail(&config, &content)?;
    new_ctx.to_file("old-rss.xml")?;
    Ok(())
}
quick_main!(run);
|
// Auto-generated (svd2rust-style) accessors for the C2APB1FZR1 debug-freeze
// register; regenerate from the SVD rather than editing by hand.
#[doc = "Register `C2APB1FZR1` reader"]
pub type R = crate::R<C2APB1FZR1_SPEC>;
#[doc = "Register `C2APB1FZR1` writer"]
pub type W = crate::W<C2APB1FZR1_SPEC>;
#[doc = "Field `DBG_TIM2_STOP` reader - DBG_TIM2_STOP"]
pub type DBG_TIM2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_TIM2_STOP` writer - DBG_TIM2_STOP"]
pub type DBG_TIM2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_RTC_STOP` reader - DBG_RTC_STOP"]
pub type DBG_RTC_STOP_R = crate::BitReader;
#[doc = "Field `DBG_RTC_STOP` writer - DBG_RTC_STOP"]
pub type DBG_RTC_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_IWDG_STOP` reader - DBG_IWDG_STOP"]
pub type DBG_IWDG_STOP_R = crate::BitReader;
#[doc = "Field `DBG_IWDG_STOP` writer - DBG_IWDG_STOP"]
pub type DBG_IWDG_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C1_STOP` reader - DBG_I2C1_STOP"]
pub type DBG_I2C1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C1_STOP` writer - DBG_I2C1_STOP"]
pub type DBG_I2C1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C2_STOP` reader - DBG_I2C2_STOP"]
pub type DBG_I2C2_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C2_STOP` writer - DBG_I2C2_STOP"]
pub type DBG_I2C2_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_I2C3_STOP` reader - DBG_I2C3_STOP"]
pub type DBG_I2C3_STOP_R = crate::BitReader;
#[doc = "Field `DBG_I2C3_STOP` writer - DBG_I2C3_STOP"]
pub type DBG_I2C3_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `DBG_LPTIM1_STOP` reader - DBG_LPTIM1_STOP"]
pub type DBG_LPTIM1_STOP_R = crate::BitReader;
#[doc = "Field `DBG_LPTIM1_STOP` writer - DBG_LPTIM1_STOP"]
pub type DBG_LPTIM1_STOP_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// Read-only views of the individual freeze bits (generated code).
impl R {
    #[doc = "Bit 0 - DBG_TIM2_STOP"]
    #[inline(always)]
    pub fn dbg_tim2_stop(&self) -> DBG_TIM2_STOP_R {
        DBG_TIM2_STOP_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 10 - DBG_RTC_STOP"]
    #[inline(always)]
    pub fn dbg_rtc_stop(&self) -> DBG_RTC_STOP_R {
        DBG_RTC_STOP_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 12 - DBG_IWDG_STOP"]
    #[inline(always)]
    pub fn dbg_iwdg_stop(&self) -> DBG_IWDG_STOP_R {
        DBG_IWDG_STOP_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 21 - DBG_I2C1_STOP"]
    #[inline(always)]
    pub fn dbg_i2c1_stop(&self) -> DBG_I2C1_STOP_R {
        DBG_I2C1_STOP_R::new(((self.bits >> 21) & 1) != 0)
    }
    #[doc = "Bit 22 - DBG_I2C2_STOP"]
    #[inline(always)]
    pub fn dbg_i2c2_stop(&self) -> DBG_I2C2_STOP_R {
        DBG_I2C2_STOP_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - DBG_I2C3_STOP"]
    #[inline(always)]
    pub fn dbg_i2c3_stop(&self) -> DBG_I2C3_STOP_R {
        DBG_I2C3_STOP_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 31 - DBG_LPTIM1_STOP"]
    #[inline(always)]
    pub fn dbg_lptim1_stop(&self) -> DBG_LPTIM1_STOP_R {
        DBG_LPTIM1_STOP_R::new(((self.bits >> 31) & 1) != 0)
    }
}
// Write proxies for the individual freeze bits; each returns a bit writer
// parameterised on the bit offset (generated code).
impl W {
    #[doc = "Bit 0 - DBG_TIM2_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_tim2_stop(&mut self) -> DBG_TIM2_STOP_W<C2APB1FZR1_SPEC, 0> {
        DBG_TIM2_STOP_W::new(self)
    }
    #[doc = "Bit 10 - DBG_RTC_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_rtc_stop(&mut self) -> DBG_RTC_STOP_W<C2APB1FZR1_SPEC, 10> {
        DBG_RTC_STOP_W::new(self)
    }
    #[doc = "Bit 12 - DBG_IWDG_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_iwdg_stop(&mut self) -> DBG_IWDG_STOP_W<C2APB1FZR1_SPEC, 12> {
        DBG_IWDG_STOP_W::new(self)
    }
    #[doc = "Bit 21 - DBG_I2C1_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c1_stop(&mut self) -> DBG_I2C1_STOP_W<C2APB1FZR1_SPEC, 21> {
        DBG_I2C1_STOP_W::new(self)
    }
    #[doc = "Bit 22 - DBG_I2C2_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c2_stop(&mut self) -> DBG_I2C2_STOP_W<C2APB1FZR1_SPEC, 22> {
        DBG_I2C2_STOP_W::new(self)
    }
    #[doc = "Bit 23 - DBG_I2C3_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_i2c3_stop(&mut self) -> DBG_I2C3_STOP_W<C2APB1FZR1_SPEC, 23> {
        DBG_I2C3_STOP_W::new(self)
    }
    #[doc = "Bit 31 - DBG_LPTIM1_STOP"]
    #[inline(always)]
    #[must_use]
    pub fn dbg_lptim1_stop(&mut self) -> DBG_LPTIM1_STOP_W<C2APB1FZR1_SPEC, 31> {
        DBG_LPTIM1_STOP_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "DBGMCU CPU2 APB1 Peripheral Freeze Register 1 \\[dual core device\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`c2apb1fzr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`c2apb1fzr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct C2APB1FZR1_SPEC;
// Marker type tying the generic register machinery to this 32-bit register.
impl crate::RegisterSpec for C2APB1FZR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`c2apb1fzr1::R`](R) reader structure"]
impl crate::Readable for C2APB1FZR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`c2apb1fzr1::W`](W) writer structure"]
impl crate::Writable for C2APB1FZR1_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets C2APB1FZR1 to value 0"]
impl crate::Resettable for C2APB1FZR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
#[macro_use]
extern crate log;
extern crate mongo_oplog;
extern crate mongo_driver;
mod utils;
use std::sync::mpsc;
use mongo_oplog::op_source;
use mongo_oplog::op;
#[ignore]
#[test]
fn test_op_source() {
    utils::log_init();
    let pool = utils::get_mongo();
    let (rx, join_handle) = op_source::create_oplog_receiver(pool);
    // Scope the iterator so no borrow of `rx` is outstanding when we drop it.
    {
        let rx_iter = rx.iter().take(10);
        for op in rx_iter {
            trace!("{:?}", op);
        }
    }
    // Dropping the receiver lets the producer side observe disconnection.
    drop(rx);
    // `join` returns the panic payload on failure. Re-raise it with
    // `resume_unwind` instead of `panic!(err)`: panicking with a non-string
    // payload is deprecated and a hard error in edition 2021, and re-raising
    // preserves the original panic payload.
    if let Err(err) = join_handle.join() {
        std::panic::resume_unwind(err);
    }
}
/// This test exists so that `cargo test` still compiles this module
/// even though `test_op_source` above is `#[ignore]`d.
#[test]
fn force_test_compile() {
    utils::log_init();
    debug!("force_test_compile of test_op_sources");
    // Exercise the connection helper so its code path is linked and checked.
    utils::get_mongo();
    // this is here so that cargo test still compiles this module
    // even though the above is ignored.
    assert!(true, "this test module compiles");
}
|
use actix_web::dev::ServiceResponse;
use actix_web::middleware::ErrorHandlerResponse;
use actix_web::Result;
/// Error-handler hook for 500 responses: log the offending request URI to
/// stderr, then hand the response back unchanged.
pub fn internal_server_error<B>(res: ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> {
    // Log first; `res` is moved into the return value below.
    let uri = res.request().uri();
    eprintln!("INTERNAL_SERVER_ERROR: {:?}", uri);
    Ok(ErrorHandlerResponse::Response(res))
}
/// Error-handler hook for 404 responses: log the missing URI to stderr and
/// pass the response through untouched.
pub fn not_found<B>(res: ServiceResponse<B>) -> Result<ErrorHandlerResponse<B>> {
    let uri = res.request().uri();
    eprintln!("NOT_FOUND: {:?}", uri);
    Ok(ErrorHandlerResponse::Response(res))
}
|
/**
* [1] Main(search by title) // should be Videos instead
* [2] Count(search by id)
* [3] Video(search by id)
*/
// Should everything here be pub, or only the parts actually used?
// [1] Search response: the list of videos matched by title.
#[derive(Serialize, Deserialize, Debug)]
pub struct Main {
    // `None` when the response carries no matches.
    items: Option<Vec<VideosItem>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VideosItem {
    id: VideosId,
    snippet: Snippet,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VideosId {
    // camelCase name matches the JSON key directly (no serde rename).
    videoId: String,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Snippet {
    // Field names mirror the API's camelCase JSON keys as-is.
    pub publishedAt: String,
    pub channelId: String,
    pub title: String,
    pub description: Option<String>,
    pub thumbnails: Thumbnails,
    pub channelTitle: String,
    pub tags: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Thumbnails {
    pub default: Thumbnail,
    pub high: Thumbnail,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Thumbnail {
    pub url: String,
    pub width: u16,
    pub height: u16,
}
// [2] Statistics lookup by video id.
#[derive(Serialize, Deserialize, Debug)]
pub struct Count {
    pub items: Option<Vec<Item>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Item {
    pub statistics: Statistics,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct Statistics {
    // Counters are kept as `String`, matching the JSON payload's field names
    // (camelCase) and value types verbatim.
    pub viewCount: String,
    pub likeCount: String,
    pub dislikeCount: String,
    pub favoriteCount: String,
}
// [3] Single-video lookup by id.
#[derive(Serialize, Deserialize, Debug)]
pub struct Video {
    pub items: Option<Vec<VideoItem>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct VideoItem {
    // Unlike `VideosItem` (used by the search response, where the id is an
    // object), the id here is a plain string.
    pub id: String,
    pub snippet: Snippet,
}
|
use serde::{Deserialize, Serialize};
/// Response payload for the intraday-prices endpoint: one entry per time bar.
pub type PricesResponse = Vec<IntradayPrice>;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all(deserialize = "camelCase"))]
pub struct IntradayPrice {
    // Incoming JSON keys are camelCase (deserialize-only rename); these
    // snake_case names are what serialization emits.
    date: String,
    minute: String,
    label: String,
    // Optional: may be absent or null in the JSON.
    high: Option<f32>,
    low: Option<f32>,
    open: Option<f32>,
    close: Option<f32>,
    average: Option<f32>,
    volume: Option<usize>,
    notional: Option<f32>,
    // NOTE(review): not `Option` unlike its neighbours — confirm the feed
    // always supplies `numberOfTrades`.
    number_of_trades: usize,
    change_over_time: Option<f32>,
}
|
//! Note: This crate is deprecated in favour of [rspotify](https://docs.rs/rspotify).
//!
//! aspotify is an asynchronous client to the [Spotify
//! API](https://developer.spotify.com/documentation/web-api/).
//!
//! # Examples
//! ```
//! # async {
//! use aspotify::{Client, ClientCredentials};
//!
//! // This from_env function tries to read the CLIENT_ID and CLIENT_SECRET environment variables.
//! // You can use the dotenv crate to read it from a file.
//! let credentials = ClientCredentials::from_env()
//! .expect("CLIENT_ID and CLIENT_SECRET not found.");
//!
//! // Create a Spotify client.
//! let client = Client::new(credentials);
//!
//! // Gets the album "Favourite Worst Nightmare" from Spotify, with no specified market.
//! let album = client.albums().get_album("1XkGORuUX2QGOEIL4EbJKm", None).await.unwrap();
//! # };
//! ```
//!
//! # Notes
//! - Spotify often imposes limits on endpoints, for example you can't get more than 50 tracks at
//! once. This crate removes this limit by making multiple requests when necessary.
#![forbid(unsafe_code)]
#![deny(rust_2018_idioms)]
#![warn(missing_docs, clippy::pedantic)]
#![allow(
clippy::module_name_repetitions,
clippy::non_ascii_literal,
clippy::items_after_statements,
clippy::filter_map
)]
#![cfg_attr(test, allow(clippy::float_cmp))]
use std::collections::HashMap;
use std::env::{self, VarError};
use std::error::Error as StdError;
use std::ffi::OsStr;
use std::fmt::{self, Display, Formatter};
use std::time::{Duration, Instant};
use reqwest::{header, RequestBuilder, Url};
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use tokio::sync::{Mutex, MutexGuard};
pub use authorization_url::*;
pub use endpoints::*;
/// Re-export from [`isocountry`].
pub use isocountry::CountryCode;
/// Re-export from [`isolanguage_1`].
pub use isolanguage_1::LanguageCode;
pub use model::*;
mod authorization_url;
pub mod endpoints;
pub mod model;
mod util;
/// A client to the Spotify API.
///
/// By default it will use the [client credentials
/// flow](https://developer.spotify.com/documentation/general/guides/authorization-guide/#client-credentials-flow)
/// to send requests to the Spotify API. The [`set_refresh_token`](Client::set_refresh_token) and
/// [`redirected`](Client::redirected) methods tell it to use the [authorization code
/// flow](https://developer.spotify.com/documentation/general/guides/authorization-guide/#authorization-code-flow)
/// instead.
#[derive(Debug)]
pub struct Client {
    /// Your Spotify client credentials.
    pub credentials: ClientCredentials,
    // Underlying HTTP client shared by all requests.
    client: reqwest::Client,
    // Cached access token; the async mutex lets concurrent tasks share it.
    cache: Mutex<AccessToken>,
    // When true, requests and responses are dumped to stderr via dbg!/eprintln!.
    debug: bool,
}
impl Client {
/// Create a new client from your Spotify client credentials.
#[must_use]
pub fn new(credentials: ClientCredentials) -> Self {
Self {
credentials,
client: reqwest::Client::new(),
cache: Mutex::new(AccessToken::new(None)),
debug: false,
}
}
/// Create a new client with your Spotify client credentials and a refresh token.
#[must_use]
pub fn with_refresh(credentials: ClientCredentials, refresh_token: String) -> Self {
Self {
credentials,
client: reqwest::Client::new(),
cache: Mutex::new(AccessToken::new(Some(refresh_token))),
debug: false,
}
}
/// Get the client's refresh token.
pub async fn refresh_token(&self) -> Option<String> {
self.cache.lock().await.refresh_token.clone()
}
/// Set the client's refresh token.
pub async fn set_refresh_token(&self, refresh_token: Option<String>) {
self.cache.lock().await.refresh_token = refresh_token;
}
/// Get the client's access token values.
pub async fn current_access_token(&self) -> (String, Instant) {
let cache = self.cache.lock().await;
(cache.token.clone(), cache.expires)
}
/// Explicitly override the client's access token values. Useful if you acquire the
/// access token elsewhere.
pub async fn set_current_access_token(&self, token: String, expires: Instant) {
let mut cache = self.cache.lock().await;
cache.token = token;
cache.expires = expires;
}
async fn token_request(&self, params: TokenRequest<'_>) -> Result<AccessToken, Error> {
let request = self
.client
.post("https://accounts.spotify.com/api/token")
.basic_auth(&self.credentials.id, Some(&self.credentials.secret))
.form(¶ms)
.build()?;
if self.debug {
dbg!(&request, body_str(&request));
}
let response = self.client.execute(request).await?;
let status = response.status();
let text = response.text().await?;
if !status.is_success() {
if self.debug {
eprintln!(
"Authentication failed ({}). Response body is '{}'",
status, text
);
}
return Err(Error::Auth(serde_json::from_str(&text)?));
}
if self.debug {
dbg!(status);
eprintln!("Authentication response body is '{}'", text);
}
Ok(serde_json::from_str(&text)?)
}
/// Set the refresh token from the URL the client was redirected to and the state that was used
/// to send them there.
///
/// Use the [`authorization_url()`] function to generate the URL to which you can send the
/// client to to generate the URL here.
///
/// # Errors
///
/// Fails if the URL is invalid in some way, the state was incorrect for the URL or Spotify
/// fails.
pub async fn redirected(&self, url: &str, state: &str) -> Result<(), RedirectedError> {
let url = Url::parse(url)?;
let pairs: HashMap<_, _> = url.query_pairs().collect();
if pairs
.get("state")
.map_or(true, |url_state| url_state != state)
{
return Err(RedirectedError::IncorrectState);
}
if let Some(error) = pairs.get("error") {
return Err(RedirectedError::AuthFailed(error.to_string()));
}
let code = pairs
.get("code")
.ok_or_else(|| RedirectedError::AuthFailed(String::new()))?;
let token = self
.token_request(TokenRequest::AuthorizationCode {
code: &*code,
redirect_uri: &url[..url::Position::AfterPath],
})
.await?;
*self.cache.lock().await = token;
Ok(())
}
async fn access_token(&self) -> Result<MutexGuard<'_, AccessToken>, Error> {
let mut cache = self.cache.lock().await;
if Instant::now() >= cache.expires {
*cache = match cache.refresh_token.take() {
// Authorization code flow
Some(refresh_token) => {
let mut token = self
.token_request(TokenRequest::RefreshToken {
refresh_token: &refresh_token,
})
.await?;
token.refresh_token = Some(refresh_token);
token
}
// Client credentials flow
None => self.token_request(TokenRequest::ClientCredentials).await?,
}
}
Ok(cache)
}
async fn send_text(&self, request: RequestBuilder) -> Result<Response<String>, Error> {
let request = request
.bearer_auth(&self.access_token().await?.token)
.build()?;
if self.debug {
dbg!(&request, body_str(&request));
}
let response = loop {
let response = self.client.execute(request.try_clone().unwrap()).await?;
if response.status() != 429 {
break response;
}
let wait = response
.headers()
.get(header::RETRY_AFTER)
.and_then(|val| val.to_str().ok())
.and_then(|secs| secs.parse::<u64>().ok());
// 2 seconds is default retry after time; should never be used if the Spotify API and
// my code are both correct.
let wait = wait.unwrap_or(2);
tokio::time::sleep(std::time::Duration::from_secs(wait)).await;
};
let status = response.status();
let cache_control = Duration::from_secs(
response
.headers()
.get_all(header::CACHE_CONTROL)
.iter()
.filter_map(|value| value.to_str().ok())
.flat_map(|value| value.split(|c| c == ','))
.find_map(|value| {
let mut parts = value.trim().splitn(2, '=');
if parts.next().unwrap().eq_ignore_ascii_case("max-age") {
parts.next().and_then(|max| max.parse::<u64>().ok())
} else {
None
}
})
.unwrap_or_default(),
);
let data = response.text().await?;
if !status.is_success() {
if self.debug {
eprintln!("Failed ({}). Response body is '{}'", status, data);
}
return Err(Error::Endpoint(serde_json::from_str(&data)?));
}
if self.debug {
dbg!(status);
eprintln!("Response body is '{}'", data);
}
Ok(Response {
data,
expires: Instant::now() + cache_control,
})
}
async fn send_empty(&self, request: RequestBuilder) -> Result<(), Error> {
self.send_text(request).await?;
Ok(())
}
async fn send_opt_json<T: DeserializeOwned>(
&self,
request: RequestBuilder,
) -> Result<Response<Option<T>>, Error> {
let res = self.send_text(request).await?;
Ok(Response {
data: if res.data.is_empty() {
None
} else {
serde_json::from_str(&res.data)?
},
expires: res.expires,
})
}
async fn send_json<T: DeserializeOwned>(
&self,
request: RequestBuilder,
) -> Result<Response<T>, Error> {
let res = self.send_text(request).await?;
Ok(Response {
data: serde_json::from_str(&res.data)?,
expires: res.expires,
})
}
async fn send_snapshot_id(&self, request: RequestBuilder) -> Result<String, Error> {
#[derive(Deserialize)]
struct SnapshotId {
snapshot_id: String,
}
Ok(self
.send_json::<SnapshotId>(request)
.await?
.data
.snapshot_id)
}
}
/// The result of a request to a Spotify endpoint.
// `Copy` only applies when `T: Copy` (derive bounds are conditional).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Response<T> {
    /// The data itself.
    pub data: T,
    /// When the cache expires.
    pub expires: Instant,
}
impl<T> Response<T> {
    /// Transform the payload with `f`, keeping the cache-expiry instant.
    pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Response<U> {
        let Response { data, expires } = self;
        Response {
            data: f(data),
            expires,
        }
    }
}
/// An object that holds your Spotify Client ID and Client Secret.
///
/// See [the Spotify guide on Spotify
/// apps](https://developer.spotify.com/documentation/general/guides/app-settings/) for how to get
/// these.
///
/// # Examples
///
/// ```no_run
/// use aspotify::ClientCredentials;
///
/// // Create from inside the program.
/// let credentials = ClientCredentials {
///     id: "your client id here".to_owned(),
///     secret: "your client secret here".to_owned()
/// };
///
/// // Create from CLIENT_ID and CLIENT_SECRET environment variables
/// let credentials = ClientCredentials::from_env()
///     .expect("CLIENT_ID or CLIENT_SECRET environment variables not set");
///
/// // Or use custom env var names
/// let credentials = ClientCredentials::from_env_vars("SPOTIFY_ID", "SPOTIFY_SECRET")
///     .expect("SPOTIFY_ID or SPOTIFY_SECRET environment variables not set");
/// ```
// NOTE(review): `#[derive(Debug)]` will print the secret verbatim if this
// struct is logged — confirm that is acceptable for this crate.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ClientCredentials {
    /// The Client ID.
    pub id: String,
    /// The Client Secret.
    pub secret: String,
}
impl ClientCredentials {
    /// Attempts to create a `ClientCredentials` by reading the two given
    /// environment variables.
    ///
    /// # Errors
    ///
    /// Fails if either variable is unset or not valid unicode.
    pub fn from_env_vars<I: AsRef<OsStr>, S: AsRef<OsStr>>(
        client_id: I,
        client_secret: S,
    ) -> Result<Self, VarError> {
        let id = env::var(client_id)?;
        let secret = env::var(client_secret)?;
        Ok(Self { id, secret })
    }
    /// Attempts to create a `ClientCredentials` from the `CLIENT_ID` and
    /// `CLIENT_SECRET` environment variables.
    ///
    /// Equivalent to `ClientCredentials::from_env_vars("CLIENT_ID", "CLIENT_SECRET")`.
    ///
    /// # Errors
    ///
    /// Fails if either variable is unset or not valid unicode.
    pub fn from_env() -> Result<Self, VarError> {
        Self::from_env_vars("CLIENT_ID", "CLIENT_SECRET")
    }
}
/// An error caused by the [`Client::redirected`] function.
#[derive(Debug)]
pub enum RedirectedError {
    /// The URL is malformed.
    InvalidUrl(url::ParseError),
    /// The URL has no state parameter, or the state parameter was incorrect.
    IncorrectState,
    /// The user has not accepted the request or an error occurred in Spotify.
    ///
    /// This contains the string returned by Spotify in the `error` parameter.
    AuthFailed(String),
    /// An error occurred getting the access token.
    Token(Error),
}
impl From<url::ParseError> for RedirectedError {
fn from(error: url::ParseError) -> Self {
Self::InvalidUrl(error)
}
}
impl From<Error> for RedirectedError {
fn from(error: Error) -> Self {
Self::Token(error)
}
}
impl Display for RedirectedError {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        // `Token` delegates to the inner error; the rest use fixed messages.
        let msg = match self {
            Self::InvalidUrl(_) => "malformed redirect URL",
            Self::IncorrectState => "state parameter not found or is incorrect",
            Self::AuthFailed(_) => "authorization failed",
            Self::Token(e) => return e.fmt(f),
        };
        f.write_str(msg)
    }
}
impl StdError for RedirectedError {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        // Only the variants wrapping another error expose a source.
        match self {
            Self::InvalidUrl(e) => Some(e),
            Self::Token(e) => Some(e),
            _ => None,
        }
    }
}
/// Form body for Spotify's token endpoint; the `grant_type` field is derived
/// from the variant name via serde's internal tagging.
#[derive(Debug, Serialize)]
#[serde(tag = "grant_type", rename_all = "snake_case")]
enum TokenRequest<'a> {
    RefreshToken {
        // `&str` instead of the previous `&String`: serializes identically
        // and avoids double indirection; the construction site's
        // `&refresh_token` (a `&String`) deref-coerces at the field.
        refresh_token: &'a str,
    },
    ClientCredentials,
    AuthorizationCode {
        code: &'a str,
        redirect_uri: &'a str,
    },
}
// Deserialized body of a successful token-endpoint response.
#[derive(Debug, Deserialize)]
struct AccessToken {
    #[serde(rename = "access_token")]
    token: String,
    // The wire format carries a relative `expires_in` in seconds; the
    // custom deserializer converts it to an absolute `Instant`.
    #[serde(
        rename = "expires_in",
        deserialize_with = "util::deserialize_instant_seconds"
    )]
    expires: Instant,
    // `#[serde(default)]`: the response may omit this field entirely.
    #[serde(default)]
    refresh_token: Option<String>,
}
impl AccessToken {
    /// Build a placeholder token that is already expired, so the first
    /// `Instant::now() >= expires` check forces a real token fetch.
    fn new(refresh_token: Option<String>) -> Self {
        let now = Instant::now();
        Self {
            token: String::new(),
            // `Instant` subtraction panics if it would precede the clock's
            // origin (possible very soon after boot); `checked_sub` with a
            // fallback of `now` is equally "already expired" for a
            // `now >= expires` comparison.
            expires: now.checked_sub(Duration::from_secs(1)).unwrap_or(now),
            refresh_token,
        }
    }
}
/// Get the contents of a request body as a string. This is only used for
/// debugging purposes.
fn body_str(req: &reqwest::Request) -> Option<&str> {
    let body = req.body()?;
    // Streaming bodies and non-UTF-8 bodies get placeholder labels.
    Some(match body.as_bytes() {
        Some(bytes) => std::str::from_utf8(bytes).unwrap_or("opaque bytes"),
        None => "stream",
    })
}
|
use std::panic;
use rocket::{self, http::{Header, Status}, local::Client};
use diesel::connection::SimpleConnection;
use horus_server::{self, routes::files::*};
use test::{run_test, sql::*};
/// Fetching the file seeded by `setup_db` returns 200 and serves its name.
#[test]
fn get()
{
    run(|| {
        let client = get_client();
        let req = client.get("/file/".to_string() + FILE_ID);
        let mut response = req.dispatch();
        assert_eq!(response.status(), Status::Ok);
        assert!(response.body_string().unwrap().contains(FILE_NAME));
    });
}
/// Uploading a file returns 201 with a Location header, and fetching that
/// location serves the uploaded filename.
#[test]
fn new()
{
    run(|| {
        let client = get_client();
        let filecontent = "dummycontent";
        let new_fname = "filename123";
        // The filename travels in the content-disposition header.
        let req = client
            .post("/file/new")
            .header(auth_header())
            .header(Header::new("content-type", "application/octet-stream"))
            .header(Header::new("content-disposition", new_fname))
            .body(filecontent);
        let response = req.dispatch();
        assert_eq!(response.status(), Status::Created);
        // Round-trip: the Created response points at the new resource.
        let loc = response.headers().get_one("location").unwrap();
        let mut response = client.get(loc).dispatch();
        assert!(response.body_string().unwrap().contains(new_fname));
    });
}
/// Same as `new`, but via the expiring-upload route (`/hours/1`); the file
/// must still be retrievable immediately after creation.
#[test]
fn new_exp()
{
    run(|| {
        let client = get_client();
        let filecontent = "dummycontent";
        let new_fname = "filename123";
        let req = client
            .post("/file/new/hours/1")
            .header(auth_header())
            .header(Header::new("content-type", "application/octet-stream"))
            .header(Header::new("content-disposition", new_fname))
            .body(filecontent);
        let response = req.dispatch();
        assert_eq!(response.status(), Status::Created);
        let loc = response.headers().get_one("location").unwrap();
        let mut response = client.get(loc).dispatch();
        assert!(response.body_string().unwrap().contains(new_fname));
    });
}
/// Deleting the seeded file succeeds, after which fetching it yields 404.
#[test]
fn delete()
{
    run(|| {
        let client = get_client();
        let req = client
            .delete("/file/".to_string() + FILE_ID)
            .header(auth_header());
        let response = req.dispatch();
        assert_eq!(response.status(), Status::Ok);
        let response = client.get("/file/".to_string() + FILE_ID).dispatch();
        assert_eq!(response.status(), Status::NotFound);
    });
}
/// Listing a user's files includes both the freshly uploaded file and the
/// one seeded by `setup_db`.
#[test]
fn list()
{
    run(|| {
        let client = get_client();
        // Upload a second file so the listing has more than one entry.
        let req = client
            .post("/file/new")
            .header(auth_header())
            .header(Header::new("content-disposition", "fileabc"))
            .header(Header::new("content-type", "application/octet-stream"));
        let response = req.dispatch();
        assert_eq!(response.status(), Status::Created);
        let req = client
            .get(format!("/file/{}/list/0", USER_ID))
            .header(auth_header());
        let mut response = req.dispatch();
        assert_eq!(response.status(), Status::Ok);
        let bs = response.body_string().unwrap();
        assert!(bs.contains("fileabc"));
        assert!(bs.contains(FILE_NAME));
    });
}
/// Wrap `test` with database setup/teardown via the shared `run_test` harness.
fn run<T>(test: T) -> ()
where
    T: FnOnce() -> () + panic::UnwindSafe,
{
    run_test(test, setup_db, unsetup_db);
}
/// Seed a user, a license, and a file in one batched SQL statement.
fn setup_db()
{
    let conn = horus_server::dbtools::get_db_conn_requestless().unwrap();
    let setup_sql = [
        sql_insert_user(),
        sql_insert_license(),
        sql_insert_file(),
    ]
    .concat();
    conn.batch_execute(&setup_sql).unwrap();
}
/// Tear down test data. Deleting the user is enough: everything else
/// cascades from it.
fn unsetup_db()
{
    horus_server::dbtools::get_db_conn_requestless()
        .unwrap()
        .batch_execute(&sql_delete_user())
        .unwrap();
}
/// Build a Rocket test client with the file routes mounted, templating
/// attached, and a managed DB pool.
fn get_client() -> Client
{
    use rocket_contrib::Template;
    let rocket = rocket::ignite()
        .attach(Template::fairing())
        .mount("/file", routes![get, list, new, new_exp, delete])
        .manage(horus_server::dbtools::init_pool());
    Client::new(rocket).expect("valid rocket instance")
}
|
use anyhow::Result;
use bevy::ecs::{
component::Component,
entity::{EntityMap, MapEntities},
world::{EntityMut, World},
};
///////////////////////////////////////////////////////////////////////////////
/// Remaps entity references for one component type across the whole world.
pub type MapWorldComponentsFn = fn(&mut World, &EntityMap) -> Result<()>;
/// Remaps entity references for one component type on a single entity.
pub type MapEntityComponentsFn = fn(&mut EntityMut, &EntityMap) -> Result<()>;
/// Registry of entity-remapping functions, one pair per registered component
/// type (see `register`).
#[derive(Default)]
pub(crate) struct ComponentEntityMapperRegistry {
    // World-wide remappers, one per registered component type.
    world: Vec<MapWorldComponentsFn>,
    // Single-entity remappers, one per registered component type.
    entity: Vec<MapEntityComponentsFn>,
}
impl ComponentEntityMapperRegistry {
    /// Map entities on all registered components in the entire [`World`]
    pub fn map_world_components(&self, world: &mut World, entity_map: &EntityMap) -> Result<()> {
        for map in &self.world {
            // `entity_map` is already a reference — pass it through directly
            // (the previous `&entity_map` was a needless re-borrow).
            map(world, entity_map)?;
        }
        Ok(())
    }
    /// Map entities on all registered components for a single [`Entity`]
    pub fn map_entity_components(
        &self,
        entity: &mut EntityMut,
        entity_map: &EntityMap,
    ) -> Result<()> {
        for map in &self.entity {
            map(entity, entity_map)?;
        }
        Ok(())
    }
    /// Register a component that references other entities
    pub fn register<T>(&mut self)
    where
        T: Component + MapEntities + Clone,
    {
        // Maps entities in all components of type `T` across the world.
        self.world.push(|world, entity_map| {
            let mut query = world.query::<&mut T>();
            for mut component in query.iter_mut(world) {
                component.map_entities(entity_map)?;
            }
            Ok(())
        });
        // Maps entities in this component type for a single entity.
        self.entity.push(|entity, entity_map| {
            if let Some(mut component) = entity.get_mut::<T>() {
                component.map_entities(entity_map)?;
            }
            Ok(())
        });
    }
}
|
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
use super::{Genre, Language, ProductionCompany, ProductionCountry};
/// Details from searching for [`Movie`] by name
#[derive(Serialize, Deserialize, Debug)]
pub struct Movie {
    /// The path to the poster for this movie
    pub poster_path: Option<String>,
    /// Whether this is an adult movie or not
    pub adult: bool,
    /// A brief overview of this Movie
    pub overview: String,
    /// When this movie was released
    pub release_date: NaiveDate,
    /// The genre IDs for this movie
    #[serde(default)]
    pub genre_ids: Vec<i64>,
    /// The id for this movie
    pub id: i64,
    /// The original title for this movie
    pub original_title: String,
    /// The original language for this movie
    ///
    /// NOTE: the Rust field name has a typo (`lnguage`), kept so existing
    /// callers keep compiling; the serde rename maps it to the correct
    /// TMDB JSON key `original_language`, which previously never matched
    /// and therefore always deserialized as `None`.
    #[serde(rename = "original_language")]
    pub original_lnguage: Option<String>,
    /// The title of this movie
    pub title: String,
    /// The path to the backdrop for this movie
    pub backdrop_path: Option<String>,
    /// The popularity of this movie
    pub popularity: f64,
    /// The number of votes for this movie
    pub vote_count: u64,
    /// Whether this is not a movie but another type of longer video
    pub video: bool,
    /// The average vote for this movie
    pub vote_average: f64,
}
/// A cursor from a movie search
///
/// Pages are 1-indexed by the API; compare `page` against `total_pages` to
/// decide whether more results remain. — NOTE(review): 1-indexing assumed
/// from TMDB convention, confirm against the client code.
#[derive(Serialize, Deserialize, Debug)]
pub struct MovieList {
    /// What page in the search this cursor is for
    pub page: u64,
    /// The movies found
    pub results: Vec<Movie>,
    /// The total number of results found with this search
    pub total_results: u64,
    /// The total number of pages found with this search
    pub total_pages: u64,
}
/// Details on a single movie, as returned by the movie-details endpoint.
#[derive(Serialize, Deserialize, Debug)]
pub struct MovieDetails {
    /// Whether this movie is an adult movie or not
    pub adult: bool,
    /// The path the backdrop for this movie can be found at
    pub backdrop_path: Option<String>,
    /// Whether this movie belongs to a collection or not
    // NOTE(review): TMDB documents this field as an object (or null), not a
    // boolean — confirm `Option<bool>` actually deserializes here.
    pub belongs_to_collection: Option<bool>,
    /// The budget for this movie
    pub budget: i64,
    /// The list of genres this movie is apart of
    pub genres: Vec<Genre>,
    /// The homepage for this Movie
    pub homepage: Option<String>,
    /// The id for this movie
    pub id: i64,
    /// The imdb ID for this movie
    pub imdb_id: Option<String>,
    /// The Original language this was in
    pub original_language: String,
    /// The original title of this movie
    pub original_title: String,
    /// An overview for this movie
    pub overview: Option<String>,
    /// The popularity of this movie
    pub popularity: f64,
    /// The path to the poster for this movie
    pub poster_path: Option<String>,
    /// The production companies involved in making this movie
    pub production_companies: Vec<ProductionCompany>,
    /// The countries this movie was produced in
    pub production_countries: Vec<ProductionCountry>,
    /// When this movie was released
    pub release_date: NaiveDate,
    /// How much this movie made in revenue
    pub revenue: i64,
    /// The total runtime of this movie in minutes
    pub runtime: Option<i64>,
    /// The languages spoken in this movie
    pub spoken_languages: Vec<Language>,
    /// The current status of this movie
    pub status: String,
    /// The tag line or slogan for this movie
    pub tagline: Option<String>,
    /// The title of this movie
    pub title: String,
    /// Whether this is not a movie but another type of longer video
    pub video: bool,
    /// The average vote for this movie
    pub vote_average: f64,
    /// The number of votes for this movie
    pub vote_count: i64,
}
|
#![allow(dead_code,unused_imports,unused_must_use,unused_assignments)]
#![allow(deprecated)]
#![feature(slicing_syntax)]
//! Run fuzzing against the `dns::hp` parser.
extern crate dns;
extern crate time;
use std::io::net::udp::UdpSocket;
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::BufReader;
use std::io;
use std::rand::{Rng,StdRng,SeedableRng,random};
use std::sync::{Arc,Mutex};
//use dns::msg::{DNSMessageReader,Message};
//use dns::msg::record::{Question,ResourceRecord};
//use dns::number;
use dns::hp::read_dns_message;
/// Entry point: run both fuzz strategies with a randomly seeded RNG.
/// (This file is pre-1.0 Rust, ~2014: `box`, `uint`, `range()` etc.)
fn main() {
    println!("===================");
    println!("Rust DNS Fuzz Tests");
    println!("===================\n");
    // Pre-1.0 syntax: `box [0u8, ..N]` heap-allocates a zeroed 10 MB array.
    let mut buf = box [0u8, ..10000000u];
    // Use random seed for actual binary
    let mut rng: StdRng = StdRng::new().ok().unwrap();
    rand_buf(buf.as_mut_slice(), &mut rng);
    do_fuzz(buf.as_slice(), &mut rng, 10000000u);
    // Mutation fuzzing starts from a known-good captured packet.
    let pkt = include_bin!("../../tests/packets/net1-rs.bin");
    do_fuzz_rewrite(pkt, buf.as_mut_slice(), &mut rng, 200000u);
}
/// Random-bytes fuzzing with a fixed RNG seed so the test is reproducible.
#[test]
fn test_dns_hp_fuzzing() {
    let mut buf = [0u8, ..1000000u];
    // Use a fixed seed for integration tests
    let seed: &[_] = &[508, 53, 284, 224, 173, 23, 572, 634, 439, 983];
    let mut rng: StdRng = SeedableRng::from_seed(seed);
    rand_buf(buf.as_mut_slice(), &mut rng);
    do_fuzz(buf.as_slice(), &mut rng, 1000000u);
}
/// Mutation fuzzing of a captured packet with a fixed, reproducible seed.
#[test]
fn test_dns_hp_fuzzing_rewrite() {
    let pkt = include_bin!("../../tests/packets/net1-rs.bin");
    let mut buf = [0u8, ..1000000u];
    // Use a fixed seed for integration tests
    let seed: &[_] = &[508, 53, 284, 224, 173, 23, 572, 634, 439, 983];
    let mut rng: StdRng = SeedableRng::from_seed(seed);
    rand_buf(buf.as_mut_slice(), &mut rng);
    do_fuzz_rewrite(pkt, buf.as_mut_slice(), &mut rng, 100000u);
}
/// Fill `buf` with bytes drawn from `rng`.
fn rand_buf(buf: &mut [u8], rng: &mut StdRng) {
    for i in range(0, buf.len()) {
        buf[i] = rng.gen::<u8>();
    }
}
/// Copy a packet into an owned, mutable Vec (pre-1.0 equivalent of
/// `pkt.to_vec()`), so each iteration can corrupt a fresh copy.
fn rewrite(pkt: &[u8]) -> Vec<u8> {
    let mut v: Vec<u8> = Vec::with_capacity(pkt.len());
    for i in range(0, pkt.len()) {
        v.push(pkt[i]);
    }
    v
}
/// Mutation fuzzing: each iteration copies the valid packet, overwrites a
/// random subrange with random bytes, and feeds it to the parser.
/// `buf` is only used for the banner line here.
fn do_fuzz_rewrite(pkt: &[u8], buf: &[u8], rng: &mut StdRng, iters: uint) {
    println!("Fuzz-RW: {} bytes of random data generated", buf.len()); io::stdio::flush();
    // Init counters
    let start = time::precise_time_ns();
    let mut oks = 0u;
    let mut errs = 0u;
    let mut tsz = 0u;
    let plen = pkt.len();
    for _ in range(0, iters) {
        let mut pkt2 = rewrite(pkt);
        // Generate start and end bounds
        // (s < plen, so (plen - s) >= 1 and the modulus is never zero)
        let s = rng.gen::<uint>() % plen;
        let e = s+(rng.gen::<uint>() % (plen - s));
        for j in range(s, e) {
            pkt2[j] = random::<u8>();
        }
        // Test the current slice
        let (oki, eri) = fuzz(pkt2.as_slice());
        oks += oki;
        errs += eri;
        tsz += e-s;
    }
    // Calc results
    let end = time::precise_time_ns();
    let elapsed = (end - start) as f64 / 1000000000.;
    let avg_size = (tsz) as f64 / iters as f64;
    println!("Fuzz: completed {} iterations in {}s ({} ok/{} err/rw {} byte avg)", iters, elapsed, oks, errs, avg_size);
}
/// Random-bytes fuzzing: each iteration parses a random slice (up to 1 KB)
/// of the pre-randomized buffer and tallies ok/err outcomes.
fn do_fuzz(buf: &[u8], rng: &mut StdRng, iters: uint) {
    println!("Fuzz: {} bytes of random data generated", buf.len()); io::stdio::flush();
    // Init counters
    let start = time::precise_time_ns();
    let mut oks = 0u;
    let mut errs = 0u;
    let mut tsz = 0u;
    // Loop variable was unused (and a stray no-op `i+1;` statement has been
    // removed), so iterate with `_`.
    for _ in range(0, iters) {
        // Generate start and end bounds; the -1025 headroom keeps `e` (at
        // most s+1023) inside the buffer.
        let s = rng.gen::<uint>() % (buf.len() - 1025);
        let e = s+rng.gen::<uint>() % 1024;
        // Test the current slice
        let (oki, eri) = fuzz(buf[s..e]);
        tsz += e - s;
        oks += oki;
        errs += eri;
    }
    // Calc results
    let end = time::precise_time_ns();
    let elapsed = (end - start) as f64 / 1000000000.;
    let avg_size = (tsz) as f64 / iters as f64;
    println!("Fuzz: completed {} iterations in {}s ({} ok/{} err/{} byte avg)", iters, elapsed, oks, errs, avg_size);
}
/// Try to parse `buf` as a DNS message; returns `(ok, err)` counts —
/// `(1, 0)` on successful parse, `(0, 1)` on parse failure.
fn fuzz(buf: &[u8]) -> (uint, uint) {
    let mut oks = 0u;
    let mut errs = 0u;
    let m = read_dns_message(buf);
    match m {
        Ok(_) => oks += 1,
        Err(_) => errs += 1,
    };
    (oks, errs)
}
// svd2rust-generated reader/writer handles for the SR1 status register.
#[doc = "Register `SR1` reader"]
pub type R = crate::R<SR1_SPEC>;
#[doc = "Register `SR1` writer"]
pub type W = crate::W<SR1_SPEC>;
// Generated reader API for the SB (start bit, master mode) flag.
#[doc = "Field `SB` reader - Start bit (Master mode)"]
pub type SB_R = crate::BitReader<SB_A>;
#[doc = "Start bit (Master mode)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SB_A {
    #[doc = "0: No Start condition"]
    NoStart = 0,
    #[doc = "1: Start condition generated"]
    Start = 1,
}
impl From<SB_A> for bool {
    #[inline(always)]
    fn from(variant: SB_A) -> Self {
        variant as u8 != 0
    }
}
impl SB_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SB_A {
        match self.bits {
            false => SB_A::NoStart,
            true => SB_A::Start,
        }
    }
    #[doc = "No Start condition"]
    #[inline(always)]
    pub fn is_no_start(&self) -> bool {
        *self == SB_A::NoStart
    }
    #[doc = "Start condition generated"]
    #[inline(always)]
    pub fn is_start(&self) -> bool {
        *self == SB_A::Start
    }
}
// Generated reader API for the ADDR (address sent/matched) flag.
#[doc = "Field `ADDR` reader - Address sent (master mode)/matched (slave mode)"]
pub type ADDR_R = crate::BitReader<ADDR_A>;
#[doc = "Address sent (master mode)/matched (slave mode)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ADDR_A {
    #[doc = "0: Address mismatched or not received"]
    NotMatch = 0,
    #[doc = "1: Received slave address matched with one of the enabled slave addresses"]
    Match = 1,
}
impl From<ADDR_A> for bool {
    #[inline(always)]
    fn from(variant: ADDR_A) -> Self {
        variant as u8 != 0
    }
}
impl ADDR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ADDR_A {
        match self.bits {
            false => ADDR_A::NotMatch,
            true => ADDR_A::Match,
        }
    }
    #[doc = "Address mismatched or not received"]
    #[inline(always)]
    pub fn is_not_match(&self) -> bool {
        *self == ADDR_A::NotMatch
    }
    #[doc = "Received slave address matched with one of the enabled slave addresses"]
    #[inline(always)]
    pub fn is_match(&self) -> bool {
        *self == ADDR_A::Match
    }
}
// Generated reader API for the BTF (byte transfer finished) flag.
#[doc = "Field `BTF` reader - Byte transfer finished"]
pub type BTF_R = crate::BitReader<BTF_A>;
#[doc = "Byte transfer finished\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BTF_A {
    #[doc = "0: Data byte transfer not done"]
    NotFinished = 0,
    #[doc = "1: Data byte transfer successful"]
    Finished = 1,
}
impl From<BTF_A> for bool {
    #[inline(always)]
    fn from(variant: BTF_A) -> Self {
        variant as u8 != 0
    }
}
impl BTF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> BTF_A {
        match self.bits {
            false => BTF_A::NotFinished,
            true => BTF_A::Finished,
        }
    }
    #[doc = "Data byte transfer not done"]
    #[inline(always)]
    pub fn is_not_finished(&self) -> bool {
        *self == BTF_A::NotFinished
    }
    #[doc = "Data byte transfer successful"]
    #[inline(always)]
    pub fn is_finished(&self) -> bool {
        *self == BTF_A::Finished
    }
}
// ADD10 has no enumerated values in the SVD, so it is a plain bit reader.
#[doc = "Field `ADD10` reader - 10-bit header sent (Master mode)"]
pub type ADD10_R = crate::BitReader;
// Generated reader API for the STOPF (stop detection, slave mode) flag.
#[doc = "Field `STOPF` reader - Stop detection (slave mode)"]
pub type STOPF_R = crate::BitReader<STOPF_A>;
#[doc = "Stop detection (slave mode)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum STOPF_A {
    #[doc = "0: No Stop condition detected"]
    NoStop = 0,
    #[doc = "1: Stop condition detected"]
    Stop = 1,
}
impl From<STOPF_A> for bool {
    #[inline(always)]
    fn from(variant: STOPF_A) -> Self {
        variant as u8 != 0
    }
}
impl STOPF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> STOPF_A {
        match self.bits {
            false => STOPF_A::NoStop,
            true => STOPF_A::Stop,
        }
    }
    #[doc = "No Stop condition detected"]
    #[inline(always)]
    pub fn is_no_stop(&self) -> bool {
        *self == STOPF_A::NoStop
    }
    #[doc = "Stop condition detected"]
    #[inline(always)]
    pub fn is_stop(&self) -> bool {
        *self == STOPF_A::Stop
    }
}
// RxNE (receive data register not empty) flag: reader type, enum, and predicates.
#[doc = "Field `RxNE` reader - Data register not empty (receivers)"]
pub type RX_NE_R = crate::BitReader<RX_NE_A>;
#[doc = "Data register not empty (receivers)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum RX_NE_A {
    #[doc = "0: Data register empty"]
    Empty = 0,
    #[doc = "1: Data register not empty"]
    NotEmpty = 1,
}
impl From<RX_NE_A> for bool {
    #[inline(always)]
    fn from(variant: RX_NE_A) -> Self {
        variant as u8 != 0
    }
}
impl RX_NE_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RX_NE_A {
        match self.bits {
            false => RX_NE_A::Empty,
            true => RX_NE_A::NotEmpty,
        }
    }
    #[doc = "Data register empty"]
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        *self == RX_NE_A::Empty
    }
    #[doc = "Data register not empty"]
    #[inline(always)]
    pub fn is_not_empty(&self) -> bool {
        *self == RX_NE_A::NotEmpty
    }
}
// TxE (transmit data register empty) flag. Note the polarity is inverted
// relative to RxNE: here 1 means "empty".
#[doc = "Field `TxE` reader - Data register empty (transmitters)"]
pub type TX_E_R = crate::BitReader<TX_E_A>;
#[doc = "Data register empty (transmitters)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TX_E_A {
    #[doc = "0: Data register not empty"]
    NotEmpty = 0,
    #[doc = "1: Data register empty"]
    Empty = 1,
}
impl From<TX_E_A> for bool {
    #[inline(always)]
    fn from(variant: TX_E_A) -> Self {
        variant as u8 != 0
    }
}
impl TX_E_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TX_E_A {
        match self.bits {
            false => TX_E_A::NotEmpty,
            true => TX_E_A::Empty,
        }
    }
    #[doc = "Data register not empty"]
    #[inline(always)]
    pub fn is_not_empty(&self) -> bool {
        *self == TX_E_A::NotEmpty
    }
    #[doc = "Data register empty"]
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        *self == TX_E_A::Empty
    }
}
// BERR (bus error) flag: reader side plus a write-side "clear" helper.
#[doc = "Field `BERR` reader - Bus error"]
pub type BERR_R = crate::BitReader<BERRR_A>;
#[doc = "Bus error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BERRR_A {
    #[doc = "0: No misplaced Start or Stop condition"]
    NoError = 0,
    #[doc = "1: Misplaced Start or Stop condition"]
    Error = 1,
}
impl From<BERRR_A> for bool {
    #[inline(always)]
    fn from(variant: BERRR_A) -> Self {
        variant as u8 != 0
    }
}
impl BERR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> BERRR_A {
        match self.bits {
            false => BERRR_A::NoError,
            true => BERRR_A::Error,
        }
    }
    #[doc = "No misplaced Start or Stop condition"]
    #[inline(always)]
    pub fn is_no_error(&self) -> bool {
        *self == BERRR_A::NoError
    }
    #[doc = "Misplaced Start or Stop condition"]
    #[inline(always)]
    pub fn is_error(&self) -> bool {
        *self == BERRR_A::Error
    }
}
// Write-side enum: the only meaningful write is 0 ("clear flag"); the writer
// type is BitWriter0C (cleared by writing zero).
#[doc = "Bus error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum BERRW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<BERRW_AW> for bool {
    #[inline(always)]
    fn from(variant: BERRW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `BERR` writer - Bus error"]
pub type BERR_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, BERRW_AW>;
impl<'a, REG, const O: u8> BERR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(BERRW_AW::Clear)
    }
}
// ARLO (arbitration lost, master mode) flag: reader side plus write-side clear helper.
#[doc = "Field `ARLO` reader - Arbitration lost (master mode)"]
pub type ARLO_R = crate::BitReader<ARLOR_A>;
#[doc = "Arbitration lost (master mode)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ARLOR_A {
    #[doc = "0: No Arbitration Lost detected"]
    NoLost = 0,
    #[doc = "1: Arbitration Lost detected"]
    Lost = 1,
}
impl From<ARLOR_A> for bool {
    #[inline(always)]
    fn from(variant: ARLOR_A) -> Self {
        variant as u8 != 0
    }
}
impl ARLO_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> ARLOR_A {
        match self.bits {
            false => ARLOR_A::NoLost,
            true => ARLOR_A::Lost,
        }
    }
    #[doc = "No Arbitration Lost detected"]
    #[inline(always)]
    pub fn is_no_lost(&self) -> bool {
        *self == ARLOR_A::NoLost
    }
    #[doc = "Arbitration Lost detected"]
    #[inline(always)]
    pub fn is_lost(&self) -> bool {
        *self == ARLOR_A::Lost
    }
}
// Write side: only "clear" (write 0) is meaningful, via BitWriter0C.
#[doc = "Arbitration lost (master mode)\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ARLOW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<ARLOW_AW> for bool {
    #[inline(always)]
    fn from(variant: ARLOW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `ARLO` writer - Arbitration lost (master mode)"]
pub type ARLO_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, ARLOW_AW>;
impl<'a, REG, const O: u8> ARLO_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(ARLOW_AW::Clear)
    }
}
// AF (acknowledge failure) flag: reader side plus write-side clear helper.
#[doc = "Field `AF` reader - Acknowledge failure"]
pub type AF_R = crate::BitReader<AFR_A>;
#[doc = "Acknowledge failure\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AFR_A {
    #[doc = "0: No acknowledge failure"]
    NoFailure = 0,
    #[doc = "1: Acknowledge failure"]
    Failure = 1,
}
impl From<AFR_A> for bool {
    #[inline(always)]
    fn from(variant: AFR_A) -> Self {
        variant as u8 != 0
    }
}
impl AF_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> AFR_A {
        match self.bits {
            false => AFR_A::NoFailure,
            true => AFR_A::Failure,
        }
    }
    #[doc = "No acknowledge failure"]
    #[inline(always)]
    pub fn is_no_failure(&self) -> bool {
        *self == AFR_A::NoFailure
    }
    #[doc = "Acknowledge failure"]
    #[inline(always)]
    pub fn is_failure(&self) -> bool {
        *self == AFR_A::Failure
    }
}
// Write side: only "clear" (write 0) is meaningful, via BitWriter0C.
#[doc = "Acknowledge failure\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum AFW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<AFW_AW> for bool {
    #[inline(always)]
    fn from(variant: AFW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `AF` writer - Acknowledge failure"]
pub type AF_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, AFW_AW>;
impl<'a, REG, const O: u8> AF_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(AFW_AW::Clear)
    }
}
// OVR (overrun/underrun) flag: reader side plus write-side clear helper.
#[doc = "Field `OVR` reader - Overrun/Underrun"]
pub type OVR_R = crate::BitReader<OVRR_A>;
#[doc = "Overrun/Underrun\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OVRR_A {
    #[doc = "0: No overrun/underrun occured"]
    NoOverrun = 0,
    #[doc = "1: Overrun/underrun occured"]
    Overrun = 1,
}
impl From<OVRR_A> for bool {
    #[inline(always)]
    fn from(variant: OVRR_A) -> Self {
        variant as u8 != 0
    }
}
impl OVR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> OVRR_A {
        match self.bits {
            false => OVRR_A::NoOverrun,
            true => OVRR_A::Overrun,
        }
    }
    #[doc = "No overrun/underrun occured"]
    #[inline(always)]
    pub fn is_no_overrun(&self) -> bool {
        *self == OVRR_A::NoOverrun
    }
    #[doc = "Overrun/underrun occured"]
    #[inline(always)]
    pub fn is_overrun(&self) -> bool {
        *self == OVRR_A::Overrun
    }
}
// Write side: only "clear" (write 0) is meaningful, via BitWriter0C.
#[doc = "Overrun/Underrun\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum OVRW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<OVRW_AW> for bool {
    #[inline(always)]
    fn from(variant: OVRW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `OVR` writer - Overrun/Underrun"]
pub type OVR_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, OVRW_AW>;
impl<'a, REG, const O: u8> OVR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(OVRW_AW::Clear)
    }
}
// PECERR (PEC error in reception) flag: reader side plus write-side clear helper.
#[doc = "Field `PECERR` reader - PEC Error in reception"]
pub type PECERR_R = crate::BitReader<PECERRR_A>;
#[doc = "PEC Error in reception\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PECERRR_A {
    #[doc = "0: no PEC error: receiver returns ACK after PEC reception (if ACK=1)"]
    NoError = 0,
    #[doc = "1: PEC error: receiver returns NACK after PEC reception (whatever ACK)"]
    Error = 1,
}
impl From<PECERRR_A> for bool {
    #[inline(always)]
    fn from(variant: PECERRR_A) -> Self {
        variant as u8 != 0
    }
}
impl PECERR_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PECERRR_A {
        match self.bits {
            false => PECERRR_A::NoError,
            true => PECERRR_A::Error,
        }
    }
    #[doc = "no PEC error: receiver returns ACK after PEC reception (if ACK=1)"]
    #[inline(always)]
    pub fn is_no_error(&self) -> bool {
        *self == PECERRR_A::NoError
    }
    #[doc = "PEC error: receiver returns NACK after PEC reception (whatever ACK)"]
    #[inline(always)]
    pub fn is_error(&self) -> bool {
        *self == PECERRR_A::Error
    }
}
// Write side: only "clear" (write 0) is meaningful, via BitWriter0C.
#[doc = "PEC Error in reception\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum PECERRW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<PECERRW_AW> for bool {
    #[inline(always)]
    fn from(variant: PECERRW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `PECERR` writer - PEC Error in reception"]
pub type PECERR_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, PECERRW_AW>;
impl<'a, REG, const O: u8> PECERR_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(PECERRW_AW::Clear)
    }
}
// TIMEOUT (timeout / Tlow error) flag: reader side plus write-side clear helper.
#[doc = "Field `TIMEOUT` reader - Timeout or Tlow error"]
pub type TIMEOUT_R = crate::BitReader<TIMEOUTR_A>;
#[doc = "Timeout or Tlow error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIMEOUTR_A {
    #[doc = "0: No Timeout error"]
    NoTimeout = 0,
    #[doc = "1: SCL remained LOW for 25 ms"]
    Timeout = 1,
}
impl From<TIMEOUTR_A> for bool {
    #[inline(always)]
    fn from(variant: TIMEOUTR_A) -> Self {
        variant as u8 != 0
    }
}
impl TIMEOUT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> TIMEOUTR_A {
        match self.bits {
            false => TIMEOUTR_A::NoTimeout,
            true => TIMEOUTR_A::Timeout,
        }
    }
    #[doc = "No Timeout error"]
    #[inline(always)]
    pub fn is_no_timeout(&self) -> bool {
        *self == TIMEOUTR_A::NoTimeout
    }
    #[doc = "SCL remained LOW for 25 ms"]
    #[inline(always)]
    pub fn is_timeout(&self) -> bool {
        *self == TIMEOUTR_A::Timeout
    }
}
// Write side: only "clear" (write 0) is meaningful, via BitWriter0C.
#[doc = "Timeout or Tlow error\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TIMEOUTW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<TIMEOUTW_AW> for bool {
    #[inline(always)]
    fn from(variant: TIMEOUTW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `TIMEOUT` writer - Timeout or Tlow error"]
pub type TIMEOUT_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, TIMEOUTW_AW>;
impl<'a, REG, const O: u8> TIMEOUT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(TIMEOUTW_AW::Clear)
    }
}
// SMBALERT (SMBus alert) flag: reader side plus write-side clear helper.
#[doc = "Field `SMBALERT` reader - SMBus alert"]
pub type SMBALERT_R = crate::BitReader<SMBALERTR_A>;
#[doc = "SMBus alert\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SMBALERTR_A {
    #[doc = "0: No SMBALERT occured"]
    NoAlert = 0,
    #[doc = "1: SMBALERT occurred"]
    Alert = 1,
}
impl From<SMBALERTR_A> for bool {
    #[inline(always)]
    fn from(variant: SMBALERTR_A) -> Self {
        variant as u8 != 0
    }
}
impl SMBALERT_R {
    #[doc = "Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> SMBALERTR_A {
        match self.bits {
            false => SMBALERTR_A::NoAlert,
            true => SMBALERTR_A::Alert,
        }
    }
    #[doc = "No SMBALERT occured"]
    #[inline(always)]
    pub fn is_no_alert(&self) -> bool {
        *self == SMBALERTR_A::NoAlert
    }
    #[doc = "SMBALERT occurred"]
    #[inline(always)]
    pub fn is_alert(&self) -> bool {
        *self == SMBALERTR_A::Alert
    }
}
// Write side: only "clear" (write 0) is meaningful, via BitWriter0C.
#[doc = "SMBus alert\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SMBALERTW_AW {
    #[doc = "0: Clear flag"]
    Clear = 0,
}
impl From<SMBALERTW_AW> for bool {
    #[inline(always)]
    fn from(variant: SMBALERTW_AW) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `SMBALERT` writer - SMBus alert"]
pub type SMBALERT_W<'a, REG, const O: u8> = crate::BitWriter0C<'a, REG, O, SMBALERTW_AW>;
impl<'a, REG, const O: u8> SMBALERT_W<'a, REG, O>
where
    REG: crate::Writable + crate::RegisterSpec,
{
    #[doc = "Clear flag"]
    #[inline(always)]
    pub fn clear(self) -> &'a mut crate::W<REG> {
        self.variant(SMBALERTW_AW::Clear)
    }
}
// Read-side accessors for the SR1 register: one getter per defined status bit,
// each extracting a single bit from the raw register value. Bits 5 and 13 have
// no accessor here (not exposed by this register definition).
impl R {
    #[doc = "Bit 0 - Start bit (Master mode)"]
    #[inline(always)]
    pub fn sb(&self) -> SB_R {
        SB_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - Address sent (master mode)/matched (slave mode)"]
    #[inline(always)]
    pub fn addr(&self) -> ADDR_R {
        ADDR_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 2 - Byte transfer finished"]
    #[inline(always)]
    pub fn btf(&self) -> BTF_R {
        BTF_R::new(((self.bits >> 2) & 1) != 0)
    }
    #[doc = "Bit 3 - 10-bit header sent (Master mode)"]
    #[inline(always)]
    pub fn add10(&self) -> ADD10_R {
        ADD10_R::new(((self.bits >> 3) & 1) != 0)
    }
    #[doc = "Bit 4 - Stop detection (slave mode)"]
    #[inline(always)]
    pub fn stopf(&self) -> STOPF_R {
        STOPF_R::new(((self.bits >> 4) & 1) != 0)
    }
    #[doc = "Bit 6 - Data register not empty (receivers)"]
    #[inline(always)]
    pub fn rx_ne(&self) -> RX_NE_R {
        RX_NE_R::new(((self.bits >> 6) & 1) != 0)
    }
    #[doc = "Bit 7 - Data register empty (transmitters)"]
    #[inline(always)]
    pub fn tx_e(&self) -> TX_E_R {
        TX_E_R::new(((self.bits >> 7) & 1) != 0)
    }
    #[doc = "Bit 8 - Bus error"]
    #[inline(always)]
    pub fn berr(&self) -> BERR_R {
        BERR_R::new(((self.bits >> 8) & 1) != 0)
    }
    #[doc = "Bit 9 - Arbitration lost (master mode)"]
    #[inline(always)]
    pub fn arlo(&self) -> ARLO_R {
        ARLO_R::new(((self.bits >> 9) & 1) != 0)
    }
    #[doc = "Bit 10 - Acknowledge failure"]
    #[inline(always)]
    pub fn af(&self) -> AF_R {
        AF_R::new(((self.bits >> 10) & 1) != 0)
    }
    #[doc = "Bit 11 - Overrun/Underrun"]
    #[inline(always)]
    pub fn ovr(&self) -> OVR_R {
        OVR_R::new(((self.bits >> 11) & 1) != 0)
    }
    #[doc = "Bit 12 - PEC Error in reception"]
    #[inline(always)]
    pub fn pecerr(&self) -> PECERR_R {
        PECERR_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - Timeout or Tlow error"]
    #[inline(always)]
    pub fn timeout(&self) -> TIMEOUT_R {
        TIMEOUT_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - SMBus alert"]
    #[inline(always)]
    pub fn smbalert(&self) -> SMBALERT_R {
        SMBALERT_R::new(((self.bits >> 15) & 1) != 0)
    }
}
// Write-side accessors for SR1: only the error/alert flags (bits 8-12, 14, 15)
// are writable, each through a BitWriter0C (write 0 to clear). The bit offset
// is passed as the const generic parameter.
impl W {
    #[doc = "Bit 8 - Bus error"]
    #[inline(always)]
    #[must_use]
    pub fn berr(&mut self) -> BERR_W<SR1_SPEC, 8> {
        BERR_W::new(self)
    }
    #[doc = "Bit 9 - Arbitration lost (master mode)"]
    #[inline(always)]
    #[must_use]
    pub fn arlo(&mut self) -> ARLO_W<SR1_SPEC, 9> {
        ARLO_W::new(self)
    }
    #[doc = "Bit 10 - Acknowledge failure"]
    #[inline(always)]
    #[must_use]
    pub fn af(&mut self) -> AF_W<SR1_SPEC, 10> {
        AF_W::new(self)
    }
    #[doc = "Bit 11 - Overrun/Underrun"]
    #[inline(always)]
    #[must_use]
    pub fn ovr(&mut self) -> OVR_W<SR1_SPEC, 11> {
        OVR_W::new(self)
    }
    #[doc = "Bit 12 - PEC Error in reception"]
    #[inline(always)]
    #[must_use]
    pub fn pecerr(&mut self) -> PECERR_W<SR1_SPEC, 12> {
        PECERR_W::new(self)
    }
    #[doc = "Bit 14 - Timeout or Tlow error"]
    #[inline(always)]
    #[must_use]
    pub fn timeout(&mut self) -> TIMEOUT_W<SR1_SPEC, 14> {
        TIMEOUT_W::new(self)
    }
    #[doc = "Bit 15 - SMBus alert"]
    #[inline(always)]
    #[must_use]
    pub fn smbalert(&mut self) -> SMBALERT_W<SR1_SPEC, 15> {
        SMBALERT_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Escape hatch: bypasses the typed field writers; callers must ensure the
    // raw value is valid for this register.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "SR1\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`sr1::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`sr1::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct SR1_SPEC;
impl crate::RegisterSpec for SR1_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`sr1::R`](R) reader structure"]
impl crate::Readable for SR1_SPEC {}
#[doc = "`write(|w| ..)` method takes [`sr1::W`](W) writer structure"]
impl crate::Writable for SR1_SPEC {
    // 0xdf00 = bits 8-12, 14, 15: the clearable error/alert flags, which are
    // modified by writing zero (matching the BitWriter0C field writers above).
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0xdf00;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets SR1 to value 0"]
impl crate::Resettable for SR1_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// Simple 'hello' thread example.
// Mainly inspired from official book at
// http://doc.rust-lang.org/book/concurrency.html
use std::thread;
/// Spawns a thread that prints a greeting and waits for it to finish.
fn main() {
    println!("Hello from main");
    // Keep the JoinHandle and join it: without the join, `main` may return
    // (terminating the process) before the spawned thread is ever scheduled,
    // so "Hello from new thread" might never be printed.
    let handle = thread::spawn(|| {
        println!("Hello from new thread");
    });
    handle.join().expect("spawned thread panicked");
}
|
#[macro_use]
extern crate neon;
extern crate neon_serde;
extern crate sodiumoxide;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
mod auth_token;
mod keyring;
use auth_token::AuthToken;
use keyring::Keyring;
use neon::prelude::*;
/// Native (Rust-side) state backing the JS `EccAuth` class: holds the
/// keyring loaded from the directory passed to the constructor.
pub struct EccAuth {
    keyring: Keyring
}
impl EccAuth {
    /// Builds an `EccAuth` backed by a `Keyring` rooted at `keyring_dir_path`.
    fn new(keyring_dir_path: &str) -> EccAuth {
        // `keyring_dir_path` is already a `&str`; the previous `&` created a
        // redundant `&&str` that only worked via deref coercion.
        let keyring = Keyring::new(keyring_dir_path);
        EccAuth { keyring }
    }
    /// Stub: native signing is not implemented yet. The parameter is kept
    /// (leading underscore silences the unused warning) so the intended
    /// signature stays visible.
    fn sign(_raw_token: AuthToken) {}
}
// Neon class/module declarations: exposes `EccAuth` to JavaScript.
declare_types! {
    pub class JsEccAuth for EccAuth {
        // JS constructor: `new EccAuth(keyringDirPath)`.
        init(mut cx) {
            let keyring_dir_path: Handle<JsString> = cx.argument::<JsString>(0)?;
            let ecc_auth = EccAuth::new(&keyring_dir_path.value());
            Ok(ecc_auth)
        }
        // JS `sign(token)`: currently only deserializes and inspects the token
        // (prints expiry/debug info) and returns `true`; no actual signing yet.
        method sign(mut cx) {
            let token_obj = cx.argument::<JsValue>(0)?;
            let raw_token: AuthToken = neon_serde::from_value(&mut cx, token_obj)?;
            println!("Token is expired: {:?}", raw_token.is_expired());
            println!("{:?}", raw_token);
            Ok(cx.boolean(true).upcast())
        }
        // method showKey(mut cx) {
        //     // just an example of how to return data
        //     let pk = {
        //         let this = cx.this();
        //         let guard = &mut cx.lock();
        //         let ecc_auth = this.borrow(&guard);
        //         ecc_auth.keyring.public_key
        //     };
        //     println!("{:?}", pk);
        //     Ok(cx.boolean(true).upcast())
        // }
    }
}
// Module entry point: registers the class under the name "EccAuth".
register_module!(mut m, { m.export_class::<JsEccAuth>("EccAuth") });
|
use rust_decimal_macros::dec;
// Require using for reexportable feature
#[cfg(feature = "reexportable")]
use rust_decimal::Decimal;
#[test]
fn it_can_parse_decimal() {
    // (expected string form, parsed decimal) pairs covering zero, negatives,
    // long fractions, digit separators, and scientific notation.
    let cases = [
        ("0.00", dec!(0.00)),
        ("1.00", dec!(1.00)),
        ("-1.23", dec!(-1.23)),
        ("1.1234567890123456789012345678", dec!(1.1234567890123456789012345678)),
        ("1000000", dec!(1_000_000)),
        ("123", dec!(1.23e2)),
        ("123", dec!(1.23e+2)),
        ("-0.0123", dec!(-1.23e-2)),
        ("3.14", dec!(3.14e0)),
        ("12000", dec!(12e3)),
    ];
    for (expected, value) in cases.iter() {
        assert_eq!(*expected, value.to_string());
    }
}
|
use std::fs::File;
use std::io::prelude::*;
use std::net::TcpListener;
use std::net::TcpStream;
/// Minimal TCP server: listens on localhost:1234 and serves each
/// connection sequentially.
fn main() {
    let listener = TcpListener::bind("127.0.0.1:1234").unwrap();
    // Each item yielded by `incoming()` is one connection attempt.
    for incoming in listener.incoming() {
        let socket = incoming.unwrap();
        println!("Connection established!!");
        handle_connection3(socket);
    }
}
/// Reads the request and serves the contents of `hello.html`.
fn handle_connection3(mut stream: TcpStream) {
    // Consume (up to 1 KiB of) the request so we can log it below.
    let mut buffer = [0; 1024];
    stream.read(&mut buffer).unwrap();
    // Fix: the file was previously opened but never read, and the response
    // carried a "todo" placeholder string. Read hello.html and send it,
    // mirroring handle_connection2.
    let mut file = File::open("hello.html").unwrap();
    let mut contents = String::new();
    file.read_to_string(&mut contents).unwrap();
    let response = format!("HTTP/1.1 200 OK\r\n\r\n{}", contents);
    stream.write(response.as_bytes()).unwrap();
    stream.flush().unwrap();
    println!("Request: {}", String::from_utf8_lossy(&buffer[..]))
}
// ask KJ
fn handle_connection2(mut stream: TcpStream) {
// FIXME: why doesn't it work if i don't successfully read the buffer?
// let mut buffer = [0; 512];
// stream.read(&mut buffer).unwrap();
let mut file = File::open("hello.html").unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
let response = format!("HTTP/1.1 200 OK\r\n\r\n{}", contents);
stream.write(response.as_bytes()).unwrap();
stream.flush().unwrap();
// println!("Request: {}", String::from_utf8_lossy(&buffer[..]))
}
/// Replies with an empty 200 response after reading the request.
fn handle_connection(mut stream: TcpStream) {
    let mut buffer = [0; 1024];
    stream.read(&mut buffer).unwrap();
    /* Response format (plain comment — the previous `/**` doc comment was
       attached to a `let` statement, which rustc flags as an unused doc comment):
       HTTP-Version Status-Code Reason-Phrase CRLF
       headers CRLF
       message-body
       CRLF === \r\n
       but because no headers, therefore CRLFCRLF --> \r\n\r\n
    */
    let response = "HTTP/1.1 200 OK\r\n\r\n";
    stream.write(response.as_bytes()).unwrap();
    stream.flush().unwrap(); // flush will wait until all bytes have been written to the connection, or "flush until there's nothing left"
    println!("Request: {}", String::from_utf8_lossy(&buffer[..]))
    // holy shit http is a text protocol
    // so if you sent a string with the right format it works!
}
|
#![no_main]
#[macro_use]
extern crate libfuzzer_sys;
extern crate valis_syntax;
// Fuzz entry point: only valid UTF-8 inputs are fed to the parser; the parse
// result is discarded — the fuzzer is looking for panics/crashes, not output.
fuzz_target!(|data: &[u8]| {
    let tables = valis_syntax::SyntaxTables::default();
    if let Ok(s) = std::str::from_utf8(data) {
        let _ = valis_syntax::ast::SourceFileNode::parse(s, &tables);
    }
});
|
use epd1in54::{DEFAULT_BACKGROUND_COLOR, HEIGHT, WIDTH};
/// Full size buffer for use with the 1in54 EPD
///
/// Can also be manually constructed:
/// `buffer: [DEFAULT_BACKGROUND_COLOR.get_byte_value(); WIDTH / 8 * HEIGHT]`
pub struct Buffer1in54BlackWhite {
    // One bit per pixel, packed 8 pixels per byte: WIDTH * HEIGHT / 8 bytes.
    pub buffer: [u8; WIDTH as usize * HEIGHT as usize / 8],
}
impl Default for Buffer1in54BlackWhite {
    /// Fills the whole buffer with the default background color byte.
    fn default() -> Self {
        Buffer1in54BlackWhite {
            // Size expression must match the struct field declaration exactly.
            buffer: [DEFAULT_BACKGROUND_COLOR.get_byte_value();
                WIDTH as usize * HEIGHT as usize / 8],
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use color::Color;
    use embedded_graphics::coord::Coord;
    use embedded_graphics::prelude::*;
    use embedded_graphics::primitives::Line;
    use graphics::{Display, DisplayRotation};
    // test buffer length
    #[test]
    fn graphics_size() {
        let mut display1in54 = Buffer1in54BlackWhite::default();
        let display = Display::new(WIDTH, HEIGHT, &mut display1in54.buffer);
        // 200 * 200 / 8 = 5000 bytes
        assert_eq!(display.buffer().len(), 5000);
    }
    // test default background color on all bytes
    #[test]
    fn graphics_default() {
        let mut display1in54 = Buffer1in54BlackWhite::default();
        let display = Display::new(WIDTH, HEIGHT, &mut display1in54.buffer);
        for &byte in display.buffer() {
            assert_eq!(byte, DEFAULT_BACKGROUND_COLOR.get_byte_value());
        }
    }
    // Each rotation test draws an 8-pixel line that lands in the first byte of
    // the buffer under the given rotation, then checks that only that byte
    // was touched.
    #[test]
    fn graphics_rotation_0() {
        let mut display1in54 = Buffer1in54BlackWhite::default();
        let mut display = Display::new(WIDTH, HEIGHT, &mut display1in54.buffer);
        display.draw(
            Line::new(Coord::new(0, 0), Coord::new(7, 0))
                .with_stroke(Some(Color::Black))
                .into_iter(),
        );
        let buffer = display.buffer();
        assert_eq!(buffer[0], Color::Black.get_byte_value());
        for &byte in buffer.iter().skip(1) {
            assert_eq!(byte, DEFAULT_BACKGROUND_COLOR.get_byte_value());
        }
    }
    #[test]
    fn graphics_rotation_90() {
        let mut display1in54 = Buffer1in54BlackWhite::default();
        let mut display = Display::new(WIDTH, HEIGHT, &mut display1in54.buffer);
        display.set_rotation(DisplayRotation::Rotate90);
        display.draw(
            Line::new(Coord::new(0, 192), Coord::new(0, 199))
                .with_stroke(Some(Color::Black))
                .into_iter(),
        );
        let buffer = display.buffer();
        assert_eq!(buffer[0], Color::Black.get_byte_value());
        for &byte in buffer.iter().skip(1) {
            assert_eq!(byte, DEFAULT_BACKGROUND_COLOR.get_byte_value());
        }
    }
    #[test]
    fn graphics_rotation_180() {
        let mut display1in54 = Buffer1in54BlackWhite::default();
        let mut display = Display::new(WIDTH, HEIGHT, &mut display1in54.buffer);
        display.set_rotation(DisplayRotation::Rotate180);
        display.draw(
            Line::new(Coord::new(192, 199), Coord::new(199, 199))
                .with_stroke(Some(Color::Black))
                .into_iter(),
        );
        // Removed leftover debug `extern crate std; std::println!` that dumped
        // the whole 5000-byte buffer on every run.
        let buffer = display.buffer();
        assert_eq!(buffer[0], Color::Black.get_byte_value());
        for &byte in buffer.iter().skip(1) {
            assert_eq!(byte, DEFAULT_BACKGROUND_COLOR.get_byte_value());
        }
    }
    #[test]
    fn graphics_rotation_270() {
        let mut display1in54 = Buffer1in54BlackWhite::default();
        let mut display = Display::new(WIDTH, HEIGHT, &mut display1in54.buffer);
        display.set_rotation(DisplayRotation::Rotate270);
        display.draw(
            Line::new(Coord::new(199, 0), Coord::new(199, 7))
                .with_stroke(Some(Color::Black))
                .into_iter(),
        );
        // Removed leftover debug buffer dump (see graphics_rotation_180).
        let buffer = display.buffer();
        assert_eq!(buffer[0], Color::Black.get_byte_value());
        for &byte in buffer.iter().skip(1) {
            assert_eq!(byte, DEFAULT_BACKGROUND_COLOR.get_byte_value());
        }
    }
}
|
pub mod http_handler;
pub mod file_manager;
pub mod thread_pool;
pub mod request;
|
/*
* Datadog API V1 Collection
*
* Collection of all Datadog Public endpoints.
*
* The version of the OpenAPI document: 1.0
* Contact: support@datadoghq.com
* Generated by: https://openapi-generator.tech
*/
/// WidgetTextAlign : How to align the text on the widget.
/// How to align the text on the widget.
// Serialized/deserialized (via serde rename) as "center" | "left" | "right".
// Variant names are SCREAMING case because they are generated public API.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum WidgetTextAlign {
    #[serde(rename = "center")]
    CENTER,
    #[serde(rename = "left")]
    LEFT,
    #[serde(rename = "right")]
    RIGHT,
}
/// Renders the wire-format name of the alignment ("center" / "left" / "right").
///
/// Implemented as `Display` rather than a direct `ToString` impl: the blanket
/// `impl<T: Display> ToString for T` keeps `.to_string()` working for all
/// existing callers, and the value also becomes usable in `format!`/`{}`.
impl std::fmt::Display for WidgetTextAlign {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let s = match self {
            Self::CENTER => "center",
            Self::LEFT => "left",
            Self::RIGHT => "right",
        };
        f.write_str(s)
    }
}
|
use actix_web::*;
use database;
use serde_json;
use misc;
use msa;
use std::str;
use futures::future::Future;
use models::QuerySubstrate;
use flatten;
use csv;
use std::collections::HashMap;
use futures::Stream;
use std::fs::File;
use std::io::prelude::*;
/// Health-check endpoint: returns a constant JSON payload with liveness
/// status and the service version.
pub fn get_status_controller(_req: HttpRequest<super::State>) -> HttpResponse {
    // Build the payload from a literal pair list instead of insert-by-insert.
    let status: HashMap<&str, &str> = [("status", "alive"), ("version", "1.1.8")]
        .iter()
        .cloned()
        .collect();
    let status_serialized = serde_json::to_string_pretty(&status).unwrap();
    HttpResponse::Ok().force_close().body(status_serialized)
}
/// Looks up the record identified by the `id` path segment and returns it as
/// pretty-printed JSON; any DB or serialization failure becomes a 500 with the
/// error text as a plain-text body.
pub fn get_info_controller(req: HttpRequest<super::State>) -> HttpResponse {
    //get the value of ID
    let id: String = req.match_info().query("id").unwrap();
    //get the connection from pool
    let conn;
    match database::connect(&req.state().db_params) {
        Ok(val) => {conn = val},
        Err(error) => {return HttpResponse::InternalServerError().force_close().body(format!("{}",error));},
    }
    //get the id string
    let info_result = database::get_info(&id,&conn);
    match info_result {
        Ok(info) => {
            // Serialize the record; serialization itself can fail, so that
            // error path is handled separately from the DB error path.
            let info_serialized_result = serde_json::to_string_pretty(&info);
            match info_serialized_result {
                Ok(info_serialized) => {
                    HttpResponse::Ok()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "application/json")
                        .body(info_serialized)
                },
                Err(error) => {
                    HttpResponse::InternalServerError()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body(format!("{}",error))
                }
            }
        },
        Err(error) => {
            HttpResponse::InternalServerError()
                .force_close()
                .header(http::header::CONTENT_TYPE, "text/plain")
                .body(format!("{}",error))
        }
    }
}
/// Search endpoint. Required query params: `search_term`, `term_type`, `role`.
/// Optional: `ptm_type` (repeatable), `organism` (repeatable i32 taxon codes),
/// `paginate` ("true"/"false"), and — when paginating — `start_index` and
/// `end_index`. Responds with JSON or CSV depending on the Accept header, with
/// the total hit count in a `count` response header.
pub fn search_controller(req: HttpRequest<super::State>) -> HttpResponse {
    //get the connection from pool
    let conn;
    match database::connect(&req.state().db_params) {
        Ok(val) => {conn = val},
        Err(error) => {return HttpResponse::InternalServerError()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body(format!("{}",error));},
    }
    //get content header
    let content_header = misc::get_accept_header_value(&req);
    //params
    let search_term;
    let term_type;
    let role;
    let mut paginate = false;
    let limit;
    let offset;
    // search term (required, must be non-blank)
    let search_term_option = req.query().get("search_term");
    match search_term_option {
        Some(val) => {
            if !val.is_empty(){
                search_term = val
            }else{
                return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("search_term cannot be blank");
            }
        },
        None => {return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("search_term cannot be empty");}
    }
    // term type (required)
    let term_type_option = req.query().get("term_type");
    match term_type_option {
        Some(val) => {term_type=val},
        None => {return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("term_type cannot be empty");}
    }
    // role (required)
    let role_option = req.query().get("role");
    match role_option {
        Some(val) => {role=val},
        None => {return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("role cannot be empty");}
    }
    // ptm type
    let ptm_types = misc::get_vec_str_from_param(req.query(),"ptm_type");
    //build ptm labels: map each requested type to its label, rejecting unknown
    //types; with no types requested, fall back to the default label set.
    let mut ptm_labels_to_filter: Vec<String> = Vec::new();
    if !ptm_types.is_empty(){
        for ptm_type in ptm_types {
            let ptm_label_option = misc::get_ptm_event_label(&ptm_type.to_lowercase());
            match ptm_label_option {
                Some(ptm_label) => {
                    ptm_labels_to_filter.push(ptm_label)
                },
                None => {
                    let error_msg = format!("invalid PTM type {}",ptm_type);
                    error!("{}",error_msg);
                    return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body(error_msg);
                }
            }
        };
    }else{
        ptm_labels_to_filter = misc::default_ptm_labels();
    }
    // Organism
    let organism_taxon_codes;
    match misc::get_vec_i32_from_param(req.query(),"organism") {
        Ok(value) => {
            organism_taxon_codes = value;
        },
        Err(error) => {
            error!("{}",error);
            return HttpResponse::InternalServerError()
                .force_close()
                .header(http::header::CONTENT_TYPE, "text/plain")
                .body(format!("{}",error))
        }
    }
    //paginate: anything other than "true"/"false" is logged and treated as false
    let paginate_option = req.query().get("paginate");
    match paginate_option {
        Some(value) => {
            if String::from(value).to_lowercase() == "true" {
                paginate = true;
            }else if String::from(value).to_lowercase() == "false" {
                paginate = false;
            }else{
                error!("Invalid paginate option : {}",String::from(value));
            }
        },
        None => {
            paginate = false;
        },
    }
    //start
    if paginate {
        let start_index;
        let start_index_option = req.query().get("start_index");
        match start_index_option {
            Some(val) => {
                match val.parse::<i32>() {
                    Ok(start_index_val) => {
                        start_index = start_index_val
                    },
                    Err(error) => {
                        error!("{}",error);
                        return HttpResponse::InternalServerError()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "text/plain")
                            .body(format!("{}",error))
                    },
                }
            },
            None => {
                return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("start_index cannot be empty");
            }
        }
        //end
        let end_index;
        let end_index_option = req.query().get("end_index");
        match end_index_option {
            Some(val) => {
                match val.parse::<i32>() {
                    Ok(end_index_val) => {
                        end_index = end_index_val
                    },
                    Err(error) => {
                        error!("{}",error);
                        return HttpResponse::InternalServerError()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "text/plain")
                            .body(format!("{}",error))
                    },
                }
            },
            None => {
                return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("end_index cannot be empty");
            }
        }
        //calculate the limit and offset from the [start_index, end_index) window
        limit = end_index - start_index;
        if limit <= 0 {
            {return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body("end_index cannot be smaller than or equal to start index");}
        }
        offset = start_index;
    }else{
        limit = 0;
        offset = 0;
    }
    // perform the search
    let search_values_result = database::search(search_term,term_type,role,&ptm_labels_to_filter,&organism_taxon_codes,paginate,offset,limit,&conn);
    match search_values_result {
        Ok(values) => {
            let (count, search_values) = values;
            // Missing Accept header defaults to JSON.
            if content_header == "application/json" || content_header.is_empty() {
                let search_values_serialized_result = serde_json::to_string_pretty(&(*search_values));
                match search_values_serialized_result {
                    Ok(search_values_serialized) => {
                        HttpResponse::Ok()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "application/json")
                            .header("count", count.to_string())
                            .body(search_values_serialized)
                    },
                    Err(error) => {
                        error!("{}",error);
                        HttpResponse::InternalServerError()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "text/plain")
                            .body(format!("{}",error))
                    }
                }
            } else if content_header == "text/plain" {
                //convert the values to flat structure, then serialize row by row as CSV
                let search_results_flat = flatten::search_results(&(*search_values.borrow()));
                let mut wtr = csv::Writer::from_writer(vec![]);
                for search_result_flat in search_results_flat {
                    let result = wtr.serialize(&search_result_flat);
                    match result {
                        Ok(_) => {},
                        Err(error) => {
                            error!("{}",error);
                            return HttpResponse::InternalServerError()
                                .force_close()
                                .header(http::header::CONTENT_TYPE, "text/plain")
                                .body(format!("{}",error));
                        }
                    }
                }
                // Recover the written bytes from the CSV writer and return them.
                let inner;
                let inner_result = wtr.into_inner();
                match inner_result {
                    Ok(value) => {inner=value;},
                    Err(error) => {
                        error!("{}",error);
                        return HttpResponse::InternalServerError()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "text/plain")
                            .body(format!("{}",error));
                    }
                }
                let data_result = String::from_utf8(inner);
                match data_result {
                    Ok(data) => {
                        return HttpResponse::Ok()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "text/csv")
                            .header("count", count.to_string())
                            .body(data);
                    },
                    Err(error) => {
                        error!("{}",error);
                        return HttpResponse::InternalServerError()
                            .force_close()
                            .header(http::header::CONTENT_TYPE, "text/plain")
                            .body(format!("{}",error));
                    }
                }
            }else {
                return HttpResponse::BadRequest()
                        .force_close()
                        .header(http::header::CONTENT_TYPE, "text/plain")
                        .body(format!("Invalid ACCEPT header - {}",content_header));
            }
        },
        Err(error) => {
            error!("{}",error);
            HttpResponse::InternalServerError()
                .force_close()
                .header(http::header::CONTENT_TYPE, "text/plain")
                .body(format!("{}",error))
        }
    }
}
pub fn browse_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));},
}
//get content header
let content_header = misc::get_accept_header_value(&req);
//params
let search_term = "";
let term_type;
let role;
let paginate = true;
let limit;
let offset;
// term type
let term_type_option = req.query().get("term_type");
match term_type_option {
Some(val) => {term_type=val},
None => {return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body("term_type cannot be empty");}
}
// role
let role_option = req.query().get("role");
match role_option {
Some(val) => {role=val},
None => {return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body("role cannot be empty");}
}
// ptm type
let ptm_types = misc::get_vec_str_from_param(req.query(),"ptm_type");
//build ptm labels
let mut ptm_labels_to_filter: Vec<String> = Vec::new();
if !ptm_types.is_empty(){
for ptm_type in ptm_types {
let ptm_label_option = misc::get_ptm_event_label(&ptm_type.to_lowercase());
match ptm_label_option {
Some(ptm_label) => {
ptm_labels_to_filter.push(ptm_label)
},
None => {
let error_msg = format!("invalid PTM type {}",ptm_type);
error!("{}",error_msg);
return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(error_msg);
}
}
};
}else{
ptm_labels_to_filter = misc::default_ptm_labels();
}
// Organism
let organism_taxon_codes;
match misc::get_vec_i32_from_param(req.query(),"organism") {
Ok(value) => {
organism_taxon_codes = value;
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error))
}
}
let start_index;
let start_index_option = req.query().get("start_index");
match start_index_option {
Some(val) => {
match val.parse::<i32>() {
Ok(start_index_val) => {
start_index = start_index_val
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error))
},
}
},
None => {
return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body("start_index cannot be empty");
}
}
//end
let end_index;
let end_index_option = req.query().get("end_index");
match end_index_option {
Some(val) => {
match val.parse::<i32>() {
Ok(end_index_val) => {
end_index = end_index_val
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error))
},
}
},
None => {
return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body("end_index cannot be empty");
}
}
//calculate the limit and offset
limit = end_index - start_index;
if limit <= 0 {
{return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body("end_index cannot be smaller than or equal to start index");}
}
offset = start_index;
// perform the search
let search_values_result = database::search(search_term,term_type,role,&ptm_labels_to_filter,&organism_taxon_codes,paginate,offset,limit,&conn);
match search_values_result {
Ok(values) => {
let (count, search_values) = values;
if content_header == "application/json" || content_header.is_empty() {
let search_values_serialized_result = serde_json::to_string_pretty(&(*search_values));
match search_values_serialized_result {
Ok(search_values_serialized) => {
HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.header("count", count.to_string())
.body(search_values_serialized)
},
Err(error) => {
error!("{}",error);
HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error))
}
}
} else if content_header == "text/plain" {
//convert the values to flat structure
let search_results_flat = flatten::search_results(&(*search_values.borrow()));
let mut wtr = csv::Writer::from_writer(vec![]);
for search_result_flat in search_results_flat {
let result = wtr.serialize(&search_result_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "text/csv")
.header("count", count.to_string())
.body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error))
}
}
}
pub fn substrate_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));},
}
//get the id strings
let substrate_events_results = database::get_substrate_events(&id,&conn);
//check if operation was successful
match substrate_events_results {
Ok(substrate_events) => {
if content_header == "application/json" || content_header.is_empty() {
//try deserializing
let substrate_events_serialized_result = serde_json::to_string_pretty(&substrate_events);
//check if operation was successful
match substrate_events_serialized_result {
Ok(substrate_events_serialized) => {
HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(substrate_events_serialized)
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}else if content_header == "text/plain"{
//convert the values to flat structure
let substrate_events_flat = flatten::substrate_events(&substrate_events);
let mut wtr = csv::Writer::from_writer(vec![]);
for substrate_event_flat in substrate_events_flat {
let result = wtr.serialize(&substrate_event_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/csv").body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
}
pub fn as_enzyme_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));},
}
//get the id strings
let enzyme_events_results = database::get_enzyme_events(&id,&conn);
//check if operation was successful
match enzyme_events_results {
Ok(enzyme_events) => {
if content_header == "application/json" || content_header.is_empty() {
//try deserializing
let events_serialized_result = serde_json::to_string_pretty(&enzyme_events);
//check if operation was successful
match events_serialized_result {
Ok(events_serialized) => {
HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(events_serialized)
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}else if content_header == "text/plain"{
//convert the values to flat structure
let events_flat = flatten::enzyme_events(&enzyme_events);
let mut wtr = csv::Writer::from_writer(vec![]);
for event in events_flat {
let result = wtr.serialize(&event);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/csv").body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "text/plain")
.body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
}
pub fn proteoforms_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));},
}
//get the id strings
let proteoforms_result = database::get_proteoforms(&id,&conn);
//check if operation was successful
match proteoforms_result {
Ok(proteoforms) => {
if content_header == "application/json" || content_header.is_empty() {
//try deserializing
let proteoforms_serialized_result = serde_json::to_string_pretty(&proteoforms);
//check if operation was successful
match proteoforms_serialized_result {
Ok(proteoforms_serialized) => {
HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "application/json").body(proteoforms_serialized)
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}else if content_header == "text/plain"{
//convert the values to flat structure
let protoroforms_flat = flatten::proteoform(&proteoforms);
let mut wtr = csv::Writer::from_writer(vec![]);
for proteoform_flat in protoroforms_flat {
let result = wtr.serialize(&proteoform_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok().force_close().body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
}
pub fn proteoformsppi_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));},
}
//get the id strings
let proteoforms_ppi_result = database::get_proteoformppis(&id,&conn);
//check if the operation was successful
match proteoforms_ppi_result {
Ok(proteoforms_ppi) => {
if content_header == "application/json" || content_header.is_empty() {
//try deserializing
let proteoforms_serialized_result = serde_json::to_string_pretty(&proteoforms_ppi);
//check if operation was successful
match proteoforms_serialized_result {
Ok(proteoforms_serialized) => {
HttpResponse::Ok().body(proteoforms_serialized)
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "application/json").body(format!("{}",error))
}
}
}else if content_header == "text/plain" {
//convert the values to flat structure
let protoroforms_ppi_flat = flatten::proteoform_ppis(&proteoforms_ppi);
let mut wtr = csv::Writer::from_writer(vec![]);
for proteoform_ppi_flat in protoroforms_ppi_flat {
let result = wtr.serialize(&proteoform_ppi_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok().force_close().body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError().header(http::header::CONTENT_TYPE, "text/plain").force_close().body(format!("{}",error))
}
}
}
pub fn ptmppi_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError().header(http::header::CONTENT_TYPE, "text/plain").force_close().body(format!("{}",error));},
}
//get the id strings
let ptmppi_result = database::get_ptmppis(&id,&conn);
//check if the operation was successful
match ptmppi_result {
Ok(ptmppis) => {
if content_header == "application/json"{
//try deserializing
let ptmppis_serialized_result = serde_json::to_string_pretty(&ptmppis);
//check if operation was successful
match ptmppis_serialized_result {
Ok(proteoforms_serialized) => {
HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(proteoforms_serialized)
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
} else if content_header =="text/plain" {
//convert the values to flat structure
let ptm_ppi_flat = flatten::ptm_ppi(&ptmppis);
let mut wtr = csv::Writer::from_writer(vec![]);
for ptm_ppi_flat in ptm_ppi_flat {
let result = wtr.serialize(&ptm_ppi_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
}
pub fn batch_ptm_enzymes_controller(req: HttpRequest<super::State>) -> Box<Future<Item=HttpResponse, Error=Error>> {
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection
let conn_result = database::connect(&req.state().db_params);
req.concat2()
.from_err()
.and_then(move |body_bytes| {
let conn;
match conn_result {
Ok(val) => {conn = val},
Err(error) => {return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));},
}
info!("Got database connection");
//read the bytes into str
let body_str;
let body_read_result = str::from_utf8(&body_bytes);
match body_read_result {
Ok(val) => {body_str = val},
Err(error) => {return Ok(HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));}
}
info!("Got raw srtring");
//parse the string
let query_substrates: Vec<QuerySubstrate>;
match serde_json::from_str(body_str) {
Ok(val) => {query_substrates = val},
Err(error) => {return Ok(HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));}
}
info!("parsed srtring");
//get the ptm enzymes
let ptm_enzymes_result = database::get_ptm_enzymes(&query_substrates,&conn);
info!("Got enzymes");
match ptm_enzymes_result {
Ok(ptm_enzymes) => {
if content_header == "application/json"{
info!("Serializing");
let ptm_enzymes_serialized_result = serde_json::to_string_pretty(&ptm_enzymes);
match ptm_enzymes_serialized_result {
Ok(ptm_enzymes_serialized) => {
info!("returned data");
return Ok(HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "application/json").body(ptm_enzymes_serialized));
},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}else if content_header == "text/plain" {
//convert the values to flat structure
let batch_ptm_enzymes_flat = flatten::batch_ptm_enzymes(&ptm_enzymes);
let mut wtr = csv::Writer::from_writer(vec![]);
for batch_ptm_enzyme_flat in batch_ptm_enzymes_flat {
let result = wtr.serialize(&batch_ptm_enzyme_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return Ok(HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(data));
},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}else {
return Ok(HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header)));
}
},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
})
.responder()
}
pub fn batch_ptm_ppi_controller(req: HttpRequest<super::State>) -> Box<Future<Item=HttpResponse, Error=Error>> {
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection
let conn_result = database::connect(&req.state().db_params);
req.concat2()
.from_err()
.and_then(move |body_bytes| {
//read the bytes into str
let body_str;
let body_read_result = str::from_utf8(&body_bytes);
match body_read_result {
Ok(val) => {body_str = val},
Err(error) => {return Ok(HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));}
}
//parse the string
let query_substrates: Vec<QuerySubstrate>;
match serde_json::from_str(body_str) {
Ok(val) => {query_substrates = val},
Err(error) => {return Ok(HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));}
}
let conn;
match conn_result {
Ok(val) => {conn = val},
Err(error) => {return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));},
}
//get the ptm enzymes
let ptm_ppis_result = database::get_ptm_ppi(&query_substrates,&conn);
match ptm_ppis_result {
Ok(ptm_ppis) => {
if content_header == "application/json" {
let ptm_ppis_serialized_result = serde_json::to_string_pretty(&ptm_ppis);
match ptm_ppis_serialized_result {
Ok(ptm_ppis_serialized) => {
return Ok(HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "application/json").body(ptm_ppis_serialized));
},
Err(error) => {
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}else if content_header == "text/plain"{
//convert the values to flat structure
let batch_ptm_ppis_flat = flatten::batch_ptm_ppi(&ptm_ppis);
let mut wtr = csv::Writer::from_writer(vec![]);
for batch_ptm_ppis_flat in batch_ptm_ppis_flat {
let result = wtr.serialize(&batch_ptm_ppis_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return Ok(HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(data));
},
Err(error) => {
error!("{}",error);
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}else {
return Ok(HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header)));
}
},
Err(error) => {
return Ok(HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error)));
}
}
}).responder()
}
pub fn get_statistics_controller(_req: HttpRequest<super::State>) -> HttpResponse {
//Open the statistics file
let mut statistics_file;
match File::open("static/statistics.json") {
Ok(value) => {
statistics_file = value;
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
let mut contents = String::new();
match statistics_file.read_to_string(&mut contents) {
Ok(_) => {
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
return HttpResponse::Ok().force_close().body(contents);
}
pub fn get_msa_controller(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError().force_close().body(format!("{}",error));},
}
//get the id string
let sequences_result = database::get_sequences(&id,&conn);
match sequences_result {
Ok(sequences) => {
let alignment_result = msa::align(&sequences);
match alignment_result {
Ok(alignment) => {
let decorate_result = msa::decorate(&id,&alignment,(&req.state().db_params).clone());
match decorate_result {
Ok(alignment_decorated) => {
let alignment_serialized_result = serde_json::to_string(&alignment_decorated);
match alignment_serialized_result {
Ok(alignment_serialized) => {
return HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(alignment_serialized);
},
Err(error) => {
let error_msg = format!("{}",error);
return HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(error_msg);
}
}
},
Err(error) => {
let error_msg = format!("{}",error);
return HttpResponse::Ok()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(error_msg);
}
}
},
Err(error) => {
let error_msg = format!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(error_msg);
}
}
},
Err(error) => {
let error_msg = format!("{}",error);
return HttpResponse::InternalServerError()
.force_close()
.header(http::header::CONTENT_TYPE, "application/json")
.body(error_msg);
},
}
}
pub fn get_variants(req: HttpRequest<super::State>) -> HttpResponse {
//get the value of ID
let id: String = req.match_info().query("id").unwrap();
//get content header
let content_header = misc::get_accept_header_value(&req);
//get the connection from pool
let conn;
match database::connect(&req.state().db_params) {
Ok(val) => {conn = val},
Err(error) => {return HttpResponse::InternalServerError().header(http::header::CONTENT_TYPE, "text/plain").force_close().body(format!("{}",error));},
}
//get the id strings
let variant_result = database::get_variants(&id,&conn);
//check if the operation was successful
match variant_result {
Ok(variants) => {
if content_header == "application/json"{
//try deserializing
let variants_serialized_result = serde_json::to_string_pretty(&variants);
//check if operation was successful
match variants_serialized_result {
Ok(proteoforms_serialized) => {
HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(proteoforms_serialized)
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
} else if content_header =="text/plain" {
//convert the values to flat structure
let mut wtr = csv::Writer::from_writer(vec![]);
for variant_flat in variants {
let result = wtr.serialize(&variant_flat);
match result {
Ok(_) => {},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}
let inner;
let inner_result = wtr.into_inner();
match inner_result {
Ok(value) => {inner=value;},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
let data_result = String::from_utf8(inner);
match data_result {
Ok(data) => {
return HttpResponse::Ok().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(data);
},
Err(error) => {
error!("{}",error);
return HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error));
}
}
}else {
return HttpResponse::BadRequest().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("Invalid ACCEPT header - {}",content_header));
}
},
Err(error) => {
HttpResponse::InternalServerError().force_close().header(http::header::CONTENT_TYPE, "text/plain").body(format!("{}",error))
}
}
} |
use a653rs::bindings::*;
/// No-op hypervisor backend: every APEX service in the `impl` blocks below is
/// a `todo!()` stub. Useful as a placeholder where a hypervisor type is
/// required but its services are never actually invoked.
pub struct DummyHypervisor;
// ARINC 653 partition-management services (P4 subset) — unimplemented stubs;
// each panics via `todo!()` if called.
impl ApexPartitionP4 for DummyHypervisor {
    fn get_partition_status<L: Locked>() -> ApexPartitionStatus {
        todo!()
    }
    fn set_partition_mode<L: Locked>(
        _operating_mode: OperatingMode,
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
}
// Queuing-port name lookup (P1 subset) — unimplemented stub; panics via
// `todo!()` if called.
impl ApexQueuingPortP1 for DummyHypervisor {
    fn get_queuing_port_id<L: Locked>(
        _queuing_port_name: QueuingPortName,
    ) -> Result<QueuingPortId, ErrorReturnCode> {
        todo!()
    }
}
// Queuing-port create/send/receive/status services (P4 subset). All methods
// are unimplemented stubs that panic via `todo!()`.
impl ApexQueuingPortP4 for DummyHypervisor {
    fn create_queuing_port<L: Locked>(
        _queuing_port_name: QueuingPortName,
        _max_message_size: MessageSize,
        _max_nb_message: MessageRange,
        _port_direction: PortDirection,
        _queuing_discipline: QueuingDiscipline,
    ) -> Result<QueuingPortId, ErrorReturnCode> {
        todo!()
    }
    fn send_queuing_message<L: Locked>(
        _queuing_port_id: QueuingPortId,
        _message: &[ApexByte],
        _time_out: ApexSystemTime,
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    // Declared `unsafe` by the a653rs trait; the caller contract is defined
    // there — TODO confirm against the trait docs. This stub never runs.
    unsafe fn receive_queuing_message<L: Locked>(
        _queuing_port_id: QueuingPortId,
        _time_out: ApexSystemTime,
        _message: &mut [ApexByte],
    ) -> Result<MessageSize, ErrorReturnCode> {
        todo!()
    }
    fn get_queuing_port_status<L: Locked>(
        _queuing_port_id: QueuingPortId,
    ) -> Result<QueuingPortStatus, ErrorReturnCode> {
        todo!()
    }
    fn clear_queuing_port<L: Locked>(
        _queuing_port_id: QueuingPortId,
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
}
// Sampling-port create/write/read services (P4 subset). All methods are
// unimplemented stubs that panic via `todo!()`.
impl ApexSamplingPortP4 for DummyHypervisor {
    fn create_sampling_port<L: Locked>(
        _sampling_port_name: SamplingPortName,
        _max_message_size: MessageSize,
        _port_direction: PortDirection,
        _refresh_period: ApexSystemTime,
    ) -> Result<SamplingPortId, ErrorReturnCode> {
        todo!()
    }
    fn write_sampling_message<L: Locked>(
        _sampling_port_id: SamplingPortId,
        _message: &[ApexByte],
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    // Declared `unsafe` by the a653rs trait; the caller contract is defined
    // there — TODO confirm against the trait docs. This stub never runs.
    unsafe fn read_sampling_message<L: Locked>(
        _sampling_port_id: SamplingPortId,
        _message: &mut [ApexByte],
    ) -> Result<(Validity, MessageSize), ErrorReturnCode> {
        todo!()
    }
}
// Sampling-port lookup/status services (P1 subset). Unimplemented stubs.
impl ApexSamplingPortP1 for DummyHypervisor {
    fn get_sampling_port_id<L: Locked>(
        _sampling_port_name: SamplingPortName,
    ) -> Result<SamplingPortId, ErrorReturnCode> {
        todo!()
    }
    fn get_sampling_port_status<L: Locked>(
        _sampling_port_id: SamplingPortId,
    ) -> Result<ApexSamplingPortStatus, ErrorReturnCode> {
        todo!()
    }
}
// Process creation/start services (P4 subset). Unimplemented stubs.
impl ApexProcessP4 for DummyHypervisor {
    fn create_process<L: Locked>(
        _attributes: &ApexProcessAttribute,
    ) -> Result<ProcessId, ErrorReturnCode> {
        todo!()
    }
    fn start<L: Locked>(_process_id: ProcessId) -> Result<(), ErrorReturnCode> {
        todo!()
    }
}
// Full process management services (P1 subset): scheduling control, preemption
// locking, and process/core identity queries. All methods are unimplemented
// stubs that panic via `todo!()`.
impl ApexProcessP1 for DummyHypervisor {
    fn set_priority<L: Locked>(
        _process_id: ProcessId,
        _priority: Priority,
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn suspend_self<L: Locked>(_time_out: ApexSystemTime) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn suspend<L: Locked>(_process_id: ProcessId) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn resume<L: Locked>(_process_id: ProcessId) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    // Note: `stop_self` returns `()` in the trait, unlike its siblings.
    fn stop_self<L: Locked>() {
        todo!()
    }
    fn stop<L: Locked>(_process_id: ProcessId) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn delayed_start<L: Locked>(
        _process_id: ProcessId,
        _delay_time: ApexSystemTime,
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn lock_preemption<L: Locked>() -> Result<LockLevel, ErrorReturnCode> {
        todo!()
    }
    fn unlock_preemption<L: Locked>() -> Result<LockLevel, ErrorReturnCode> {
        todo!()
    }
    fn get_my_id<L: Locked>() -> Result<ProcessId, ErrorReturnCode> {
        todo!()
    }
    fn get_process_id<L: Locked>(_process_name: ProcessName) -> Result<ProcessId, ErrorReturnCode> {
        todo!()
    }
    fn get_process_status<L: Locked>(
        _process_id: ProcessId,
    ) -> Result<ApexProcessStatus, ErrorReturnCode> {
        todo!()
    }
    fn initialize_process_core_affinity<L: Locked>(
        _process_id: ProcessId,
        _processor_core_id: ProcessorCoreId,
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn get_my_processor_core_id<L: Locked>() -> ProcessorCoreId {
        todo!()
    }
    fn get_my_index<L: Locked>() -> Result<ProcessIndex, ErrorReturnCode> {
        todo!()
    }
}
// Time services (P4 subset). Note these trait methods take no `L: Locked`
// parameter, unlike the other services above. Unimplemented stubs.
impl ApexTimeP4 for DummyHypervisor {
    fn periodic_wait() -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn get_time() -> ApexSystemTime {
        todo!()
    }
}
// Health-monitoring / error reporting services (P4 subset). Unimplemented stubs.
impl ApexErrorP4 for DummyHypervisor {
    fn report_application_message<L: Locked>(_message: &[ApexByte]) -> Result<(), ErrorReturnCode> {
        todo!()
    }
    fn raise_application_error<L: Locked>(
        _error_code: ErrorCode,
        _message: &[ApexByte],
    ) -> Result<(), ErrorReturnCode> {
        todo!()
    }
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use air::proof::Queries;
use crypto::{ElementHasher, MerkleTree};
use math::FieldElement;
use utils::{batch_iter_mut, collections::Vec, uninit_vector};
#[cfg(feature = "concurrent")]
use utils::iterators::*;
// CONSTRAINT COMMITMENT
// ================================================================================================
/// Commitment to a constraint composition polynomial evaluation table.
///
/// `evaluations` stores the table column-by-column (one `Vec<E>` per
/// polynomial column); `commitment` is a Merkle tree in which leaf i is the
/// hash of row i across all columns (see `ConstraintCommitment::new`).
pub struct ConstraintCommitment<E: FieldElement, H: ElementHasher<BaseField = E::BaseField>> {
    evaluations: Vec<Vec<E>>,
    commitment: MerkleTree<H>,
}
impl<E: FieldElement, H: ElementHasher<BaseField = E::BaseField>> ConstraintCommitment<E, H> {
    /// Builds a commitment to the provided constraint evaluation table.
    ///
    /// Each inner vector is one polynomial column; the table must be non-empty
    /// and all columns must share the same power-of-two length. Evaluations of
    /// all columns at the same x coordinate (one row) are hashed into a single
    /// Merkle leaf.
    pub fn new(evaluations: Vec<Vec<E>>) -> ConstraintCommitment<E, H> {
        assert!(
            !evaluations.is_empty(),
            "Constraint evaluations cannot be empty"
        );
        let column_size = evaluations[0].len();
        assert!(
            column_size.is_power_of_two(),
            "evaluation column size must be a power of two"
        );
        for column in &evaluations {
            assert_eq!(
                column_size,
                column.len(),
                "all evaluation columns must have the same length"
            );
        }
        // one digest per row of the evaluation table
        let leaf_hashes = hash_evaluations::<E, H>(&evaluations);
        // commit to the row digests with a Merkle tree
        let tree = MerkleTree::new(leaf_hashes)
            .expect("failed to construct constraint Merkle tree");
        ConstraintCommitment {
            evaluations,
            commitment: tree,
        }
    }
    /// Returns the root of the commitment Merkle tree.
    pub fn root(&self) -> H::Digest {
        *self.commitment.root()
    }
    /// Returns the depth of the commitment Merkle tree.
    #[allow(unused)]
    pub fn tree_depth(&self) -> usize {
        self.commitment.depth()
    }
    /// Consumes the commitment and returns the evaluation rows at the
    /// specified positions together with Merkle authentication paths from the
    /// commitment root to those rows.
    pub fn query(self, positions: &[usize]) -> Queries {
        // authentication paths for all requested leaves in one batch proof
        let merkle_proof = self
            .commitment
            .prove_batch(positions)
            .expect("failed to generate a Merkle proof for constraint queries");
        // gather the evaluation row corresponding to each queried position
        let evaluations: Vec<Vec<E>> = positions
            .iter()
            .map(|&position| {
                let mut row = vec![E::ZERO; self.evaluations.len()];
                read_row(&self.evaluations, position, &mut row);
                row
            })
            .collect();
        Queries::new(merkle_proof, evaluations)
    }
}
// HELPER FUNCTIONS
// ================================================================================================
/// Computes hashes of evaluations grouped by row.
///
/// Returns one digest per row of the (column-major) evaluation table:
/// result[i] = H(evaluations[0][i], ..., evaluations[k-1][i]). With the
/// "concurrent" feature enabled, `batch_iter_mut!` processes batches of rows
/// in parallel; otherwise it degenerates to a sequential loop.
fn hash_evaluations<E, H>(evaluations: &[Vec<E>]) -> Vec<H::Digest>
where
    E: FieldElement,
    H: ElementHasher<BaseField = E::BaseField>,
{
    // SAFETY: every element of `result` is written exactly once before being
    // read — `batch_iter_mut!` hands out disjoint sub-slices of `result`, and
    // the closure assigns `*result = H::hash_elements(..)` for each entry of
    // its batch.
    let mut result = unsafe { uninit_vector::<H::Digest>(evaluations[0].len()) };
    batch_iter_mut!(
        &mut result,
        128, // min batch size
        |batch: &mut [H::Digest], batch_offset: usize| {
            // scratch buffer holding one row; reused across the whole batch
            let mut row = vec![E::ZERO; evaluations.len()];
            for (i, result) in batch.iter_mut().enumerate() {
                read_row(evaluations, batch_offset + i, &mut row);
                *result = H::hash_elements(&row);
            }
        }
    );
    result
}
/// Copies the i-th entry of every column in `evaluations` into `row`.
///
/// Callers pass `row` with `row.len() == evaluations.len()`; since `zip`
/// truncates to the shorter side, any excess on either side is left untouched.
#[inline]
fn read_row<E: FieldElement>(evaluations: &[Vec<E>], i: usize, row: &mut [E]) {
    row.iter_mut()
        .zip(evaluations)
        .for_each(|(slot, column)| *slot = column[i]);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.