text stringlengths 8 4.13M |
|---|
mod tests;
mod serving;
use friday_vendor;
use vendor_scripts;
use friday_audio;
use friday_vad;
use friday_discovery;
use friday_logging;
use friday_signal;
use friday_inference;
use friday_inference::Model;
use tensorflow_models;
use friday_web::server::Server;
use std::sync::{Arc, Mutex};
fn main() {
    // TensorFlow model that spots keywords in recorded speech.
    let mut keyword_model = tensorflow_models::ddl::interface::DDL::new()
        .expect("Failed to load model");

    let audio_config = friday_audio::RecordingConfig::new(
        16000,
        keyword_model.expected_frame_size(),
    )
    .expect("Could not initialize 'RecordingConfig'");

    // Shared input audio stream (also used by the recording web-vendor).
    let audio_stream = friday_audio::friday_cpal::CPALIStream::record(&audio_config)
        .expect("Failed to start audio recording");

    // Port used by both the webserver and the discovery service.
    let service_port: u16 = 8000;

    // Webserver serving the GUI together with all of friday's endpoints.
    let mut web_server = Server::new().expect("Failed to create webserver");

    // Discovery services run on a separate thread.
    let discovery_service = friday_discovery::discovery::Discovery::new(service_port)
        .expect("Failed to create discovery");

    // Vendors.
    let scripts = vendor_scripts::vendor::Scripts::new()
        .expect("Failed to create 'Scripts' Vendor");

    web_server
        .register(vec![
            // Lets the model serve its own info over HTTP.
            Arc::new(Mutex::new(
                tensorflow_models::ddl::interface::WebDDL::new(&keyword_model),
            )),
            // HTTP frontend for the scripts vendor.
            Arc::new(Mutex::new(vendor_scripts::webvendor::WebScripts::new(
                &scripts,
            ))),
            // Discovery endpoints: set and get the device name and ping the
            // remote server for discoverfriday.se.
            Arc::new(Mutex::new(friday_discovery::webvendor::WebDiscovery::new(
                &discovery_service,
            ))),
            // Records and manipulates audio files on the assistant;
            // used to add keywords through the API.
            Arc::new(Mutex::new(
                friday_audio::web::record::api::WebRecord::new(audio_stream.clone())
                    .expect("Failed to create WebRecord"),
            )),
        ])
        .expect("Failed to register vendors");

    // Vendors that react to keyword detections.
    let vendors: Vec<Box<dyn friday_vendor::Vendor>> = vec![Box::new(scripts)];

    // Non-blocking webserver serving the web vendors.
    let web_handle = web_server
        .listen(format!("0.0.0.0:{}", service_port))
        .expect("Failed to start webserver");

    // Non-blocking discovery server that tries to make the assistant easy to find.
    let discovery_handle = discovery_service.make_discoverable();

    // Cheap voice activity detection; the TensorFlow model is only run
    // when this triggers.
    let mut voice_detector = friday_vad::vad_peaks::PeakBasedDetector::new()
        .expect("Failed to create VAD - PeakBasedDetector");

    let signaler = Box::new(friday_signal::mock::SilentMockDevice::new());

    // Serve friday using the main thread until it exits.
    serving::serve_friday(
        &mut voice_detector,
        &mut keyword_model,
        &vendors,
        audio_stream,
        signaler,
    );

    friday_logging::info!("Shutting Down Webserver.. Might take a few seconds");
    web_handle.stop();

    friday_logging::info!("Shutting Down Discovery Server.. Might take a few seconds");
    discovery_handle.stop();

    friday_logging::info!("All Done - Bye!");
}
|
use std::collections::HashSet;
use std::process::Command;
use std::io::Write;
use colored::*;
use chrono::{TimeZone, NaiveDateTime, Local};
use aur;
use aur::types::SearchResult;
/// Search both the pacman repositories and the AUR for packages mentioning
/// the given terms, returning `(pacman_output, aur_output)`.
pub fn search_packages<'a>(package_names: HashSet<&'a str>) -> (String, String) {
    (
        get_pacman_results(&package_names),
        get_aur_results(&package_names),
    )
}
/// Runs `pacman -Ss <terms...>` and returns its raw stdout as a string.
///
/// # Panics
///
/// Panics if `pacman` cannot be spawned or its output is not valid UTF-8.
fn get_pacman_results(package_names: &HashSet<&str>) -> String {
    // TODO: Use libalpm instead of directly calling pacman
    // TODO: Format pacman results
    let mut pacman_search_command = Command::new("pacman");
    pacman_search_command.arg("-Ss");
    for name in package_names {
        pacman_search_command.arg(name);
    }
    let pacman_output = pacman_search_command
        .output()
        .expect("Couldn't run pacman!")
        .stdout;
    String::from_utf8(pacman_output).expect("pacman output was not valid UTF-8")
}
/// Searches the AUR for the given terms and renders the results for display.
fn get_aur_results<'a>(package_names: &HashSet<&'a str>) -> String {
    format_results(aur::search(package_names))
}
/// Formats AUR search results (one entry per search term) into a single
/// colored, human-readable string.
///
/// Each successful search contributes one formatted entry per package found;
/// failed searches contribute their error message. All entries are joined
/// with newlines.
fn format_results(results: Vec<(String, Result<SearchResult, String>)>) -> String {
    let formatted_results: Vec<String> = results
        .into_iter()
        // NOTE: the original matched the whole `(String, Result<..>)` tuple
        // against `Ok(..)`/`Err(..)` patterns, which does not type-check;
        // destructure the tuple and match on the Result instead.
        .map(|(_term, result)| match result {
            Ok(package_result) => {
                let formatted_package_results: Vec<String> = package_result
                    .results
                    .iter()
                    .map(|package| {
                        let name = &package.name;
                        let desc = &package.description;
                        let url = &package.url;
                        let version = &package.version;
                        // A package without a maintainer is orphaned; highlight in red.
                        let maintainer = package
                            .maintainer
                            .clone()
                            .map(|m| m.normal())
                            .unwrap_or("Orphaned".red());
                        // Any non-zero `out_of_date` value means the package is flagged.
                        let outdated = package
                            .out_of_date
                            .map(|x| x != 0)
                            .unwrap_or(false);
                        let modified = Local.from_utc_datetime(&NaiveDateTime::from_timestamp(
                            package.last_modified,
                            0,
                        ));
                        let mut formatted_result = Vec::new();
                        // Writing into a Vec<u8> cannot fail; the io::Result is ignored.
                        let _ = write!(&mut formatted_result, "{}", "[AUR]\t".bold().on_green());
                        let _ = write!(
                            &mut formatted_result,
                            "{} ({}): {}\n\tMaintainer: {}\tURL: {}\n\tModified: {}",
                            name.bold(),
                            version.blue(),
                            desc,
                            maintainer,
                            url.cyan(),
                            modified.to_string()
                        );
                        if outdated {
                            let _ = write!(&mut formatted_result, " {}", "(Out of date)".red());
                        }
                        let _ = write!(&mut formatted_result, "\n");
                        match String::from_utf8(formatted_result) {
                            Ok(r) => r,
                            Err(e) => format!("{}", e),
                        }
                    })
                    .collect();
                formatted_package_results.join("\n")
            }
            Err(search_error) => format!("{}", search_error.bold().white().on_red()),
        })
        .collect();
    formatted_results.join("\n")
}
|
mod proto;
// use bytes::BufMut;
// use bytes::{Bytes, BytesMut};
// use futures::stream::TryStreamExt;
use byteorder::{BigEndian, ByteOrder};
use bytes::BufMut;
use hyper::body;
use hyper::service::{make_service_fn, service_fn};
use hyper::HeaderMap;
use hyper::{Body, Request, Response, Server, Version};
use prost::Message;
use std::{convert::Infallible, net::SocketAddr};
/// Minimal hand-rolled gRPC-style responder used for experimentation.
///
/// Drains the whole request body (only its length is printed), then answers
/// with a streamed body containing `"test\n"` followed by gRPC trailers
/// (`grpc-status: 0`, `grpc-message: OK`).
async fn handle(req: Request<Body>) -> Result<Response<Body>, Infallible> {
    // Collect the whole request body; only the length is of interest here.
    let bytes = body::to_bytes(req.into_body()).await.unwrap();
    println!("{}", bytes.len());

    // Stream the response through a channel so trailers can be sent after
    // the data frames.
    let (mut sender, body) = Body::channel();
    let res = Response::builder().header("Foo", "Bar").body(body).unwrap();

    sender
        .send_data(hyper::body::Bytes::from("test\n"))
        .await
        .unwrap();

    // gRPC signals success via HTTP/2 trailers rather than the status line.
    let mut trailers = HeaderMap::new();
    trailers.insert("grpc-status", "0".parse().unwrap());
    trailers.insert("grpc-message", "OK".parse().unwrap());
    sender.send_trailers(trailers).await.unwrap();

    Ok(res)
}
/// Experimental hand-rolled gRPC server built directly on hyper.
///
/// Accepts HTTP/2 requests, decodes the gRPC message framing
/// (1 compressed-flag byte, 4 big-endian length bytes, then the protobuf
/// payload), and replies with a framed `HelloReply` plus gRPC trailers.
#[tokio::main]
async fn main() {
    let addr = SocketAddr::from(([127, 0, 0, 1], 3000));
    let service = make_service_fn(|_conn| async {
        Ok::<_, hyper::Error>(service_fn(|req: Request<Body>| async move {
            if req.version() == Version::HTTP_2 {
                let (_parts, body) = req.into_parts();
                // Collect the whole request body.
                let body_buf = body::to_bytes(body).await.unwrap();
                // gRPC framing: flag byte, 4-byte length, then the message.
                let compressed_flag = &body_buf[0..1];
                let proto_len = &body_buf[1..5];
                let proto_buf = &body_buf[5..];
                println!(
                    "{:?}, {:?}, {:?}",
                    compressed_flag,
                    proto_len,
                    proto_buf.len()
                );
                let flags = BigEndian::read_uint(compressed_flag, 1);
                let len = BigEndian::read_uint(proto_len, 4);
                let body_len = proto_buf.len() as u64;
                // `len` should equal `body_len`.
                println!(
                    "flags {}, len {:?}, proto_body_len: {}",
                    flags, len, body_len
                );
                let _pp = proto::helloworld::HelloRequest::decode(proto_buf).unwrap();

                // Build the reply and wrap it in the same gRPC framing.
                let hello_reply = proto::helloworld::HelloReply {
                    message: "haha".to_owned(),
                };
                let mut reply_buf: Vec<u8> = Vec::new();
                hello_reply.encode(&mut reply_buf).unwrap();
                let reply_len = reply_buf.len() as u32;
                let mut len_buf = [0; 4];
                BigEndian::write_u32(&mut len_buf, reply_len);
                println!("{:?}", reply_len);
                let mut reply_body_buf = vec![0]; // byte 0 is the compressed flag
                reply_body_buf.put(&len_buf[..]);
                reply_body_buf.put(&reply_buf[..]);
                println!("{:?}", reply_body_buf);

                // Stream the body so trailers can follow the data frames.
                let (mut sender, body) = Body::channel();
                let res = Response::builder().header("Foo", "Bar").body(body).unwrap();
                sender.send_data(reply_body_buf.into()).await.unwrap();
                let mut trailers = HeaderMap::new();
                trailers.insert("grpc-status", "0".parse().unwrap());
                trailers.insert("grpc-message", "OK".parse().unwrap());
                sender.send_trailers(trailers).await.unwrap();
                Ok(res)
            } else {
                // Note: it's usually better to return a Response
                // with an appropriate StatusCode instead of an Err.
                Err("not HTTP/2, abort connection")
            }
        }))
    });
    let server = Server::bind(&addr).serve(service);
    println!("Listening on http://{}", addr);
    if let Err(e) = server.await {
        eprintln!("server error: {}", e);
    }
}
|
// Machine-generated (svd2rust-style) read-only accessor for the
// DDRCTRL_POISONSTAT register; each method below extracts one status bit
// from the 32-bit register value. Keep in sync with the SVD description
// rather than editing by hand.
#[doc = "Register `DDRCTRL_POISONSTAT` reader"]
pub type R = crate::R<DDRCTRL_POISONSTAT_SPEC>;
#[doc = "Field `WR_POISON_INTR_0` reader - WR_POISON_INTR_0"]
pub type WR_POISON_INTR_0_R = crate::BitReader;
#[doc = "Field `WR_POISON_INTR_1` reader - WR_POISON_INTR_1"]
pub type WR_POISON_INTR_1_R = crate::BitReader;
#[doc = "Field `RD_POISON_INTR_0` reader - RD_POISON_INTR_0"]
pub type RD_POISON_INTR_0_R = crate::BitReader;
#[doc = "Field `RD_POISON_INTR_1` reader - RD_POISON_INTR_1"]
pub type RD_POISON_INTR_1_R = crate::BitReader;
impl R {
    #[doc = "Bit 0 - WR_POISON_INTR_0"]
    #[inline(always)]
    pub fn wr_poison_intr_0(&self) -> WR_POISON_INTR_0_R {
        WR_POISON_INTR_0_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 1 - WR_POISON_INTR_1"]
    #[inline(always)]
    pub fn wr_poison_intr_1(&self) -> WR_POISON_INTR_1_R {
        WR_POISON_INTR_1_R::new(((self.bits >> 1) & 1) != 0)
    }
    #[doc = "Bit 16 - RD_POISON_INTR_0"]
    #[inline(always)]
    pub fn rd_poison_intr_0(&self) -> RD_POISON_INTR_0_R {
        RD_POISON_INTR_0_R::new(((self.bits >> 16) & 1) != 0)
    }
    #[doc = "Bit 17 - RD_POISON_INTR_1"]
    #[inline(always)]
    pub fn rd_poison_intr_1(&self) -> RD_POISON_INTR_1_R {
        RD_POISON_INTR_1_R::new(((self.bits >> 17) & 1) != 0)
    }
}
#[doc = "DDRCTRL AXI Poison status register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ddrctrl_poisonstat::R`](R). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DDRCTRL_POISONSTAT_SPEC;
impl crate::RegisterSpec for DDRCTRL_POISONSTAT_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ddrctrl_poisonstat::R`](R) reader structure"]
impl crate::Readable for DDRCTRL_POISONSTAT_SPEC {}
#[doc = "`reset()` method sets DDRCTRL_POISONSTAT to value 0"]
impl crate::Resettable for DDRCTRL_POISONSTAT_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
// NOTE(review): these feature gates require a nightly toolchain, and
// `untagged_unions` has since been removed from nightly (unions with
// non-Copy fields were stabilized) — confirm against the pinned toolchain
// before changing.
#![feature(untagged_unions)]
#![feature(c_variadic)]
// Crate root: the public module tree.
pub mod defs;
pub mod exceptions;
pub mod predefined;
pub mod symbols;
pub mod error;
|
use serde_json;
use hangouts;
use pblite::Message;
// TODO: Come up with better names for everything here.
// Generates this module's `Error`, `ErrorKind`, `Result` and `ResultExt`
// types; the `chain_err` calls and `Result<_>` return types below come
// from this expansion.
error_chain!{}
/// Top-level structure of a long-poll response: a JSON array of
/// per-channel arrays.
#[derive(Debug, PartialEq)]
pub struct ContainerArray {
    pub channel_arrays: Vec<ChannelArray>,
}

impl ContainerArray {
    /// Parses the outer JSON array, turning every element into a
    /// [`ChannelArray`].
    ///
    /// # Errors
    ///
    /// Fails if `string` is not valid JSON, if the top level is not an
    /// array, or if any contained channel array fails to parse.
    pub fn parse(string: &str) -> Result<Self> {
        let mut value =
            serde_json::from_str::<serde_json::Value>(string).chain_err(|| "failed to parse json")?;
        let array = value.as_array_mut().chain_err(|| "container is not array")?;
        // Drain so each element can be parsed by value without cloning.
        let channel_arrays = array
            .drain(..)
            .map(ChannelArray::parse)
            .collect::<Result<Vec<ChannelArray>>>()?;
        Ok(Self { channel_arrays })
    }
}
/// One element of the outer container: a numeric id plus the decoded payload.
#[derive(Debug, PartialEq)]
pub struct ChannelArray {
    pub array_id: u64,
    pub payload: ChannelPayload,
}

impl ChannelArray {
    /// Parses a single channel array of the form `[id, [payload, ...]]`.
    ///
    /// The payload may be the literal string `"noop"`, or an object whose
    /// `"p"` key holds a JSON-encoded string wrapping either a new client id
    /// (wrapper key `"3"`) or a pblite-encoded `BatchUpdate` (wrapper key
    /// `"2"`). Anything else becomes [`ChannelPayload::Unknown`].
    pub fn parse(value: serde_json::Value) -> Result<Self> {
        let inner_array = value.as_array().chain_err(|| "expected array")?;
        let array_id = inner_array
            .get(0)
            .and_then(|val| val.as_u64())
            .chain_err(|| "expected array id")?;
        let raw_data_array = inner_array.get(1).chain_err(|| "expected data array")?;
        // Indexing a serde_json::Value out of bounds yields Null, which the
        // catch-all arm below maps to Unknown.
        let payload = match &raw_data_array[0] {
            &serde_json::Value::String(ref s) if s == "noop" => ChannelPayload::Noop,
            &serde_json::Value::Object(ref m) => {
                // The interesting payload is double-encoded: a JSON string
                // under "p" that itself contains JSON.
                let wrapper_str = m.get("p")
                    .and_then(|val| val.as_str())
                    .chain_err(|| "expected string")?;
                let wrapper_val = serde_json::from_str::<serde_json::Value>(wrapper_str)
                    .chain_err(|| "failed to parse json")?;
                let new_client_id_array = wrapper_val.get("3");
                let new_proto_array = wrapper_val.get("2");
                if new_client_id_array.is_some() {
                    let client_id = new_client_id_array
                        .and_then(|value| value.get("2"))
                        .and_then(|value| value.as_str())
                        .chain_err(|| "failed to parse client id")?;
                    ChannelPayload::NewClientID(client_id.to_owned())
                } else if new_proto_array.is_some() {
                    let pblite_str = new_proto_array
                        .and_then(|obj| obj.get("2"))
                        .and_then(|val| val.as_str())
                        .chain_err(|| "failed to extract pblite string")?;
                    let mut pblite_val = serde_json::from_str::<serde_json::Value>(pblite_str)
                        .chain_err(|| "failed to parse pblite string as json")?;
                    let pblite_vec = pblite_val
                        .as_array_mut()
                        .chain_err(|| "pblite string is not array")?;
                    // Remove the pblite "header"
                    // TODO: This can panic, consider moving it to pblite instead.
                    pblite_vec.remove(0);
                    let batch_update = hangouts::BatchUpdate::from_vec(&pblite_vec)
                        .chain_err(|| "failed to parse BatchUpdate")?;
                    ChannelPayload::BatchUpdate(batch_update)
                } else {
                    ChannelPayload::Unknown
                }
            }
            _ => ChannelPayload::Unknown,
        };
        Ok(ChannelArray { array_id, payload })
    }
}
/// Decoded payload of a [`ChannelArray`].
#[derive(Debug, PartialEq)]
pub enum ChannelPayload {
    /// Keep-alive `"noop"` message.
    Noop,
    /// Payload shape not recognized by the parser.
    Unknown,
    /// Server assigned a new client id (wrapper key `"3"`).
    NewClientID(String),
    /// A pblite-encoded batch of updates (wrapper key `"2"`).
    BatchUpdate(hangouts::BatchUpdate),
}
#[cfg(test)]
mod tests {
    use hangouts;
    use channel_parser::{ChannelArray, ChannelPayload, ContainerArray};

    // The fixture strings below are captured long-poll frames; every level
    // of escaping is significant, so do not reformat them.

    #[test]
    fn test_parse_batch_update() {
        let proto = "[[5,[{\"p\":\"{\\\"1\\\":{\\\"1\\\":{\\\"1\\\":{\\\"1\\\":1,\\\"2\\\":1}},\\\"4\\\":\\\"1521002546965\\\",\\\"5\\\":\\\"S4\\\"},\\\"2\\\":{\\\"1\\\":{\\\"1\\\":\\\"babel\\\",\\\"2\\\":\\\"conserver.google.com\\\"},\\\"2\\\":\\\"[\\\\\\\"cbu\\\\\\\",[[[0,null,\\\\\\\"173955572810212329\\\\\\\",null,1521002546845000]\\\\n,null,null,null,null,null,null,null,null,null,null,null,null,[[[\\\\\\\"lcsw_hangouts_E5EC3DFB\\\\\\\",\\\\\\\"2DA6A88554072FCA\\\\\\\"]\\\\n,30]\\\\n]\\\\n]\\\\n]\\\\n]\\\\n\\\"}}\"}]]\n]\n";
        let batch_update = hangouts::BatchUpdate::default();
        let mut container_array = ContainerArray::parse(proto).unwrap();
        // Make the equality comparison simpler:
        // (only the payload variant is checked, not the decoded contents)
        container_array.channel_arrays[0].payload = ChannelPayload::BatchUpdate(batch_update);
        assert_eq!(
            container_array,
            ContainerArray {
                channel_arrays: vec![
                    ChannelArray {
                        array_id: 5,
                        payload: ChannelPayload::BatchUpdate(hangouts::BatchUpdate::default()),
                    },
                ],
            }
        );
    }

    #[test]
    fn test_parse_new_client_id() {
        let client_id = "[[2,[{\"p\":\"{\\\"1\\\":{\\\"1\\\":{\\\"1\\\":{\\\"1\\\":1,\\\"2\\\":1}},\\\"4\\\":\\\"1521086182842\\\",\\\"5\\\":\\\"S1\\\"},\\\"3\\\":{\\\"1\\\":{\\\"1\\\":1},\\\"2\\\":\\\"lcsw_hangouts_00BBCF28\\\"}}\"}]]\n]\n";
        let container_array = ContainerArray::parse(client_id).unwrap();
        assert_eq!(
            container_array,
            ContainerArray {
                channel_arrays: vec![
                    ChannelArray {
                        array_id: 2,
                        payload: ChannelPayload::NewClientID("lcsw_hangouts_00BBCF28".to_owned()),
                    },
                ],
            }
        );
    }

    #[test]
    fn test_parse_noop() {
        // The simplest possible frame: a bare "noop" keep-alive.
        let noop = "[[6,[\"noop\"]\n]\n]\n";
        let container_array = ContainerArray::parse(noop).unwrap();
        assert_eq!(
            container_array,
            ContainerArray {
                channel_arrays: vec![
                    ChannelArray {
                        array_id: 6,
                        payload: ChannelPayload::Noop,
                    },
                ],
            }
        );
    }
}
|
#[macro_use]
extern crate diesel;
extern crate elastic;
#[macro_use]
extern crate elastic_derive;
use serde::Deserialize;
pub mod constants;
pub mod schema;
pub mod db;
pub mod eventstore;
pub mod eventpublisher;
pub mod dao;
/// Runtime configuration, deserialized via serde.
#[derive(Deserialize, Debug)]
pub struct Config {
    // Connection string for the database (used by the diesel-backed modules).
    pub database_url: String,
    // Kafka broker addresses.
    pub kafka_brokers: Vec<String>,
    // Kafka consumer-group id used by the snapshotter.
    pub snapshotter_kafka_consume_group: String,
    // Kafka consumer-group id used by the projector.
    pub projector_kafka_consume_group: String,
    // Endpoint of the Elasticsearch cluster.
    pub elastic_search_endpoint: String,
}
|
use std::env;
use std::fs::File;
use std::io::prelude::BufRead;
use std::io::BufReader;
use std::collections::HashMap;
use regex::Regex;
use std::collections::BTreeMap;
extern crate regex;
/// Tracks how many times each word has been seen.
#[derive(Debug)]
struct WordStore {
    counts: HashMap<String, u64>
}

impl WordStore {
    /// Creates an empty store.
    fn new() -> WordStore {
        WordStore { counts: HashMap::new() }
    }

    /// Bumps the counter for `word`, starting from zero if unseen.
    fn increment(&mut self, word: &str) {
        *self.counts.entry(word.to_string()).or_insert(0) += 1;
    }

    /// Prints every word seen at least `min_frequency` times, in
    /// alphabetical order, one `word: count` pair per line.
    fn display(&self, min_frequency: u64) {
        let frequent: BTreeMap<_, _> = self.counts
            .iter()
            .filter(|&(_, count)| *count >= min_frequency)
            .collect();
        for (word, count) in frequent {
            println!("{}: {}", word, count);
        }
    }
}
/// Normalizes a single word for counting: lowercases it, collapses each run
/// of separator characters (space, comma, period, `!`, `?`) into one space,
/// then strips every character that is not `a-z` or a space.
///
/// Behaviorally equivalent to the previous regex pipeline
/// (`[ ,.!?]+` -> `" "`, then removing `[^a-z ]`), but without compiling
/// two regexes on every call — this function runs once per word.
fn normalize_word(word: &str) -> String {
    let lower = word.to_lowercase();
    // First pass: replace each maximal run of separators with a single
    // space (mirrors `replace_all("[ ,.!?]+", " ")`).
    let mut collapsed = String::with_capacity(lower.len());
    let mut in_separator_run = false;
    for c in lower.chars() {
        if c == ' ' || c == ',' || c == '.' || c == '!' || c == '?' {
            if !in_separator_run {
                collapsed.push(' ');
            }
            in_separator_run = true;
        } else {
            collapsed.push(c);
            in_separator_run = false;
        }
    }
    // Second pass: keep only lowercase ASCII letters and spaces
    // (mirrors removing `[^a-z ]`).
    collapsed
        .chars()
        .filter(|c| (*c >= 'a' && *c <= 'z') || *c == ' ')
        .collect()
}
fn main() {
let arguments: Vec<String> = env::args().collect();
println!("args {:?}", arguments);
let filename = arguments[1].clone();
let min_frequency: u64 = arguments[2].parse().unwrap();
let file = File::open(filename).expect("Could not open file");
let reader = BufReader::new(file);
let mut word_store = WordStore::new();
for line in reader.lines() {
let line = line.expect("Could not read line");
let words = line.split(" ");
for word in words {
if word == "" {
continue;
} else {
let normalized_word = normalize_word(word);
word_store.increment(&normalized_word);
}
}
}
word_store.display(min_frequency);
} |
use core::MatrixArray;
use core::dimension::{U1, U2, U3};
use geometry::{Rotation, SimilarityBase, UnitQuaternion, UnitComplex};
/// A D-dimensional similarity, backed by stack-allocated (`MatrixArray`)
/// storage and a rotation matrix for its rotation part.
pub type Similarity<N, D> = SimilarityBase<N, D, MatrixArray<N, D, U1>, Rotation<N, D>>;
/// A 2-dimensional similarity, with the rotation stored as a unit complex number.
pub type Similarity2<N> = SimilarityBase<N, U2, MatrixArray<N, U2, U1>, UnitComplex<N>>;
/// A 3-dimensional similarity, with the rotation stored as a unit quaternion.
pub type Similarity3<N> = SimilarityBase<N, U3, MatrixArray<N, U3, U1>, UnitQuaternion<N>>;
/// A 2-dimensional similarity using a rotation matrix for its rotation part.
pub type SimilarityMatrix2<N> = Similarity<N, U2>;
/// A 3-dimensional similarity using a rotation matrix for its rotation part.
pub type SimilarityMatrix3<N> = Similarity<N, U3>;
|
//! A backend module for implementing the iterator like
//! [`iterator`][crate::iterator] module and the asynchronous
//! adapter crates.
//!
//! This module contains generic types which abstract over the concrete
//! IO type for the self-pipe. The motivation for having this abstraction
//! are the adapter crates for different asynchronous runtimes. The runtimes
//! provide their own wrappers for [`std::os::unix::net::UnixStream`]
//! which should be used as the internal self pipe. But large parts of the
//! remaining functionality doesn't depend directly onto the IO type and can
//! be reused.
//!
//! See also the [`SignalDelivery::with_pipe`] method for more information
//! about requirements the IO types have to fulfill.
//!
//! As a regular user you shouldn't need to use the types in this module.
//! Use the [`Signals`][crate::iterator::Signals] struct or one of the types
//! contained in the adapter libraries instead.
use std::borrow::{Borrow, BorrowMut};
use std::fmt::{Debug, Formatter, Result as FmtResult};
use std::io::Error;
use std::mem::MaybeUninit;
use std::os::unix::io::AsRawFd;
use std::ptr;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::{Arc, Mutex};
use libc::{self, c_int};
use super::exfiltrator::Exfiltrator;
use crate::low_level::pipe::{self, WakeMethod};
use crate::SigId;
/// Maximal signal number we support.
const MAX_SIGNUM: usize = 128;

/// Write end of the self-pipe, object-safe so it can be stored type-erased.
trait SelfPipeWrite: Debug + Send + Sync {
    /// Wake up any readers blocked on the read end of the pipe.
    fn wake_readers(&self);
}

// Blanket impl: anything exposing a raw fd can act as the write end.
impl<W: AsRawFd + Debug + Send + Sync> SelfPipeWrite for W {
    fn wake_readers(&self) {
        pipe::wake(self.as_raw_fd(), WakeMethod::Send);
    }
}
/// Shared state: whether delivery was closed, and which signal
/// registrations belong to this instance (so they can be unregistered on drop).
#[derive(Debug)]
struct DeliveryState {
    // Set by Handle::close, observed by Handle::is_closed.
    closed: AtomicBool,
    // Indexed by signal number; Some(id) once that signal is registered.
    registered_signal_ids: Mutex<Vec<Option<SigId>>>,
}

impl DeliveryState {
    fn new() -> Self {
        // One slot per possible signal number, all initially unregistered.
        let ids = (0..MAX_SIGNUM).map(|_| None).collect();
        Self {
            closed: AtomicBool::new(false),
            registered_signal_ids: Mutex::new(ids),
        }
    }
}

impl Drop for DeliveryState {
    fn drop(&mut self) {
        // Unregister every signal action this instance installed.
        let lock = self.registered_signal_ids.lock().unwrap();
        for id in lock.iter().filter_map(|s| *s) {
            crate::low_level::unregister(id);
        }
    }
}
/// Per-signal storage slots, written from the signal handler and drained by
/// the [`Pending`] iterator.
struct PendingSignals<E: Exfiltrator> {
    exfiltrator: E,
    // One storage slot per possible signal number.
    slots: [E::Storage; MAX_SIGNUM],
}

impl<E: Exfiltrator> PendingSignals<E> {
    fn new(exfiltrator: E) -> Self {
        // Unfortunately, Default is not implemented for long arrays :-(
        //
        // Note that if the default impl panics, the already existing instances are leaked.
        let mut slots = MaybeUninit::<[E::Storage; MAX_SIGNUM]>::uninit();
        for i in 0..MAX_SIGNUM {
            unsafe {
                // SAFETY: `i` stays within the array bounds and each element
                // is written exactly once with ptr::write, so no
                // uninitialized memory is ever dropped.
                let slot: *mut E::Storage = slots.as_mut_ptr() as *mut _;
                let slot = slot.add(i);
                ptr::write(slot, E::Storage::default());
            }
        }
        Self {
            exfiltrator,
            // SAFETY: every one of the MAX_SIGNUM elements was initialized
            // by the loop above.
            slots: unsafe { slots.assume_init() },
        }
    }
}
/// An internal trait to hide adding new signals into a Handle behind a dynamic dispatch.
trait AddSignal: Debug + Send + Sync {
    /// Registers `signal` so its arrival is recorded and the given `write`
    /// end is used to wake readers of the self-pipe.
    fn add_signal(
        self: Arc<Self>,
        write: Arc<dyn SelfPipeWrite>,
        signal: c_int,
    ) -> Result<SigId, Error>;
}
// Implemented manually because 1.36.0 doesn't yet support Debug for [X; BIG_NUMBER].
impl<E: Exfiltrator> Debug for PendingSignals<E> {
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        fmt.debug_struct("PendingSignals")
            .field("exfiltrator", &self.exfiltrator)
            // While the array does not, the slice does implement Debug;
            // `&&self.slots[..]` coerces the array into a slice reference.
            .field("slots", &&self.slots[..])
            .finish()
    }
}
impl<E: Exfiltrator> AddSignal for PendingSignals<E> {
    fn add_signal(
        self: Arc<Self>,
        write: Arc<dyn SelfPipeWrite>,
        signal: c_int,
    ) -> Result<SigId, Error> {
        // Validate before touching the slot array; these panics match the
        // documented contract of Handle::add_signal.
        assert!(signal >= 0);
        assert!(
            (signal as usize) < MAX_SIGNUM,
            "Signal number {} too large. If your OS really supports such signal, file a bug",
            signal,
        );
        assert!(
            self.exfiltrator.supports_signal(signal),
            "Signal {} not supported by exfiltrator {:?}",
            signal,
            self.exfiltrator,
        );
        self.exfiltrator.init(&self.slots[signal as usize], signal);
        // This closure runs inside the signal handler: it stores the signal
        // info into the pre-initialized slot and pokes the self-pipe. It
        // captures `self` (Arc) and `write`, keeping both alive as long as
        // the action stays registered.
        let action = move |act: &_| {
            let slot = &self.slots[signal as usize];
            let ex = &self.exfiltrator;
            ex.store(slot, signal, act);
            write.wake_readers();
        };
        // SAFETY: register_sigaction requires the action to be
        // async-signal-safe; per the Exfiltrator contract, `store` and the
        // self-pipe write are designed to run inside a signal handler.
        let id = unsafe { signal_hook_registry::register_sigaction(signal, action) }?;
        Ok(id)
    }
}
/// A struct to control an instance of an associated type
/// (like for example [`Signals`][super::Signals]).
///
/// It allows to register more signal handlers and to shutdown the signal
/// delivery. You can [`clone`][Handle::clone] this type which isn't a
/// very expensive operation. The cloned instances can be shared between
/// multiple threads.
#[derive(Debug, Clone)]
pub struct Handle {
    // Type-erased slot storage; used to register new signals.
    pending: Arc<dyn AddSignal>,
    // Write end of the self-pipe; used to wake readers on close.
    write: Arc<dyn SelfPipeWrite>,
    // Closed flag plus the registration ids owned by this instance.
    delivery_state: Arc<DeliveryState>,
}

impl Handle {
    fn new<W>(write: W, pending: Arc<dyn AddSignal>) -> Self
    where
        W: 'static + SelfPipeWrite,
    {
        Self {
            pending,
            write: Arc::new(write),
            delivery_state: Arc::new(DeliveryState::new()),
        }
    }

    /// Registers another signal to the set watched by the associated instance.
    ///
    /// # Notes
    ///
    /// * This is safe to call concurrently from whatever thread.
    /// * This is *not* safe to call from within a signal handler.
    /// * If the signal number was already registered previously, this is a no-op.
    /// * If this errors, the original set of signals is left intact.
    ///
    /// # Panics
    ///
    /// * If the given signal is [forbidden][crate::FORBIDDEN].
    /// * If the signal number is negative or larger than internal limit. The limit should be
    ///   larger than any supported signal the OS supports.
    /// * If the relevant [`Exfiltrator`] does not support this particular signal. The default
    ///   [`SignalOnly`] one supports all signals.
    pub fn add_signal(&self, signal: c_int) -> Result<(), Error> {
        // The lock is held across the whole registration, so two concurrent
        // calls for the same signal cannot double-register it.
        let mut lock = self.delivery_state.registered_signal_ids.lock().unwrap();
        // Already registered, ignoring
        if lock[signal as usize].is_some() {
            return Ok(());
        }
        let id = Arc::clone(&self.pending).add_signal(Arc::clone(&self.write), signal)?;
        lock[signal as usize] = Some(id);
        Ok(())
    }

    /// Closes the associated instance.
    ///
    /// This is meant to signalize termination of the signal delivery process.
    /// After calling close:
    ///
    /// * [`is_closed`][Handle::is_closed] will return true.
    /// * All currently blocking operations of associated instances
    ///   are interrupted and terminate.
    /// * Any further operations will not block.
    /// * Further signals may or may not be returned from the iterators. However, if any are
    ///   returned, these are real signals that happened.
    ///
    /// The goal is to be able to shut down any background thread that handles only the signals.
    pub fn close(&self) {
        self.delivery_state.closed.store(true, Ordering::SeqCst);
        // Wake any blocked readers so they observe the closed flag.
        self.write.wake_readers();
    }

    /// Is it closed?
    ///
    /// See [`close`][Handle::close].
    pub fn is_closed(&self) -> bool {
        self.delivery_state.closed.load(Ordering::SeqCst)
    }
}
/// A struct for delivering received signals to the main program flow.
/// The self-pipe IO type is generic. See the
/// [`with_pipe`][SignalDelivery::with_pipe] method for requirements
/// for the IO type.
#[derive(Debug)]
pub struct SignalDelivery<R, E: Exfiltrator> {
    // Read end of the self-pipe.
    read: R,
    // Shared control handle (close flag, registrations, write end).
    handle: Handle,
    // Concretely-typed slot storage shared with the signal actions.
    pending: Arc<PendingSignals<E>>,
}

impl<R, E: Exfiltrator> SignalDelivery<R, E>
where
    R: 'static + AsRawFd + Send + Sync,
{
    /// Creates the `SignalDelivery` structure.
    ///
    /// The read and write arguments must be the ends of a suitable pipe type. These are used
    /// for communication between the signal handler and main program flow.
    ///
    /// Registers all the signals listed. The same restrictions (panics, errors) apply as with
    /// [`add_signal`][Handle::add_signal].
    ///
    /// # Requirements for the pipe type
    ///
    /// * Must support [`send`](https://man7.org/linux/man-pages/man2/send.2.html) for
    ///   asynchronously writing bytes to the write end
    /// * Must support [`recv`](https://man7.org/linux/man-pages/man2/recv.2.html) for
    ///   reading bytes from the read end
    ///
    /// So UnixStream is a good choice for this.
    pub fn with_pipe<I, S, W>(read: R, write: W, exfiltrator: E, signals: I) -> Result<Self, Error>
    where
        I: IntoIterator<Item = S>,
        S: Borrow<c_int>,
        W: 'static + AsRawFd + Debug + Send + Sync,
    {
        let pending = Arc::new(PendingSignals::new(exfiltrator));
        let pending_add_signal = Arc::clone(&pending);
        let handle = Handle::new(write, pending_add_signal);
        let me = Self {
            read,
            handle,
            pending,
        };
        // Register everything up front. If one registration fails, dropping
        // `me` (and with it the DeliveryState) unregisters the earlier ones.
        for sig in signals {
            me.handle.add_signal(*sig.borrow())?;
        }
        Ok(me)
    }

    /// Get a reference to the read end of the self pipe
    ///
    /// You may use this method to register the underlying file descriptor
    /// with an eventing system (e. g. epoll) to get notified if there are
    /// bytes in the pipe. If the event system reports the file descriptor
    /// ready for reading you can then call [`pending`][SignalDelivery::pending]
    /// to get the arrived signals.
    pub fn get_read(&self) -> &R {
        &self.read
    }

    /// Get a mutable reference to the read end of the self pipe
    ///
    /// See the [`get_read`][SignalDelivery::get_read] method for some additional
    /// information.
    pub fn get_read_mut(&mut self) -> &mut R {
        &mut self.read
    }

    /// Drains all data from the internal self-pipe. This method will never block.
    fn flush(&mut self) {
        const SIZE: usize = 1024;
        let mut buff = [0u8; SIZE];
        unsafe {
            // Draining the data in the self pipe. We ignore all errors on purpose. This
            // should not be something like closed file descriptor. It could EAGAIN, but
            // that's OK in case we say MSG_DONTWAIT. If it's EINTR, then it's OK too,
            // it'll only create a spurious wakeup.
            #[cfg(target_os = "aix")]
            let nowait_flag = libc::MSG_NONBLOCK;
            #[cfg(not(target_os = "aix"))]
            let nowait_flag = libc::MSG_DONTWAIT;
            while libc::recv(
                self.read.as_raw_fd(),
                buff.as_mut_ptr() as *mut libc::c_void,
                SIZE,
                nowait_flag,
            ) > 0
            {}
        }
    }

    /// Returns an iterator of already received signals.
    ///
    /// This returns an iterator over all the signal numbers of the signals received since last
    /// time they were read (out of the set registered by this `SignalDelivery` instance). Note
    /// that they are returned in arbitrary order and a signal number is returned only once even
    /// if it was received multiple times.
    ///
    /// This method returns immediately (does not block) and may produce an empty iterator if
    /// there are no signals ready.
    pub fn pending(&mut self) -> Pending<E> {
        // Drain the wakeup bytes from the self-pipe before collecting.
        self.flush();
        Pending::new(Arc::clone(&self.pending))
    }

    /// Checks the reading end of the self pipe for available signals.
    ///
    /// If there are no signals available or this instance was already closed it returns
    /// [`Option::None`]. If there are some signals it returns a [`Pending`]
    /// instance wrapped inside a [`Option::Some`]. However, due to implementation details,
    /// this still can produce an empty iterator.
    ///
    /// This method doesn't check the reading end by itself but uses the passed in callback.
    /// This method blocks if and only if the callback blocks trying to read some bytes.
    pub fn poll_pending<F>(&mut self, has_signals: &mut F) -> Result<Option<Pending<E>>, Error>
    where
        F: FnMut(&mut R) -> Result<bool, Error>,
    {
        if self.handle.is_closed() {
            return Ok(None);
        }
        match has_signals(self.get_read_mut()) {
            Ok(false) => Ok(None),
            Ok(true) => Ok(Some(self.pending())),
            Err(err) => Err(err),
        }
    }

    /// Get a [`Handle`] for this `SignalDelivery` instance.
    ///
    /// This can be used to add further signals or close the whole
    /// signal delivery mechanism.
    pub fn handle(&self) -> Handle {
        self.handle.clone()
    }
}
/// The iterator of one batch of signals.
///
/// This is returned by the [`pending`][SignalDelivery::pending] method.
#[derive(Debug)]
pub struct Pending<E: Exfiltrator> {
    pending: Arc<PendingSignals<E>>,
    // Index of the next slot to examine.
    position: usize,
}

impl<E: Exfiltrator> Pending<E> {
    fn new(pending: Arc<PendingSignals<E>>) -> Self {
        Self {
            pending,
            position: 0,
        }
    }
}
impl<E: Exfiltrator> Iterator for Pending<E> {
    type Item = E::Output;

    /// Scans the slots from the current position and yields the next stored
    /// signal. The position only advances once a slot reports no more
    /// signals, so a slot holding several pending events is drained fully
    /// before moving on.
    fn next(&mut self) -> Option<E::Output> {
        loop {
            if self.position >= self.pending.slots.len() {
                return None;
            }
            let sig = self.position;
            let loaded = self
                .pending
                .exfiltrator
                .load(&self.pending.slots[sig], sig as c_int);
            if loaded.is_some() {
                return loaded;
            }
            self.position += 1;
        }
    }
}
/// Possible results of the [`poll_signal`][SignalIterator::poll_signal] function.
// A bespoke enum (instead of nesting Result/Option) so all four outcomes are
// explicit at the call site.
pub enum PollResult<O> {
    /// A signal arrived
    Signal(O),
    /// There are no signals yet but there may arrive some in the future
    Pending,
    /// The iterator was closed. There won't be any signals reported from now on.
    Closed,
    /// An error happened during polling for arrived signals.
    Err(Error),
}
/// An infinite iterator of received signals.
pub struct SignalIterator<SD, E: Exfiltrator> {
    // Owned or borrowed SignalDelivery (see the type aliases below).
    signals: SD,
    // The batch currently being drained.
    iter: Pending<E>,
}

impl<SD, E: Exfiltrator> SignalIterator<SD, E> {
    /// Create a new infinite iterator for signals registered with the passed
    /// in [`SignalDelivery`] instance.
    pub fn new<R>(mut signals: SD) -> Self
    where
        SD: BorrowMut<SignalDelivery<R, E>>,
        R: 'static + AsRawFd + Send + Sync,
    {
        // Prime the iterator with whatever signals are already pending.
        let iter = signals.borrow_mut().pending();
        Self { signals, iter }
    }

    /// Return a signal if there is one or tell the caller that there is none at the moment.
    ///
    /// You have to pass in a callback which checks the underlying reading end of the pipe if
    /// there may be any pending signals. This callback may or may not block. If the callback
    /// returns [`true`] this method will try to fetch the next signal and return it as a
    /// [`PollResult::Signal`]. If the callback returns [`false`] the method will return
    /// [`PollResult::Pending`] and assume it will be called again at a later point in time.
    /// The callback may be called any number of times by this function.
    ///
    /// If the iterator was closed by the [`close`][Handle::close] method of the associated
    /// [`Handle`] this method will return [`PollResult::Closed`].
    pub fn poll_signal<R, F>(&mut self, has_signals: &mut F) -> PollResult<E::Output>
    where
        SD: BorrowMut<SignalDelivery<R, E>>,
        R: 'static + AsRawFd + Send + Sync,
        F: FnMut(&mut R) -> Result<bool, Error>,
    {
        // The loop is necessary because it is possible that a signal was already consumed
        // by a previous pending iterator due to the asynchronous nature of signals and
        // always moving to the end of the iterator before calling has_more.
        while !self.signals.borrow_mut().handle.is_closed() {
            if let Some(result) = self.iter.next() {
                return PollResult::Signal(result);
            }
            match self.signals.borrow_mut().poll_pending(has_signals) {
                Ok(Some(pending)) => self.iter = pending,
                Ok(None) => return PollResult::Pending,
                Err(err) => return PollResult::Err(err),
            }
        }
        PollResult::Closed
    }

    /// Get a shareable [`Handle`] for this instance.
    ///
    /// This can be used to add further signals or terminate the whole
    /// signal iteration using the [`close`][Handle::close] method.
    pub fn handle<R>(&self) -> Handle
    where
        SD: Borrow<SignalDelivery<R, E>>,
        R: 'static + AsRawFd + Send + Sync,
    {
        self.signals.borrow().handle()
    }
}
/// A signal iterator which consumes a [`SignalDelivery`] instance and takes
/// ownership of it.
// Use this when no other code needs the delivery after iteration starts.
pub type OwningSignalIterator<R, E> = SignalIterator<SignalDelivery<R, E>, E>;
/// A signal iterator which takes a mutable reference to a [`SignalDelivery`]
/// instance.
// Use this when the delivery must outlive the iterator.
pub type RefSignalIterator<'a, R, E> = SignalIterator<&'a mut SignalDelivery<R, E>, E>;
|
use std::fmt;
use std::str::CowString;
use super::escs::{ANSI_D};
use super::nav::{save_cursor,
restore_cursor,
jump_string,
Point,
Frame};
/// XString: these are the valid types of string for xterm markup.
///
/// This will eventually be hidden.
pub enum XString<'b> {
    // Esc: an escape sequence that is not a cursor jump.
    Esc(CowString<'b>),
    // Jump: an `esc[` cursor-movement sequence.
    Jump(CowString<'b>),
    // Text: plain printable content.
    Text(CowString<'b>)}
impl<'b> fmt::Show for XString<'b> {
    /// All three variants format as their raw contained string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            XString::Esc(ref inner)
            | XString::Jump(ref inner)
            | XString::Text(ref inner) => write!(f, "{}", inner),
        }
    }
}
/// XVec: represents a composed string.
///
/// There are three types: Text contains the content, Esc is an escaped
/// sequence which is not a jump, and Jump is a esc[ which is.
pub struct XVec<'b> {
    // Ordered segments that concatenate into the full marked-up string.
    pub v: Vec<XString<'b>>
}
impl<'b> XVec<'b> {
    /// Writes every segment to stdout, in order, with no trailing newline.
    pub fn print(&self) -> () {
        for seg in self.v.iter() {
            let s = match *seg {
                XString::Esc(ref s) => s,
                XString::Jump(ref s) => s,
                XString::Text(ref s) => s,
            };
            print!("{}", s);
        }
    }

    /// Prints the composed string between two ANSI_D sequences, saving the
    /// cursor beforehand and restoring it afterwards.
    pub fn print_clean(&self) -> () {
        save_cursor();
        print!("{}", ANSI_D);
        self.print();
        print!("{}", ANSI_D);
        restore_cursor();
    }
    /*
    pub fn to_string(&self) -> String {
    let mut s = "".to_string();
    for q in self.v.iter() {
    match *q {
    XString::Esc(ref q) => s = s.push_str(),
    XString::Jump(ref q) => s = s.push_str(q),
    XString::Text(ref q) => s = s.push_str(q),
    }
    }
    s
    }
    */
}
/*
impl<'b> fmt::Show for XVec<'b> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
}
}
*/
/// Prints a single segment on its own line, regardless of variant.
pub fn print_x(xstr: XString) -> () {
    let s = match xstr {
        XString::Esc(s) => s,
        XString::Jump(s) => s,
        XString::Text(s) => s,
    };
    println!("{}", s);
}
/// Wraps the escape sequence that moves the cursor to `pt` in a Jump segment.
pub fn make_jump<'b>(pt: Point) -> XString<'b> {
    let seq = jump_string(pt);
    XString::Jump(seq.into_cow())
}
/// Splits `s` on newlines, yielding one Text segment per line.
pub fn line_split<'b>(s: &'b str) -> XVec<'b> {
    let segments = s
        .split('\n')
        .map(|line| XString::Text(line.into_cow()))
        .collect();
    XVec { v: segments }
}
|
// NOTE(review): svd2rust-generated peripheral layout (see the doc links on the
// accessor types below); `_reservedN` pads encode the gaps between the register
// offsets listed in the field docs — do not edit by hand.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - SRAM/NOR flash control register 0"]
    pub snctl0: SNCTL0,
    #[doc = "0x04 - SRAM/NOR flash timing configuration register 0"]
    pub sntcfg0: SNTCFG0,
    #[doc = "0x08 - SRAM/NOR flash control register 1"]
    pub snctl1: SNCTL1,
    #[doc = "0x0c - SRAM/NOR flash timing configuration register 1"]
    pub sntcfg1: SNTCFG1,
    #[doc = "0x10 - SRAM/NOR flash control register 2"]
    pub snctl2: SNCTL2,
    #[doc = "0x14 - SRAM/NOR flash timing configuration register 2"]
    pub sntcfg2: SNTCFG2,
    #[doc = "0x18 - SRAM/NOR flash control register 3"]
    pub snctl3: SNCTL3,
    #[doc = "0x1c - SRAM/NOR flash timing configuration register 3"]
    pub sntcfg3: SNTCFG3,
    _reserved8: [u8; 64usize],
    #[doc = "0x60 - NAND flash/PC card control register 1"]
    pub npctl1: NPCTL1,
    #[doc = "0x64 - NAND flash/PC card interrupt enable register 1"]
    pub npinten1: NPINTEN1,
    #[doc = "0x68 - NAND flash/PC card common space timing configuration register 1"]
    pub npctcfg1: NPCTCFG1,
    #[doc = "0x6c - NAND flash/PC card attribute space timing configuration register 1"]
    pub npatcfg1: NPATCFG1,
    _reserved12: [u8; 4usize],
    #[doc = "0x74 - NAND flash ECC register 1"]
    pub necc1: NECC1,
    _reserved13: [u8; 8usize],
    #[doc = "0x80 - NAND flash/PC card control register 2"]
    pub npctl2: NPCTL2,
    #[doc = "0x84 - NAND flash/PC card interrupt enable register 2"]
    pub npinten2: NPINTEN2,
    #[doc = "0x88 - NAND flash/PC card common space timing configuration register 2"]
    pub npctcfg2: NPCTCFG2,
    #[doc = "0x8c - NAND flash/PC card attribute space timing configuration register 2"]
    pub npatcfg2: NPATCFG2,
    _reserved17: [u8; 4usize],
    #[doc = "0x94 - NAND flash ECC register 2"]
    pub necc2: NECC2,
    _reserved18: [u8; 8usize],
    #[doc = "0xa0 - NAND flash/PC card control register 3"]
    pub npctl3: NPCTL3,
    #[doc = "0xa4 - NAND flash/PC card interrupt enable register 3"]
    pub npinten3: NPINTEN3,
    #[doc = "0xa8 - NAND flash/PC card common space timing configuration register 3"]
    pub npctcfg3: NPCTCFG3,
    #[doc = "0xac - NAND flash/PC card attribute space timing configuration register 3"]
    pub npatcfg3: NPATCFG3,
    #[doc = "0xb0 - PC card I/O space timing configuration register"]
    pub piotcfg3: PIOTCFG3,
    _reserved23: [u8; 80usize],
    #[doc = "0x104 - SRAM/NOR flash write timing configuration register 0"]
    pub snwtcfg0: SNWTCFG0,
    _reserved24: [u8; 4usize],
    #[doc = "0x10c - SRAM/NOR flash write timing configuration register 1"]
    pub snwtcfg1: SNWTCFG1,
    _reserved25: [u8; 4usize],
    #[doc = "0x114 - SRAM/NOR flash write timing configuration register 2"]
    pub snwtcfg2: SNWTCFG2,
    _reserved26: [u8; 4usize],
    #[doc = "0x11c - SRAM/NOR flash write timing configuration register 3"]
    pub snwtcfg3: SNWTCFG3,
}
// NOTE(review): svd2rust-generated accessor types — one `Reg` alias, marker
// struct, Readable/Writable impl and field module per register. NECC1/NECC2
// are read-only (no Writable impl). Do not edit by hand.
#[doc = "SRAM/NOR flash control register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snctl0](snctl0) module"]
pub type SNCTL0 = crate::Reg<u32, _SNCTL0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNCTL0;
#[doc = "`read()` method returns [snctl0::R](snctl0::R) reader structure"]
impl crate::Readable for SNCTL0 {}
#[doc = "`write(|w| ..)` method takes [snctl0::W](snctl0::W) writer structure"]
impl crate::Writable for SNCTL0 {}
#[doc = "SRAM/NOR flash control register 0"]
pub mod snctl0;
#[doc = "SRAM/NOR flash timing configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sntcfg0](sntcfg0) module"]
pub type SNTCFG0 = crate::Reg<u32, _SNTCFG0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNTCFG0;
#[doc = "`read()` method returns [sntcfg0::R](sntcfg0::R) reader structure"]
impl crate::Readable for SNTCFG0 {}
#[doc = "`write(|w| ..)` method takes [sntcfg0::W](sntcfg0::W) writer structure"]
impl crate::Writable for SNTCFG0 {}
#[doc = "SRAM/NOR flash timing configuration register 0"]
pub mod sntcfg0;
#[doc = "SRAM/NOR flash control register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snctl1](snctl1) module"]
pub type SNCTL1 = crate::Reg<u32, _SNCTL1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNCTL1;
#[doc = "`read()` method returns [snctl1::R](snctl1::R) reader structure"]
impl crate::Readable for SNCTL1 {}
#[doc = "`write(|w| ..)` method takes [snctl1::W](snctl1::W) writer structure"]
impl crate::Writable for SNCTL1 {}
#[doc = "SRAM/NOR flash control register 1"]
pub mod snctl1;
#[doc = "SRAM/NOR flash timing configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sntcfg1](sntcfg1) module"]
pub type SNTCFG1 = crate::Reg<u32, _SNTCFG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNTCFG1;
#[doc = "`read()` method returns [sntcfg1::R](sntcfg1::R) reader structure"]
impl crate::Readable for SNTCFG1 {}
#[doc = "`write(|w| ..)` method takes [sntcfg1::W](sntcfg1::W) writer structure"]
impl crate::Writable for SNTCFG1 {}
#[doc = "SRAM/NOR flash timing configuration register 1"]
pub mod sntcfg1;
#[doc = "SRAM/NOR flash control register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snctl2](snctl2) module"]
pub type SNCTL2 = crate::Reg<u32, _SNCTL2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNCTL2;
#[doc = "`read()` method returns [snctl2::R](snctl2::R) reader structure"]
impl crate::Readable for SNCTL2 {}
#[doc = "`write(|w| ..)` method takes [snctl2::W](snctl2::W) writer structure"]
impl crate::Writable for SNCTL2 {}
#[doc = "SRAM/NOR flash control register 2"]
pub mod snctl2;
#[doc = "SRAM/NOR flash timing configuration register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sntcfg2](sntcfg2) module"]
pub type SNTCFG2 = crate::Reg<u32, _SNTCFG2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNTCFG2;
#[doc = "`read()` method returns [sntcfg2::R](sntcfg2::R) reader structure"]
impl crate::Readable for SNTCFG2 {}
#[doc = "`write(|w| ..)` method takes [sntcfg2::W](sntcfg2::W) writer structure"]
impl crate::Writable for SNTCFG2 {}
#[doc = "SRAM/NOR flash timing configuration register 2"]
pub mod sntcfg2;
#[doc = "SRAM/NOR flash control register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snctl3](snctl3) module"]
pub type SNCTL3 = crate::Reg<u32, _SNCTL3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNCTL3;
#[doc = "`read()` method returns [snctl3::R](snctl3::R) reader structure"]
impl crate::Readable for SNCTL3 {}
#[doc = "`write(|w| ..)` method takes [snctl3::W](snctl3::W) writer structure"]
impl crate::Writable for SNCTL3 {}
#[doc = "SRAM/NOR flash control register 3"]
pub mod snctl3;
#[doc = "SRAM/NOR flash timing configuration register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sntcfg3](sntcfg3) module"]
pub type SNTCFG3 = crate::Reg<u32, _SNTCFG3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNTCFG3;
#[doc = "`read()` method returns [sntcfg3::R](sntcfg3::R) reader structure"]
impl crate::Readable for SNTCFG3 {}
#[doc = "`write(|w| ..)` method takes [sntcfg3::W](sntcfg3::W) writer structure"]
impl crate::Writable for SNTCFG3 {}
#[doc = "SRAM/NOR flash timing configuration register 3"]
pub mod sntcfg3;
#[doc = "SRAM/NOR flash write timing configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snwtcfg0](snwtcfg0) module"]
pub type SNWTCFG0 = crate::Reg<u32, _SNWTCFG0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNWTCFG0;
#[doc = "`read()` method returns [snwtcfg0::R](snwtcfg0::R) reader structure"]
impl crate::Readable for SNWTCFG0 {}
#[doc = "`write(|w| ..)` method takes [snwtcfg0::W](snwtcfg0::W) writer structure"]
impl crate::Writable for SNWTCFG0 {}
#[doc = "SRAM/NOR flash write timing configuration register 0"]
pub mod snwtcfg0;
#[doc = "SRAM/NOR flash write timing configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snwtcfg1](snwtcfg1) module"]
pub type SNWTCFG1 = crate::Reg<u32, _SNWTCFG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNWTCFG1;
#[doc = "`read()` method returns [snwtcfg1::R](snwtcfg1::R) reader structure"]
impl crate::Readable for SNWTCFG1 {}
#[doc = "`write(|w| ..)` method takes [snwtcfg1::W](snwtcfg1::W) writer structure"]
impl crate::Writable for SNWTCFG1 {}
#[doc = "SRAM/NOR flash write timing configuration register 1"]
pub mod snwtcfg1;
#[doc = "SRAM/NOR flash write timing configuration register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snwtcfg2](snwtcfg2) module"]
pub type SNWTCFG2 = crate::Reg<u32, _SNWTCFG2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNWTCFG2;
#[doc = "`read()` method returns [snwtcfg2::R](snwtcfg2::R) reader structure"]
impl crate::Readable for SNWTCFG2 {}
#[doc = "`write(|w| ..)` method takes [snwtcfg2::W](snwtcfg2::W) writer structure"]
impl crate::Writable for SNWTCFG2 {}
#[doc = "SRAM/NOR flash write timing configuration register 2"]
pub mod snwtcfg2;
#[doc = "SRAM/NOR flash write timing configuration register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [snwtcfg3](snwtcfg3) module"]
pub type SNWTCFG3 = crate::Reg<u32, _SNWTCFG3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _SNWTCFG3;
#[doc = "`read()` method returns [snwtcfg3::R](snwtcfg3::R) reader structure"]
impl crate::Readable for SNWTCFG3 {}
#[doc = "`write(|w| ..)` method takes [snwtcfg3::W](snwtcfg3::W) writer structure"]
impl crate::Writable for SNWTCFG3 {}
#[doc = "SRAM/NOR flash write timing configuration register 3"]
pub mod snwtcfg3;
#[doc = "NAND flash/PC card control register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npctl1](npctl1) module"]
pub type NPCTL1 = crate::Reg<u32, _NPCTL1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPCTL1;
#[doc = "`read()` method returns [npctl1::R](npctl1::R) reader structure"]
impl crate::Readable for NPCTL1 {}
#[doc = "`write(|w| ..)` method takes [npctl1::W](npctl1::W) writer structure"]
impl crate::Writable for NPCTL1 {}
#[doc = "NAND flash/PC card control register 1"]
pub mod npctl1;
#[doc = "NAND flash/PC card control register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npctl2](npctl2) module"]
pub type NPCTL2 = crate::Reg<u32, _NPCTL2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPCTL2;
#[doc = "`read()` method returns [npctl2::R](npctl2::R) reader structure"]
impl crate::Readable for NPCTL2 {}
#[doc = "`write(|w| ..)` method takes [npctl2::W](npctl2::W) writer structure"]
impl crate::Writable for NPCTL2 {}
#[doc = "NAND flash/PC card control register 2"]
pub mod npctl2;
#[doc = "NAND flash/PC card control register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npctl3](npctl3) module"]
pub type NPCTL3 = crate::Reg<u32, _NPCTL3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPCTL3;
#[doc = "`read()` method returns [npctl3::R](npctl3::R) reader structure"]
impl crate::Readable for NPCTL3 {}
#[doc = "`write(|w| ..)` method takes [npctl3::W](npctl3::W) writer structure"]
impl crate::Writable for NPCTL3 {}
#[doc = "NAND flash/PC card control register 3"]
pub mod npctl3;
#[doc = "NAND flash/PC card interrupt enable register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npinten1](npinten1) module"]
pub type NPINTEN1 = crate::Reg<u32, _NPINTEN1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPINTEN1;
#[doc = "`read()` method returns [npinten1::R](npinten1::R) reader structure"]
impl crate::Readable for NPINTEN1 {}
#[doc = "`write(|w| ..)` method takes [npinten1::W](npinten1::W) writer structure"]
impl crate::Writable for NPINTEN1 {}
#[doc = "NAND flash/PC card interrupt enable register 1"]
pub mod npinten1;
#[doc = "NAND flash/PC card interrupt enable register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npinten2](npinten2) module"]
pub type NPINTEN2 = crate::Reg<u32, _NPINTEN2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPINTEN2;
#[doc = "`read()` method returns [npinten2::R](npinten2::R) reader structure"]
impl crate::Readable for NPINTEN2 {}
#[doc = "`write(|w| ..)` method takes [npinten2::W](npinten2::W) writer structure"]
impl crate::Writable for NPINTEN2 {}
#[doc = "NAND flash/PC card interrupt enable register 2"]
pub mod npinten2;
#[doc = "NAND flash/PC card interrupt enable register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npinten3](npinten3) module"]
pub type NPINTEN3 = crate::Reg<u32, _NPINTEN3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPINTEN3;
#[doc = "`read()` method returns [npinten3::R](npinten3::R) reader structure"]
impl crate::Readable for NPINTEN3 {}
#[doc = "`write(|w| ..)` method takes [npinten3::W](npinten3::W) writer structure"]
impl crate::Writable for NPINTEN3 {}
#[doc = "NAND flash/PC card interrupt enable register 3"]
pub mod npinten3;
#[doc = "NAND flash/PC card common space timing configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npctcfg1](npctcfg1) module"]
pub type NPCTCFG1 = crate::Reg<u32, _NPCTCFG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPCTCFG1;
#[doc = "`read()` method returns [npctcfg1::R](npctcfg1::R) reader structure"]
impl crate::Readable for NPCTCFG1 {}
#[doc = "`write(|w| ..)` method takes [npctcfg1::W](npctcfg1::W) writer structure"]
impl crate::Writable for NPCTCFG1 {}
#[doc = "NAND flash/PC card common space timing configuration register 1"]
pub mod npctcfg1;
#[doc = "NAND flash/PC card common space timing configuration register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npctcfg2](npctcfg2) module"]
pub type NPCTCFG2 = crate::Reg<u32, _NPCTCFG2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPCTCFG2;
#[doc = "`read()` method returns [npctcfg2::R](npctcfg2::R) reader structure"]
impl crate::Readable for NPCTCFG2 {}
#[doc = "`write(|w| ..)` method takes [npctcfg2::W](npctcfg2::W) writer structure"]
impl crate::Writable for NPCTCFG2 {}
#[doc = "NAND flash/PC card common space timing configuration register 2"]
pub mod npctcfg2;
#[doc = "NAND flash/PC card common space timing configuration register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npctcfg3](npctcfg3) module"]
pub type NPCTCFG3 = crate::Reg<u32, _NPCTCFG3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPCTCFG3;
#[doc = "`read()` method returns [npctcfg3::R](npctcfg3::R) reader structure"]
impl crate::Readable for NPCTCFG3 {}
#[doc = "`write(|w| ..)` method takes [npctcfg3::W](npctcfg3::W) writer structure"]
impl crate::Writable for NPCTCFG3 {}
#[doc = "NAND flash/PC card common space timing configuration register 3"]
pub mod npctcfg3;
#[doc = "NAND flash/PC card attribute space timing configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npatcfg1](npatcfg1) module"]
pub type NPATCFG1 = crate::Reg<u32, _NPATCFG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPATCFG1;
#[doc = "`read()` method returns [npatcfg1::R](npatcfg1::R) reader structure"]
impl crate::Readable for NPATCFG1 {}
#[doc = "`write(|w| ..)` method takes [npatcfg1::W](npatcfg1::W) writer structure"]
impl crate::Writable for NPATCFG1 {}
#[doc = "NAND flash/PC card attribute space timing configuration register 1"]
pub mod npatcfg1;
#[doc = "NAND flash/PC card attribute space timing configuration register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npatcfg2](npatcfg2) module"]
pub type NPATCFG2 = crate::Reg<u32, _NPATCFG2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPATCFG2;
#[doc = "`read()` method returns [npatcfg2::R](npatcfg2::R) reader structure"]
impl crate::Readable for NPATCFG2 {}
#[doc = "`write(|w| ..)` method takes [npatcfg2::W](npatcfg2::W) writer structure"]
impl crate::Writable for NPATCFG2 {}
#[doc = "NAND flash/PC card attribute space timing configuration register 2"]
pub mod npatcfg2;
#[doc = "NAND flash/PC card attribute space timing configuration register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [npatcfg3](npatcfg3) module"]
pub type NPATCFG3 = crate::Reg<u32, _NPATCFG3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NPATCFG3;
#[doc = "`read()` method returns [npatcfg3::R](npatcfg3::R) reader structure"]
impl crate::Readable for NPATCFG3 {}
#[doc = "`write(|w| ..)` method takes [npatcfg3::W](npatcfg3::W) writer structure"]
impl crate::Writable for NPATCFG3 {}
#[doc = "NAND flash/PC card attribute space timing configuration register 3"]
pub mod npatcfg3;
#[doc = "PC card I/O space timing configuration register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [piotcfg3](piotcfg3) module"]
pub type PIOTCFG3 = crate::Reg<u32, _PIOTCFG3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PIOTCFG3;
#[doc = "`read()` method returns [piotcfg3::R](piotcfg3::R) reader structure"]
impl crate::Readable for PIOTCFG3 {}
#[doc = "`write(|w| ..)` method takes [piotcfg3::W](piotcfg3::W) writer structure"]
impl crate::Writable for PIOTCFG3 {}
#[doc = "PC card I/O space timing configuration register"]
pub mod piotcfg3;
#[doc = "NAND flash ECC register 1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [necc1](necc1) module"]
pub type NECC1 = crate::Reg<u32, _NECC1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NECC1;
#[doc = "`read()` method returns [necc1::R](necc1::R) reader structure"]
impl crate::Readable for NECC1 {}
#[doc = "NAND flash ECC register 1"]
pub mod necc1;
#[doc = "NAND flash ECC register 2\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [necc2](necc2) module"]
pub type NECC2 = crate::Reg<u32, _NECC2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _NECC2;
#[doc = "`read()` method returns [necc2::R](necc2::R) reader structure"]
impl crate::Readable for NECC2 {}
#[doc = "NAND flash ECC register 2"]
pub mod necc2;
|
use regex::Regex;
use std::cell::RefCell;
use std::collections::HashMap;
use std::io::{self};
/// A grammar production: either a single literal character or a list of
/// '|'-separated alternatives, each a space-separated sequence of rule ids.
struct Rule {
    // Raw right-hand side as read from the input (quotes already stripped).
    str_value: String,
    // Memoized regex for this rule; filled on the first `to_regex` call.
    rule: Option<String>,
}
impl Rule {
    /// Expands this rule into a regex fragment, recursively resolving the
    /// rule ids it references and memoizing the result in `self.rule`.
    ///
    /// # Panics
    /// Panics if a referenced rule id is missing from `rules`, if a token is
    /// not an integer, or if the grammar is cyclic (the nested `borrow_mut`
    /// on an already-borrowed `RefCell` fails).
    fn to_regex(&mut self, rules: &HashMap<i32, RefCell<Rule>>) -> String {
        // Already expanded? Return the cached regex.
        if let Some(cached) = &self.rule {
            return cached.clone();
        }
        // Base case: a one-character rule is a literal.
        if self.str_value.len() == 1 {
            // Warn about unexpected non-letter literals. (Was a per-call
            // `Regex::new("^[a-zA-Z]$")`; a plain char check avoids
            // recompiling the pattern — and the regex crate — entirely.)
            if !self.str_value.chars().all(|c| c.is_ascii_alphabetic()) {
                println!("NOPE {}", self.str_value);
            }
            self.rule = Some(self.str_value.clone());
            return self.str_value.clone();
        }
        // Recursive case: build "(alt1|alt2|...)" from the alternatives.
        let groups: Vec<String> = self
            .str_value
            .split('|')
            .map(|x| x.trim().to_string())
            .collect();
        let mut full_rule = String::from("(");
        for (i, group) in groups.iter().enumerate() {
            // Concatenate the expansions of the referenced rules in order.
            for r in group.split(' ').map(|x| x.parse::<i32>().unwrap()) {
                let sub = rules.get(&r).unwrap().borrow_mut().to_regex(rules);
                full_rule.push_str(&sub);
            }
            if i != groups.len() - 1 {
                full_rule.push('|');
            }
        }
        full_rule.push(')');
        self.rule = Some(full_rule.clone());
        full_rule
    }
}
/// Advent-of-Code-style driver: for each input file, parses numbered grammar
/// rules, compiles rule 0 to an anchored regex, counts matching messages
/// (part 1), then rewrites rules 8 and 11 as looping constructs and recounts
/// (part 2). Expected answers travel with each file and are assert-checked.
fn main() -> io::Result<()> {
    // (file, expected part-1 count, expected part-2 count)
    let files_results = vec![
        ("test.txt", 2, 1),
        ("test2.txt", 3, 12),
        ("input.txt", 200, 407),
    ];
    for (f, result_1, result_2) in files_results.iter() {
        println!("{}", f);
        let file_content: Vec<String> = std::fs::read_to_string(f)?
            .lines()
            .map(|x| x.to_string())
            .collect();
        // Parse "<id>: <body>" lines; the first line without a ':' marks the
        // boundary between the rule section and the message section.
        let mut rules: HashMap<i32, RefCell<Rule>> = HashMap::with_capacity(file_content.len());
        let mut start = 0;
        for (i, line) in file_content.iter().enumerate() {
            let rule_line: Vec<&str> = line.split(":").collect();
            if rule_line.len() == 1 {
                start = i;
                break;
            }
            let rule_no = rule_line[0].parse::<i32>().unwrap();
            rules.insert(
                rule_no,
                RefCell::new(Rule {
                    // Literal rules are quoted in the input ("a"); drop the quotes.
                    str_value: rule_line[1].trim().to_string().replace("\"", ""),
                    rule: None,
                }),
            );
        }
        // Part 1: a message is valid iff it matches rule 0 in full (anchored).
        let rule_zero = format!("^{}$", rules.get(&0).unwrap().borrow_mut().to_regex(&rules));
        let regex_zero = Regex::new(&rule_zero).unwrap();
        let mut sum = 0;
        for line in file_content[start + 1..file_content.len()].iter() {
            if regex_zero.is_match(&line) {
                sum += 1;
            }
        }
        assert_eq!(sum, *result_1);
        // Part 2 only applies to inputs that actually define rules 42 and 31.
        if !rules.contains_key(&42) {
            println!("{} didn't have a rule 42, continuing", f);
            continue;
        };
        if !rules.contains_key(&31) {
            println!("{} didn't have a rule 31, continuing", f);
            continue;
        };
        let rule_42 = format!("{}", rules.get(&42).unwrap().borrow_mut().to_regex(&rules));
        let rule_31 = format!("{}", rules.get(&31).unwrap().borrow_mut().to_regex(&rules));
        // Part 2 rewrite: rule 8 becomes "one or more rule-42 matches".
        rules.insert(
            8,
            RefCell::new(Rule {
                str_value: "42 | 42 8".to_string(),
                rule: Some(format!("({}+)", rule_42)),
            }),
        );
        // Rule 11 is "42^n 31^n" (balanced), which plain regex can't express;
        // approximate it by unrolling the nesting 20 levels deep.
        let mut tmp: String = format!("({}{})?", rule_42, rule_31);
        for _ in 0..20 {
            tmp = format!("({}{}{})?", rule_42, tmp, rule_31);
        }
        tmp = format!("({}{}{})", rule_42, tmp, rule_31);
        rules.insert(
            11,
            RefCell::new(Rule {
                str_value: "42 31 | 42 11 31".to_string(),
                rule: Some(format!("({}{}|{})", rule_42, rule_31, tmp)),
            }),
        );
        // Re-derive rule 0 (rule = None forces re-expansion with the new 8/11).
        rules.insert(
            0,
            RefCell::new(Rule {
                str_value: "8 11".to_string(),
                rule: None,
            }),
        );
        let rule_zero = format!("^{}$", rules.get(&0).unwrap().borrow_mut().to_regex(&rules));
        let regex_zero = Regex::new(&rule_zero).unwrap();
        let mut sum = 0;
        for line in file_content[start + 1..file_content.len()].iter() {
            if regex_zero.is_match(&line) {
                sum += 1;
            }
        }
        assert_eq!(sum, *result_2);
        // Cross-check part 2 by direct counting: strip leading rule-42 matches,
        // then leading rule-31 matches; a message is valid iff it is fully
        // consumed with more 42s than 31s and at least one 31.
        let regex_31 = Regex::new(&format!("^{}", rule_31)).unwrap();
        let regex_42 = Regex::new(&format!("^{}", rule_42)).unwrap();
        let mut sum = 0;
        for line in file_content[start + 1..file_content.len()].iter() {
            let mut counter_42 = 0;
            let mut rest = line.to_string();
            while regex_42.is_match(&rest) {
                counter_42 += 1;
                let caps = regex_42.captures(&rest).unwrap();
                rest = rest.replacen(&caps.get(0).map_or("", |m| m.as_str()).to_string(), "", 1);
            }
            if counter_42 == 0 {
                continue;
            }
            let mut counter_31 = 0;
            while regex_31.is_match(&rest) {
                counter_31 += 1;
                let caps = regex_31.captures(&rest).unwrap();
                rest = rest.replacen(&caps.get(0).map_or("", |m| m.as_str()).to_string(), "", 1);
            }
            if counter_42 > counter_31 && counter_31 > 0 && rest.is_empty() {
                sum += 1;
            }
        }
        assert_eq!(sum, *result_2);
    }
    Ok(())
}
|
use amethyst::core::{SystemDesc, Transform};
use amethyst::derive::SystemDesc;
use amethyst::ecs::{Join, Read, System, SystemData, World, WriteStorage};
use amethyst::input::{InputHandler, StringBindings};
// You'll have to mark PADDLE_HEIGHT as public in pong.rs
use crate::state::Crab;
/// ECS system that moves `Crab` entities horizontally from player input.
#[derive(SystemDesc)]
pub struct CrabSystem;
impl<'s> System<'s> for CrabSystem {
    type SystemData = (
        WriteStorage<'s, Transform>,
        WriteStorage<'s, Crab>,
        Read<'s, InputHandler<StringBindings>>,
    );
    /// Translate every crab along the x axis by 10 times the "crab" input
    /// axis value, keeping the crab's own `x_position` in sync.
    fn run(&mut self, (mut transforms, mut crabs, input): Self::SystemData) {
        for (crab, transform) in (&mut crabs, &mut transforms).join() {
            match input.axis_value("crab") {
                Some(mv_amount) => {
                    let delta = (mv_amount as f32) * 10.;
                    transform.prepend_translation_x(delta);
                    crab.x_position += delta;
                }
                None => {}
            }
        }
    }
}
|
use rustler::types::{atom::Atom, binary::Binary};
use rustler::Encoder;
use rustler::{Env, NifResult, Term};
/// Pre-declared Erlang atoms used by the NIFs below.
mod atoms {
    rustler::rustler_atoms! {
        atom ok;
    }
}
pub fn on_load(_env: Env) {}
/// NIF: convert the atom in `args[0]` to its string representation.
pub fn atom_to_string<'a>(env: Env<'a>, args: &[Term<'a>]) -> NifResult<Term<'a>> {
    let atom_string = args[0].atom_to_string()?;
    Ok(atom_string.encode(env))
}
/// NIF: return whether `args[0]` is the `ok` atom.
pub fn atom_equals_ok<'a>(env: Env<'a>, args: &[Term<'a>]) -> NifResult<Term<'a>> {
    Ok((atoms::ok() == args[0]).encode(env))
}
/// NIF: create (or fetch) an atom from the bytes of the binary in `args[0]`.
pub fn binary_to_atom<'a>(env: Env<'a>, args: &[Term<'a>]) -> NifResult<Term<'a>> {
    let binary: Binary = args[0].decode()?;
    let atom = Atom::from_bytes(env, binary.as_slice())?;
    Ok(atom.encode(env))
}
/// NIF: look up an already-existing atom from the bytes of the binary in
/// `args[0]`, without creating a new one.
pub fn binary_to_existing_atom<'a>(env: Env<'a>, args: &[Term<'a>]) -> NifResult<Term<'a>> {
    let binary: Binary = args[0].decode()?;
    let atom = Atom::try_from_bytes(env, binary.as_slice())?;
    Ok(atom.encode(env))
}
|
use rand_core::{ RngCore, CryptoRng };
use hacl_star_sys as ffi;
/// The Curve25519 basepoint, u = 9, in little-endian byte encoding.
const BASEPOINT: [u8; 32] = [9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
pub const PUBLIC_LENGTH: usize = 32;
pub const SECRET_LENGTH: usize = 32;
// NOTE(review): PublicKey below is sized with SECRET_LENGTH; PUBLIC_LENGTH
// has the same value (32) and would state the intent more clearly — confirm
// and tidy when convenient.
define!{
    pub struct SecretKey/secretkey(pub [u8; SECRET_LENGTH]);
    pub struct PublicKey/publickey(pub [u8; SECRET_LENGTH]);
}
/// Generate a Curve25519 keypair: 32 random secret bytes from `rng` and the
/// matching public key (secret scalar multiplied with the basepoint).
#[inline]
pub fn keypair<R: RngCore + CryptoRng>(mut rng: R) -> (SecretKey, PublicKey) {
    let mut sk = [0; SECRET_LENGTH];
    // NOTE(review): `pk` is sized with SECRET_LENGTH; PUBLIC_LENGTH (same
    // value) would better express the intent.
    let mut pk = [0; SECRET_LENGTH];
    rng.fill_bytes(&mut sk);
    scalarmult(&mut pk, &sk, &BASEPOINT);
    (SecretKey(sk), PublicKey(pk))
}
impl SecretKey {
    /// Derive the public key for this secret key by multiplying the
    /// Curve25519 basepoint with the secret scalar.
    #[inline]
    pub fn get_public(&self) -> PublicKey {
        let mut public = [0; 32];
        scalarmult(&mut public, &self.0, &BASEPOINT);
        PublicKey(public)
    }
    /// Diffie-Hellman exchange: write `scalarmult(secret, peer_public)`
    /// into `output`.
    pub fn exchange(&self, peer: &PublicKey, output: &mut [u8; 32]) {
        scalarmult(output, &self.0, &peer.0);
    }
}
/// Raw Curve25519 scalar multiplication: `mypublic = secret * basepoint`,
/// delegated to the HACL* C implementation over FFI.
pub fn scalarmult(mypublic: &mut [u8; 32], secret: &[u8; 32], basepoint: &[u8; 32]) {
    // SAFETY: all three pointers come from fixed-size 32-byte arrays that
    // are live for the duration of the call, matching the FFI signature.
    unsafe {
        ffi::curve25519::Hacl_Curve25519_crypto_scalarmult(
            mypublic.as_mut_ptr(),
            secret.as_ptr() as _,
            basepoint.as_ptr() as _
        );
    }
}
|
use serde::{Deserialize, Serialize};
/// A named counter with its current value; (de)serializable via serde.
#[derive(Serialize, Deserialize)]
pub struct Counter {
    // Identifier of this counter.
    pub name: String,
    // Current count; signed, so decrements below zero are representable.
    pub count: i32,
}
|
//! Manage the target inspector sub-process
//!
//! The interrogation that lading does of the target sub-process is intentionally
//! limited to in-process concerns. For instance, lading is able to measure the
//! bytes written per second to a target because lading itself is writing the
//! bytes. It's valuable to have further information about the target
//! sub-process and that's the responsibility of the inspector. Consider that
//! you can get a Linux `perf` sample of the target by means of having inspector
//! run an appropriate shell script, or take samples of the target's CPU use.
use std::{
collections::HashMap,
io,
path::PathBuf,
process::{ExitStatus, Stdio},
};
use nix::{
errno::Errno,
sys::signal::{kill, SIGTERM},
unistd::Pid,
};
use serde::Deserialize;
use tokio::{process::Command, sync::broadcast::Receiver};
use tracing::{error, info};
use crate::{
common::{stdio, Output},
signals::Shutdown,
};
#[derive(Debug)]
/// Errors produced by [`Server`]
pub enum Error {
    /// Wrapper for [`nix::errno::Errno`]
    ///
    /// Produced when signaling the inspector sub-process fails.
    Errno(Errno),
    /// Wrapper for [`std::io::Error`]
    ///
    /// Produced when spawning or waiting on the inspector sub-process fails.
    Io(io::Error),
}
#[derive(Debug, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
/// Configuration for [`Server`]
pub struct Config {
    /// The path to the inspector executable.
    pub command: PathBuf,
    /// Arguments for the inspector sub-process.
    pub arguments: Vec<String>,
    /// Environment variables to set for the inspector sub-process. Lading's own
    /// environment variables are not propagated to the sub-process.
    /// A `TARGET_PID` variable is additionally injected at runtime.
    pub environment_variables: HashMap<String, String>,
    /// Manages stderr, stdout of the inspector sub-process.
    pub output: Output,
}
#[derive(Debug)]
/// The inspector sub-process server.
///
/// This struct manages a sub-process that can be used to do further examination
/// of the [`crate::target::Server`] by means of operating system facilities. The
/// sub-process is not created until [`Server::run`] is called. It is assumed
/// that only one instance of this struct will ever exist at a time, although
/// there are no protections for that.
pub struct Server {
    // Sub-process configuration: command, arguments, environment, output.
    config: Config,
    // Shutdown listener; when it fires the child is sent SIGTERM.
    shutdown: Shutdown,
}
impl Server {
    /// Create a new [`Server`] instance
    ///
    /// The inspector `Server` is responsible for investigating the
    /// [`crate::target::Server`] sub-process.
    ///
    /// # Errors
    ///
    /// Function will error if the path to the sub-process is not valid or if
    /// the path is valid but is not to file executable by this program.
    pub fn new(config: Config, shutdown: Shutdown) -> Result<Self, Error> {
        // NOTE(review): construction is currently infallible; any path or
        // executable problems only surface when `run` spawns the child —
        // confirm whether the doc above should be updated.
        Ok(Self { config, shutdown })
    }
    /// Run this [`Server`] to completion
    ///
    /// This function runs the user supplied program to its completion, or until
    /// a shutdown signal is received. Child exit status does not currently
    /// propagate. This is less than ideal.
    ///
    /// Target server will use the `broadcast::Sender` passed here to transmit
    /// its PID. This PID is passed to the sub-process as the first argument.
    ///
    /// # Errors
    ///
    /// Function will return an error if the underlying program cannot be waited
    /// on or will not shutdown when signaled to.
    ///
    /// # Panics
    ///
    /// None are known.
    pub async fn run(mut self, mut pid_snd: Receiver<u32>) -> Result<ExitStatus, Error> {
        let target_pid = pid_snd
            .recv()
            .await
            .expect("target failed to transmit PID, catastrophic failure");
        // Only one PID is ever needed; release the receiver immediately.
        drop(pid_snd);
        let config = self.config;
        let mut target_cmd = Command::new(config.command);
        // The target's PID is exposed to the inspector via TARGET_PID.
        let mut environment_variables = config.environment_variables.clone();
        environment_variables.insert(String::from("TARGET_PID"), target_pid.to_string());
        target_cmd
            .stdin(Stdio::null())
            .stdout(stdio(&config.output.stdout))
            .stderr(stdio(&config.output.stderr))
            // `env_clear` keeps lading's own environment out of the child.
            .env_clear()
            .kill_on_drop(true)
            .args(config.arguments)
            .envs(environment_variables.iter());
        let mut target_child = target_cmd.spawn().map_err(Error::Io)?;
        let target_wait = target_child.wait();
        // Race child completion against the shutdown signal.
        tokio::select! {
            res = target_wait => {
                match res {
                    Ok(status) => {
                        error!("child exited with status: {}", status);
                        Ok(status)
                    }
                    Err(err) => {
                        error!("child exited with error: {}", err);
                        Err(Error::Io(err))
                    }
                }
            },
            _ = self.shutdown.recv() => {
                info!("shutdown signal received");
                // Note that `Child::kill` sends SIGKILL which is not what we
                // want. We instead send SIGTERM so that the child has a chance
                // to clean up.
                let pid: Pid = Pid::from_raw(target_child.id().unwrap().try_into().unwrap());
                kill(pid, SIGTERM).map_err(Error::Errno)?;
                let res = target_child.wait().await.map_err(Error::Io)?;
                Ok(res)
            }
        }
    }
}
|
// Reader/writer type aliases for the CRCPR register (svd2rust-generated
// style; see the CRCPR_SPEC doc link to the svd2rust API below).
#[doc = "Register `CRCPR` reader"]
pub type R = crate::R<CRCPR_SPEC>;
#[doc = "Register `CRCPR` writer"]
pub type W = crate::W<CRCPR_SPEC>;
#[doc = "Field `CRCPOLY` reader - CRC polynomial register"]
pub type CRCPOLY_R = crate::FieldReader<u16>;
#[doc = "Field `CRCPOLY` writer - CRC polynomial register"]
pub type CRCPOLY_W<'a, REG, const O: u8> = crate::FieldWriterSafe<'a, REG, 16, O, u16>;
impl R {
    #[doc = "Bits 0:15 - CRC polynomial register"]
    #[inline(always)]
    pub fn crcpoly(&self) -> CRCPOLY_R {
        // Low 16 bits of the register hold the polynomial.
        CRCPOLY_R::new((self.bits & 0xffff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:15 - CRC polynomial register"]
    #[inline(always)]
    #[must_use]
    pub fn crcpoly(&mut self) -> CRCPOLY_W<CRCPR_SPEC, 0> {
        // Field writer anchored at bit offset 0.
        CRCPOLY_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // SAFETY: caller must ensure the raw value is valid for this register.
        self.bits = bits;
        self
    }
}
#[doc = "CRC polynomial register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`crcpr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`crcpr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CRCPR_SPEC;
impl crate::RegisterSpec for CRCPR_SPEC {
    // The register is accessed as a 32-bit word.
    type Ux = u32;
}
#[doc = "`read()` method returns [`crcpr::R`](R) reader structure"]
impl crate::Readable for CRCPR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`crcpr::W`](W) writer structure"]
impl crate::Writable for CRCPR_SPEC {
    // No write-1-to-clear / write-0-to-clear fields on this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CRCPR to value 0x07"]
impl crate::Resettable for CRCPR_SPEC {
    const RESET_VALUE: Self::Ux = 0x07;
}
|
use super::id_table::{IdColor, IdTable, ObjectId, Surface};
use super::matrix::{camera::CameraMatrix, model::ModelMatrix};
use super::webgl::{program, ProgramType, WebGlF32Vbo, WebGlI16Ibo, WebGlRenderingContext};
use crate::arena::block::{self, BlockId};
use crate::arena::resource;
use ndarray::Array2;
use std::collections::HashMap;
/// GPU buffers for rendering characters: two quads (one lying in the XY
/// plane slightly above z = 0, one standing in the XZ plane) that share a
/// single index buffer.
pub struct Character {
    // Quad in the XY plane, offset to z = 1/128.
    vertex_buffer_xy: WebGlF32Vbo,
    // Upright quad in the XZ plane (z from 0.0 to 1.0).
    vertex_buffer_xz: WebGlF32Vbo,
    // Normals for the XY quad (+z).
    normal_buffer_xy: WebGlF32Vbo,
    // Normals for the XZ quad (+y).
    normal_buffer_xz: WebGlF32Vbo,
    // Per-vertex color (all zeros; color comes from uniforms/textures).
    v_color_buffer: WebGlF32Vbo,
    // Per-vertex id color (all zeros; the id is set per-draw as a uniform).
    id_color_buffer: WebGlF32Vbo,
    texture_coord_buffer: WebGlF32Vbo,
    index_buffer: WebGlI16Ibo,
}
impl Character {
    /// Allocate all static vertex/normal/color/texcoord/index buffers used
    /// for character rendering.
    pub fn new(gl: &WebGlRenderingContext) -> Self {
        // Unit quad in the XY plane, nudged to z = 1/128 to avoid z-fighting
        // with geometry at z = 0.
        let vertex_buffer_xy = gl.create_vbo_with_f32array(
            &[
                [0.5, 0.5, 1.0 / 128.0],
                [-0.5, 0.5, 1.0 / 128.0],
                [0.5, -0.5, 1.0 / 128.0],
                [-0.5, -0.5, 1.0 / 128.0],
            ]
            .concat(),
        );
        // Upright quad in the XZ plane (z spans 0..1).
        let vertex_buffer_xz = gl.create_vbo_with_f32array(
            &[
                [0.5, 0.0, 1.0],
                [-0.5, 0.0, 1.0],
                [0.5, 0.0, 0.0],
                [-0.5, 0.0, 0.0],
            ]
            .concat(),
        );
        let normal_buffer_xy = gl.create_vbo_with_f32array(
            &[
                [0.0, 0.0, 1.0],
                [0.0, 0.0, 1.0],
                [0.0, 0.0, 1.0],
                [0.0, 0.0, 1.0],
            ]
            .concat(),
        );
        let normal_buffer_xz = gl.create_vbo_with_f32array(
            &[
                [0.0, 1.0, 0.0],
                [0.0, 1.0, 0.0],
                [0.0, 1.0, 0.0],
                [0.0, 1.0, 0.0],
            ]
            .concat(),
        );
        let v_color_buffer = gl.create_vbo_with_f32array(
            &[
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
            ]
            .concat(),
        );
        let id_color_buffer = gl.create_vbo_with_f32array(
            &[
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
            ]
            .concat(),
        );
        let texture_coord_buffer =
            gl.create_vbo_with_f32array(&[[1.0, 1.0], [0.0, 1.0], [1.0, 0.0], [0.0, 0.0]].concat());
        // Two triangles per quad; shared by both quads.
        let index_buffer = gl.create_ibo_with_i16array(&[0, 1, 2, 3, 2, 1]);
        Self {
            vertex_buffer_xy,
            vertex_buffer_xz,
            normal_buffer_xy,
            normal_buffer_xz,
            id_color_buffer,
            v_color_buffer,
            texture_coord_buffer,
            index_buffer,
        }
    }
    /// Render every character (except the grabbed one) in two passes:
    /// first a flat base circle on the XY quad, then an upright textured
    /// billboard on the XZ quad, registering each character's pick id in
    /// `id_table`/`id_value` along the way.
    pub fn render(
        &self,
        gl: &mut WebGlRenderingContext,
        id_table: &mut IdTable,
        id_value: &mut HashMap<BlockId, IdColor>,
        camera: &CameraMatrix,
        vp_matrix: &Array2<f32>,
        block_arena: &block::Arena,
        resource_arena: &resource::Arena,
        character_ids: impl Iterator<Item = BlockId>,
        grabbed_object_id: &ObjectId,
    ) {
        // Collect per-character draw data and register pick ids up front.
        let characters = block_arena
            .iter_map_with_ids(
                character_ids.filter(|x| !grabbed_object_id.eq(x)),
                |character_id, character: &block::character::Character| {
                    let size = character.size();
                    let pos = character.position().clone();
                    let tex_height = character.current_tex_height();
                    let tex_size = character
                        .current_tex_id()
                        .and_then(|tex_id| resource_arena.get_as::<resource::ImageData>(tex_id))
                        .map(|img| img.size().clone());
                    // Sequential id with a fully-opaque alpha byte.
                    let id = id_table.len() as u32 | 0xFF000000;
                    id_table.insert(
                        IdColor::from(id),
                        ObjectId::Character(
                            BlockId::clone(&character_id),
                            Surface {
                                r: pos.clone(),
                                s: [1.0, 0.0, 0.0],
                                t: [0.0, 1.0, 0.0],
                            },
                        ),
                    );
                    id_value.insert(BlockId::clone(&character_id), IdColor::from(id));
                    (id as i32, size, pos, tex_height, tex_size)
                },
            )
            .collect::<Vec<_>>();
        // Shared GL state for both passes.
        gl.use_program(ProgramType::UnshapedProgram);
        gl.set_a_vertex(&self.vertex_buffer_xy, 3, 0);
        gl.set_a_normal(&self.normal_buffer_xy, 3, 0);
        gl.set_a_texture_coord(&self.texture_coord_buffer, 2, 0);
        gl.set_a_id_color(&self.id_color_buffer, 4, 0);
        gl.set_a_v_color(&self.v_color_buffer, 4, 0);
        gl.bind_buffer(
            web_sys::WebGlRenderingContext::ELEMENT_ARRAY_BUFFER,
            Some(&self.index_buffer),
        );
        // Pass 1: flat circle base on the XY quad, no texture, id-write on.
        gl.set_u_shape(program::SHAPE_2D_CIRCLE);
        gl.set_u_bg_color_1(program::COLOR_NONE);
        gl.set_u_bg_color_2(program::COLOR_NONE);
        gl.set_u_id(program::ID_U_WRITE);
        gl.set_u_texture_0(program::TEXTURE_NONE);
        gl.set_u_texture_1(program::TEXTURE_NONE);
        gl.set_u_texture_2(program::TEXTURE_NONE);
        gl.set_u_light(program::LIGHT_NONE);
        gl.depth_func(web_sys::WebGlRenderingContext::LEQUAL);
        for (id, size, pos, _, _) in &characters {
            let model_matrix: Array2<f32> = ModelMatrix::new()
                .with_scale(&[*size, *size, 1.0])
                .with_movement(pos)
                .into();
            let mvp_matrix = vp_matrix.dot(&model_matrix);
            gl.set_u_translate(mvp_matrix.reversed_axes());
            gl.set_u_id_value(*id);
            gl.draw_elements_with_i32(
                web_sys::WebGlRenderingContext::TRIANGLES,
                6,
                web_sys::WebGlRenderingContext::UNSIGNED_SHORT,
                0,
            );
        }
        // Pass 2: upright XZ quad, rotated to face the camera, only for
        // characters that have a texture.
        gl.set_a_vertex(&self.vertex_buffer_xz, 3, 0);
        gl.set_a_normal(&self.normal_buffer_xz, 3, 0);
        gl.set_u_shape(program::SHAPE_2D_BOX);
        for (id, _, pos, tex_height, tex_size) in &characters {
            if let Some(tex_size) = tex_size.as_ref() {
                // Width preserves the texture aspect ratio at the given height.
                let model_matrix: Array2<f32> = ModelMatrix::new()
                    .with_scale(&[*tex_height * tex_size[0] / tex_size[1], 1.0, *tex_height])
                    .with_x_axis_rotation(camera.x_axis_rotation() - std::f32::consts::FRAC_PI_2)
                    .with_z_axis_rotation(camera.z_axis_rotation())
                    .with_movement(pos)
                    .into();
                let mvp_matrix = vp_matrix.dot(&model_matrix);
                gl.set_u_translate(mvp_matrix.reversed_axes());
                gl.set_u_id_value(*id);
                gl.draw_elements_with_i32(
                    web_sys::WebGlRenderingContext::TRIANGLES,
                    6,
                    web_sys::WebGlRenderingContext::UNSIGNED_SHORT,
                    0,
                );
            }
        }
    }
}
|
//! Types that are common between different operations.
use std::fmt;
/// serde `skip_serializing_if` predicate for `pretty_print`: the parameter
/// is omitted from the query string when `true` (GCP's own default) and
/// serialized explicitly when `false`. Takes `&bool` because serde passes
/// the field by reference.
#[allow(clippy::trivially_copy_pass_by_ref)]
fn pretty_on(pretty_print: &bool) -> bool {
    *pretty_print
}
/// [Standard Query Parameters](https://cloud.google.com/storage/docs/json_api/v1/parameters#query)
/// can be used in almost any API request to GCS
#[derive(Serialize, Default)]
#[serde(rename_all = "camelCase")]
pub struct StandardQueryParameters<'a> {
    /// Selector specifying a subset of fields to include in the response,
    /// the primary use of this is for better performance and lower response
    /// sizes.
    /// For more information, see the [partial response](https://cloud.google.com/storage/docs/json_api/v1/how-tos/performance#partial)
    /// documentation.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub fields: Option<&'a str>,
    /// Returns the response in a human-readable format, with indentations and
    /// line breaks, if true. Note that while the default value is `true` for
    /// GCP, this crate uses a default of `false`
    // Skipped when true so GCP's own default applies; serialized when false.
    #[serde(skip_serializing_if = "pretty_on")]
    pub pretty_print: bool,
    /// Lets you enforce per-user quotas from a server-side application even
    /// in cases when the user's IP address is unknown. This can occur, for
    /// example, with applications that run cron jobs on App Engine on a
    /// user's behalf. You can choose any arbitrary string that uniquely
    /// identifies a user, but it is limited to 40 characters. Overrides
    /// `userIp` if both are provided. See more about [Capping API usage](https://cloud.google.com/apis/docs/capping-api-usage)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub quota_user: Option<&'a str>,
    /// Lets you enforce per-user quotas when calling the API from a server-side application.
    /// See more about [Capping API usage](https://cloud.google.com/apis/docs/capping-api-usage)
    #[serde(skip_serializing_if = "Option::is_none")]
    pub user_ip: Option<&'a str>,
}
/// Contains common conditionals that determine whether an operation
/// will actually proceed or not
#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Conditionals {
    /// Makes the operation conditional on whether the object's current
    /// generation matches the given value. Setting to 0 makes the
    /// operation succeed only if there are no live versions of the object.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub if_generation_match: Option<i64>,
    /// Makes the operation conditional on whether the object's current
    /// generation does not match the given value. If no live object exists,
    /// the precondition fails. Setting to 0 makes the operation succeed only
    /// if there is a live version of the object.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub if_generation_not_match: Option<i64>,
    /// Makes the operation conditional on whether the object's current
    /// metageneration matches the given value.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub if_metageneration_match: Option<i64>,
    /// Makes the operation conditional on whether the object's current
    /// metageneration does not match the given value.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub if_metageneration_not_match: Option<i64>,
}
/// [Storage classes](https://cloud.google.com/storage/docs/storage-classes)
// Serialized as SCREAMING_SNAKE_CASE, matching the JSON API's enum values.
#[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum StorageClass {
    /// [Multi-Regional Storage](https://cloud.google.com/storage/docs/storage-classes#multi-regional)
    /// is appropriate for storing data that is frequently accessed ("hot" objects), such as serving
    /// website content, interactive workloads, or data supporting mobile and gaming applications.
    /// Multi-Regional Storage data has the most availability compared to other storage classes.
    MultiRegional,
    /// [Regional Storage](https://cloud.google.com/storage/docs/storage-classes#regional) enables
    /// you to store data at lower cost, with the trade-off of data being stored in a specific
    /// regional location, instead of having redundancy distributed over a large geographic area.
    Regional,
    /// [Nearline Storage](https://cloud.google.com/storage/docs/storage-classes#nearline) is a
    /// low-cost, highly durable storage service for storing infrequently accessed data.
    /// Nearline Storage is a better choice than Multi-Regional Storage or Regional Storage
    /// in scenarios where slightly lower availability, a 30-day minimum storage duration,
    /// and costs for data access are acceptable trade-offs for lowered storage costs.
    Nearline,
    /// [Coldline Storage](https://cloud.google.com/storage/docs/storage-classes#coldline)
    /// is a very-low-cost, highly durable storage service for data archiving, online backup,
    /// and disaster recovery. Unlike other "cold" storage services, your data is available
    /// within milliseconds, not hours or days.
    Coldline,
    /// Users that create a bucket without specifying a default storage class see the bucket's
    /// default storage class listed as [Standard Storage](https://cloud.google.com/storage/docs/storage-classes#standard)
    /// in the API. Objects created without a storage class in such a bucket are also listed
    /// as Standard Storage in the API. Standard Storage is equivalent to Multi-Regional
    /// Storage when the associated bucket is located in a multi-regional location. Standard
    /// Storage is equivalent to Regional Storage when the associated bucket is located in a
    /// regional location.
    Standard,
    /// It is recommended that users utilize Regional Storage in place of [Durable Reduced Availability (DRA)](https://cloud.google.com/storage/docs/storage-classes#dra).
    /// Regional Storage has lower pricing for operations, but otherwise the same price structure.
    /// Regional Storage also has better performance, particularly in terms of availability
    /// (DRA has a 99% availability SLA).
    DurableReducedAvailability,
}
impl fmt::Display for StorageClass {
    /// Display as the variant name by delegating to the derived `Debug`
    /// representation.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
/// A [predefined or "canned" ACL](https://cloud.google.com/storage/docs/access-control/lists#predefined-acl)
/// is an alias for a set of specific ACL entries that you can use to quickly apply many ACL entries at once
/// to a bucket or object. Predefined ACLs are defined for common scenarios such as revoking all access
/// permissions except for owner permission (predefined ACL private), or making an object publicly readable
/// (predefined ACL publicRead).
// Serialized as camelCase, matching the API's query-parameter values.
#[derive(Serialize, Copy, Clone, Debug, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub enum PredefinedAcl {
    /// Object owner gets OWNER access, and allAuthenticatedUsers get READER access.
    AuthenticatedRead,
    /// Object owner gets OWNER access, and project team owners get OWNER access.
    BucketOwnerFullControl,
    /// Object owner gets OWNER access, and project team owners get READER access.
    BucketOwnerRead,
    /// Object owner gets OWNER access.
    Private,
    /// Object owner gets OWNER access, and project team members get access according to their roles.
    ProjectPrivate,
    /// Object owner gets OWNER access, and allUsers get READER access.
    PublicRead,
}
/// Set of properties to return. Defaults to `NoAcl`.
#[derive(Serialize, Copy, Clone, Debug, PartialEq, Eq, Default)]
#[serde(rename_all = "camelCase")]
pub enum Projection {
    /// Include all properties.
    Full,
    /// Omit the owner, acl property.
    #[default]
    NoAcl,
}
|
#[cfg(windows)]
extern crate ws2_32;
#[cfg(windows)]
use std::mem;
use std::ffi::{CStr, CString};
use libc::c_char;
use std::ptr;
#[cfg(windows)]
/// Initialize the Windows network stack (Winsock 2.2) ourselves.
pub fn other_init() {
    // libtls currently (2.3.1) fails to initialize the
    // windows network stack - issue #167
    // SAFETY: WSAStartup is called once with a zeroed WSADATA out-param.
    unsafe {
        let mut data = mem::zeroed();
        ws2_32::WSAStartup(0x202, &mut data);
    }
}
#[cfg(not(windows))]
/// No extra initialization is needed on non-Windows platforms.
pub fn other_init() {}
/// Convert a NUL-terminated C string pointer into an owned Rust `String`.
///
/// A null pointer yields an empty `String`; invalid UTF-8 sequences are
/// replaced with U+FFFD via `from_utf8_lossy`.
pub fn from_cstr(s: *const c_char) -> String {
    // Idiomatic null test (`is_null`) instead of comparing a *const
    // pointer against `ptr::null_mut()`.
    if s.is_null() {
        String::new()
    } else {
        // SAFETY: `s` is non-null here; the caller guarantees it points to
        // a valid NUL-terminated C string that outlives this call.
        let slice = unsafe { CStr::from_ptr(s) };
        String::from_utf8_lossy(slice.to_bytes()).into_owned()
    }
}
/// Get C string ptr, but use NULL if the string is empty.
/// Because some C functions treat NULL and "\0" differently
pub fn str_c_ptr(s: &str) -> *const i8 {
if s.is_empty() {
ptr::null()
} else {
unsafe { CString::from_vec_unchecked(s.bytes().collect()).as_ptr() }
}
}
|
use actix_web::HttpResponse;
use super::super::domain::job::DomainJob;
use serde_derive::{
Deserialize,
Serialize,
};
/// API-facing representation of a job posting; field names are serialized
/// in camelCase for the JSON response.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Job {
    id: i32,
    company_name: String,
    company_logo: String,
    company_thumbnail: String,
    title: String,
    title_sub: String,
    tag: String,
}
/// Top-level JSON payload wrapping the list of jobs.
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct JobIndex {
    jobs: Vec<Job>,
}
pub fn response(domain_jobs: &Vec<DomainJob>) -> HttpResponse {
let mut jobs = Vec::with_capacity(domain_jobs.len());
for domain_job in domain_jobs {
jobs.push(Job {
id: *domain_job.id(),
company_name: domain_job.company_name().to_string(),
company_logo: domain_job.company_logo().to_string(),
company_thumbnail: domain_job.company_thumbnail().to_string(),
title: domain_job.title().to_string(),
title_sub: domain_job.title_sub().to_string(),
tag: domain_job.tag().to_string(),
});
}
HttpResponse::Ok().json(JobIndex {
jobs,
})
}
|
use crate::config;
use crate::util::*;
use std::fs;
/// Remove a note. The note name comes from argv[2], or is chosen
/// interactively when no argument was supplied; the user is asked for
/// confirmation before anything is deleted.
pub fn run() {
    let name = if args_len() < 3 {
        choose_note()
    } else {
        get_argument(2)
    };
    if !get_confirmation("Confirm") {
        // Fixed message: singular "Note" for consistency with the success
        // message below.
        println!("Note {} was not removed.", name);
        return;
    }
    // Delete <directory><name><extension>; failure is reported, not fatal.
    match fs::remove_file(format!("{}{}{}", &config::directory(), name, config::EXTENSION)) {
        Ok(_) => println!("Note {} removed.", name),
        Err(_) => println!("Failed to remove note."),
    };
}
|
use std::process;
use crate::util::Newline;
use crate::util::CmpType;
use crate::util::ScanIter;
use crate::util::char;
/// Lexical tokens produced by `scan`.
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    DollarSign,
    Slash,
    Comma,
    Bang,
    BangEqual,
    Equal,
    EqualEqual,
    // Payload-carrying tokens.
    Identifier(String),
    String(String),
    Number(isize),
    // Keywords. NOTE(review): Main, If, While and For are declared here but
    // the scanner's keyword table never produces them — confirm intent.
    Fun,
    Cmd,
    Main,
    Return,
    Let,
    If,
    While,
    For,
    Newline,
    Eof,
}
impl CmpType for Token {}
impl Newline for Token {
    /// A token counts as a newline exactly when it is the `Newline` variant.
    fn is_newline(&self) -> bool {
        matches!(self, Token::Newline)
    }
}
/// Print a scan error for `line` to stderr and terminate the whole process
/// with exit code 1. Expands to statements, so it must be used in
/// statement/block position.
macro_rules! scan_error {
    ($line: expr, $($arg:tt)*) => {
        eprint!("Scan error at line {}: ", $line);
        eprintln!($($arg)*);
        process::exit(1);
    }
}
/// Tokenize the entire character stream into a list of tokens, scanning
/// one token at a time until the stream is exhausted.
pub fn scan(src: Vec<char>) -> Vec<Token> {
    let mut stream = ScanIter::new(src);
    let mut tokens = Vec::new();
    loop {
        tokens.push(scan_token(&mut stream));
        if stream.is_at_end() {
            break;
        }
    }
    tokens
}
/// Scan a single token from the stream. Whitespace and `//` comments are
/// skipped by recursing; `Eof` is returned once the input is exhausted.
fn scan_token(src: &mut ScanIter<char>) -> Token {
    if src.is_at_end() {
        return Token::Eof;
    }
    // Capture the line before consuming, for error reporting.
    let line = src.line();
    let c = src.peek().clone();
    src.advance();
    match c {
        '(' => Token::LeftParen,
        ')' => Token::RightParen,
        '{' => Token::LeftBrace,
        '}' => Token::RightBrace,
        '$' => Token::DollarSign,
        ',' => Token::Comma,
        // One- or two-character operators, decided by lookahead.
        '!' => if src.match_item('=') {
            Token::BangEqual
        } else {
            Token::Bang
        },
        '=' => if src.match_item('=') {
            Token::EqualEqual
        } else {
            Token::Equal
        },
        '/' => {
            // `//` starts a comment that runs to end of line.
            if src.match_item('/') {
                while *src.peek() != '\n' && !src.is_at_end() {
                    src.advance();
                }
                scan_token(src)
            } else {
                Token::Slash
            }
        },
        // skip whitespaces
        ' '|'\r'|'\t' => scan_token(src),
        '\n' => Token::Newline,
        '"' => scan_string(src),
        _ => {
            if char::is_digit(c) {
                scan_number(src)
            } else if char::is_alpha_or_underscore(c) {
                scan_identifier_or_keyword(src)
            } else {
                scan_error!(line, "unexpected character '{}'", c);
            }
        }
    }
}
/// Scan a string literal; the opening '"' was already consumed by the
/// caller. Aborts the process on a newline (multi-line strings are not
/// supported) or on unterminated input.
fn scan_string(src: &mut ScanIter<char>) -> Token {
    let mut s = String::new();
    let mut c = *src.peek();
    while c != '"' && !src.is_at_end() {
        if c == '\n' {
            scan_error!(src.line(), "do not support multi-line string");
        }
        s.push(c);
        src.advance();
        c = *src.peek();
    }
    if src.is_at_end() {
        scan_error!(src.line(), "unterminated string");
    }
    // Closing ".
    src.advance();
    Token::String(s)
}
/// Scan an integer literal. The first digit was already consumed by the
/// caller and is recovered via `previous()`.
/// NOTE(review): `parse().unwrap()` panics on literals that overflow isize.
fn scan_number(src: &mut ScanIter<char>) -> Token {
    let mut s = String::new();
    let mut c = *src.peek();
    s.push(*src.previous());
    while char::is_digit(c) {
        s.push(c);
        src.advance();
        c = *src.peek();
    }
    Token::Number(s.parse().unwrap())
}
/// Scan an identifier or keyword. The first character was already consumed
/// by the caller and is recovered via `previous()`.
fn scan_identifier_or_keyword(src: &mut ScanIter<char>) -> Token {
    let mut s = String::new();
    let mut c = *src.peek();
    s.push(*src.previous());
    while char::is_alpha_numeric_or_underscore(c) {
        s.push(c);
        src.advance();
        c = *src.peek();
    }
    // A trailing '!' is folded into the name and forces an identifier.
    if c == '!' {
        s.push(c);
        src.advance();
        return Token::Identifier(s);
    }
    // NOTE(review): `Token` also declares Main, If, While and For, but
    // "main", "if", "while" and "for" fall through to Identifier here —
    // confirm whether those keywords should be matched.
    match s.as_ref() {
        "fun" => Token::Fun,
        "cmd" => Token::Cmd,
        "let" => Token::Let,
        "return" => Token::Return,
        _ => Token::Identifier(s),
    }
}
|
use std::io::{Write};
use std::fs::File;
use lib::{Node};
use parser_lib;
use asm_lib as asm;
// $ gcc -m32 -masm=intel main.s -o main
/// Parse the source at `path`/`filename` into an AST and generate 32-bit
/// Intel-syntax assembly into a file named `filename`.
///
/// NOTE(review): the output is created with `File::create(&filename)` in
/// the current directory — confirm this cannot clobber an input file of
/// the same name.
pub fn gen(path: & String, filename: & String) {
    // println!("----------------");
    let ast: Vec<Node::Node> = parser_lib::parse(&path, &filename);
    let mut f = File::create(&filename).expect("Gen: Unable to create the file.");
    // Pprint::print_ast(&ast, 1);
    // println!("\n[+] Start generating assembly.\n");
    analyze_ast(& ast, 0, 0, &mut f);
    // println!("\n[+] Finish generating assembly.\n");
    // Pprint::print_asm(&filename);
    // println!("----------------");
    // println!("Generated file: {}", filename);
}
/// Dispatch code generation for AST node `me` based on its level
/// ("Program", "Function" or "Statement"); `from` is the parent index,
/// forwarded to the handlers.
fn analyze_ast(ast: & Vec<Node::Node>, me: usize, from: usize, mut f: &mut File) {
    match ast[me]._level.as_str() {
        "Program" => program(&ast, me, from, &mut f),
        "Function" => function(&ast, me, from, &mut f),
        "Statement" => statement(&ast, me, from, &mut f),
        // Fixed typo in the panic message ("Unrecoginized" -> "Unrecognized").
        _ => panic!("Gen: Unrecognized AST node."),
    }
}
/// Emit the translation-unit prologue (`.intel_syntax noprefix` and the
/// `.file` directive) and then recurse into the Program node's children.
fn program(ast: & Vec<Node::Node>, me: usize, from: usize, mut f: &mut File) {
    // intel syntax
    // println!(" .intel_syntax noprefix");
    // print!(" ");
    // print!(".file ");
    // println!("\"{}\"", ast[me]._name);
    write!(f, " .intel_syntax noprefix\n").expect("Gen: generator program: Unable to write to the file.");
    write!(f, " ").expect("Gen: generator program: Unable to write to the file.");
    write!(f, ".file ").expect("Gen: generator program: Unable to write to the file.");
    write!(f, "\"{}\"\n", ast[me]._name).expect("Gen: generator program: Unable to write to the file.");
    // Generate code for each child node (`from` is unused here).
    for i in ast[me].to.iter() {
        let to = *i;
        analyze_ast(& ast, to, me, &mut f);
    }
}
/// Emit a function's `.globl` directive and label, then recurse into the
/// Function node's children (its statements).
fn function(ast: & Vec<Node::Node>, me: usize, from: usize, mut f: &mut File) {
    // print!(" ");
    // print!(".globl ");
    // println!("{}", ast[me]._name);
    // println!("{}:", ast[me]._name);
    write!(f, " ").expect("Gen: generator func: Unable to write to the file.");
    write!(f, ".globl ").expect("Gen: generator func: Unable to write to the file.");
    write!(f, "{}\n", ast[me]._name).expect("Gen: generator func: Unable to write to the file.");
    write!(f, "{}:\n", ast[me]._name).expect("Gen: generator func: Unable to write to the file.");
    // Generate code for each child node (`from` is unused here).
    for i in ast[me].to.iter() {
        let to = *i;
        analyze_ast(& ast, to, me, &mut f);
    }
}
/// Generate code for a Statement node. Only `return` statements are
/// handled; every other statement type is silently ignored.
fn statement(ast: & Vec<Node::Node>, me: usize, from: usize, mut f: &mut File) {
    match ast[me]._type.as_str() {
        "RETURN_KEYWORD" => stat_return(&ast, me, from, &mut f),
        _ => (),
    }
}
/// Emit a `return` statement: move the expression value into eax, then
/// emit `ret`.
fn stat_return(ast: & Vec<Node::Node>, me: usize, from: usize, mut f: &mut File) {
    let ret_val = exp(&ast, ast[me].to[0], me);
    asm::mov("eax", ret_val, &mut f);
    // print!(" ");
    // println!("ret");
    write!(f, " ").expect("Gen: stat_return: Unable to write to the file.");
    write!(f, "ret\n").expect("Gen: stat_return: Unable to write to the file.");
}
/// Evaluate an expression node; currently just returns the node's literal
/// value as a string slice (`from` is unused).
fn exp(ast: & Vec<Node::Node>, me: usize, from: usize) -> &str {
    ast[me]._value.as_str()
}
|
use ggez::{graphics, Context, GameResult};
use ggez::event::EventHandler;
use rand::Rng;
use std::fs::File;
use std::io::{Read};
/// A simple 2D vector of f32 components.
#[derive(Clone)]
struct Vector2D {
    x: f32,
    y: f32
}
impl Vector2D {
pub fn new(x: f32, y: f32) -> Self {
Self {
x,
y
}
}
}
/// Conversion into `ggez`'s mint point type.
///
/// Implemented as `From` rather than a hand-written `Into`, per Rust API
/// guidelines: the standard library's blanket impl derives `Into` for
/// free, so existing `.into()` call sites keep working unchanged.
impl From<Vector2D> for ggez::mint::Point2<f32> {
    fn from(v: Vector2D) -> Self {
        ggez::mint::Point2::<f32> {
            x: v.x,
            y: v.y
        }
    }
}
/// A drawable grid cell, holding its pre-built mesh.
struct Cell {
    mesh: graphics::Mesh,
}
impl Cell {
    /// Wrap an already-built mesh into a `Cell`.
    fn new(mesh: graphics::Mesh) -> Self {
        Self {
            mesh,
        }
    }
}
/// A drawable text element holding a rendered number.
struct NumberText {
    text: graphics::Text
}
impl NumberText {
    /// Wrap an already-built text object into a `NumberText`.
    pub fn new(text: graphics::Text) -> Self {
        Self {
            text
        }
    }
}
/// The moving head of the game: tracks its grid position, the path taken
/// so far, and a position it is locked out of revisiting.
struct HeadNode {
    mesh: graphics::Mesh,
    // Row the head started from; indexes per-row bookkeeping elsewhere.
    start_row: u8,
    // Current [row, column] grid position.
    location: [u8; 2],
    // Every grid cell visited so far, starting at [0, 0].
    path: Vec<[u8; 2]>,
    // Cell the head must not move into; [10, 10] acts as "none" initially.
    locked_location: [u8; 2],
    game_not_finished: bool
}
impl HeadNode {
    /// Creates the head at the top-left cell with a fresh search state.
    pub fn new(mesh: graphics::Mesh) -> Self {
        let mut path: Vec<[u8; 2]> = Vec::new();
        path.push([0, 0]);
        Self {
            mesh,
            start_row: 0,
            location: [0, 0],
            path,
            // Sentinel meaning "no cell locked yet".
            // NOTE(review): assumes the table never reaches 10 rows/columns,
            // so [10, 10] cannot collide with a real cell — confirm against
            // the sizes allowed in settings.conf.
            locked_location: [10, 10],
            game_not_finished: true
        }
    }
    /// Advances the head one step of its depth-first walk across the table.
    ///
    /// Each step moves one column to the right, preferring up-right, then
    /// straight right, then down-right, and never stepping onto
    /// `locked_location`. A successful step appends the new cell to `path`,
    /// its number to `menu_sum_numbers`, and its location to the current
    /// start row's list. When no move is possible the head backtracks one
    /// cell (locking the abandoned one); once `path` empties it restarts
    /// from the next row, or clears `game_not_finished` after the last row.
    ///
    /// Returns the head's location after the step. `table` is [rows, columns].
    pub fn new_move(&mut self, table: [u8; 2], numbers: &mut Vec<Vec<u8>>, menu_sum_numbers: &mut Vec<u8>, current_in_rows_cells_location: &mut Vec<Vec<[u8; 2]>>) -> [u8; 2] {
        println!("Head cell path : {:?}", self.path);
        if self.location[0] as i8 - 1 > -1 && self.location[1] + 1 < table[1] && self.locked_location != [self.location[0] - 1, self.location[1] + 1] && self.locked_location != [self.location[0], self.location[1] + 1] && self.locked_location != [self.location[0] + 1, self.location[1] + 1] {
            // Up-right: row > 0, next column in bounds, and no candidate cell
            // in the next column is the locked one.
            self.location[0] -= 1;
            self.location[1] += 1;
            self.path.push([self.location[0], self.location[1]]);
            menu_sum_numbers.push(*numbers.
                get_mut(self.location[0] as usize).
                unwrap().
                get_mut(self.location[1] as usize).
                unwrap()
            );
            current_in_rows_cells_location[self.start_row as usize].push(self.location);
        } else if self.location[1] + 1 < table[1] && self.locked_location != [self.location[0], self.location[1] + 1] && self.locked_location != [self.location[0] + 1, self.location[1] + 1] {
            // Straight right.
            self.location[1] += 1;
            self.path.push([self.location[0], self.location[1]]);
            menu_sum_numbers.push(*numbers.
                get_mut(self.location[0] as usize).
                unwrap().
                get_mut(self.location[1] as usize).
                unwrap()
            );
            current_in_rows_cells_location[self.start_row as usize].push(self.location);
        } else if self.location[0] + 1 < table[0] && self.location[1] + 1 < table[1] && self.locked_location != [self.location[0] + 1, self.location[1] + 1] {
            // Down-right.
            self.location[0] += 1;
            self.location[1] += 1;
            self.path.push([self.location[0], self.location[1]]);
            menu_sum_numbers.push(*numbers.
                get_mut(self.location[0] as usize).
                unwrap().
                get_mut(self.location[1] as usize).
                unwrap()
            );
            current_in_rows_cells_location[self.start_row as usize].push(self.location);
        } else {
            // Dead end: lock this cell so it is not re-entered, then pop it
            // from the path and both running records.
            // NOTE(review): the expect message below mentions
            // "maximum_in_rows_cells_location" but the pop targets
            // current_in_rows_cells_location — likely a copy-paste slip.
            self.locked_location = [self.location[0], self.location[1]];
            self.path.pop().expect("Can't pop from path");
            menu_sum_numbers.pop().expect("Can't pop from menu_sum_numbers");
            current_in_rows_cells_location[self.start_row as usize].pop().expect("Can't pop from maximum_in_rows_cells_location");
            if self.path.len() != 0 {
                // Resume from the previous cell on the path.
                self.location = *self.path.last().unwrap();
            } else {
                if self.start_row == table[0] - 1 {
                    // Every starting row has been explored: search complete.
                    self.game_not_finished = false;
                } else {
                    // Restart the walk from column 0 of the next row.
                    self.start_row += 1;
                    self.location = [self.start_row, 0];
                    self.path.push([self.start_row, 0]);
                    menu_sum_numbers.push(*numbers.
                        get_mut(self.location[0] as usize).
                        unwrap().
                        get_mut(self.location[1] as usize).
                        unwrap()
                    );
                }
            }
        }
        self.location
    }
}
/// Game state for the path-search visualization: a grid of random numbers
/// plus a solver head that searches, row by row, for the left-to-right path
/// with the highest sum.
pub struct GoldMiner {
    // [rows, columns] of the number grid.
    table: [u8; 2],
    // Left-hand menu panel background.
    menu_background: graphics::Mesh,
    // One "Max : N ->" label per starting row.
    menu_texts: Vec<NumberText>,
    // Running "a + b + ... = s" label for the row being solved.
    menu_sum_texts: Vec<NumberText>,
    // Final result label shown once the search completes.
    menu_max_text: NumberText,
    // Numbers along the path currently being explored.
    menu_sum_numbers: Vec<u8>,
    // Separator lines of the menu panel.
    menu_lines: Vec<graphics::Mesh>,
    // Filled background meshes of the grid cells.
    cells: Vec<Vec<Cell>>,
    cell_width: f32,
    cell_height: f32,
    // Stroked outlines of the grid cells.
    borders: Vec<Vec<Cell>>,
    // The random number held in each cell.
    numbers: Vec<Vec<u8>>,
    // Rendered text for each cell's number.
    texts: Vec<Vec<NumberText>>,
    // The solver cursor.
    head_node: HeadNode,
    // Best path sum found so far for each starting row.
    maximum_in_rows: Vec<u16>,
    // Cells of the path currently being explored, per starting row.
    current_in_rows_cells_location: Vec<Vec<[u8; 2]>>,
    // Cells of the best path found so far, per starting row.
    maximum_in_rows_cells_location: Vec<Vec<[u8; 2]>>,
    // Timing: the next solver step runs once `next_milisec` (time since
    // start) exceeds `current_milisec`.
    current_milisec: u64,
    next_milisec: u64,
    refresh_rate_in_miliseconds: u64,
    // Cleared once the final result has been rendered.
    game_not_finished: bool
}
impl GoldMiner {
    /// Builds the whole game from `./settings.conf`.
    ///
    /// Expected settings lines, in order, each of the form `key:value`:
    ///   1. table size as `RxC` after the colon (rows x columns)
    ///   2. maximum cell number (exclusive upper bound for the RNG)
    ///   3. refresh rate in milliseconds (delay between solver steps)
    ///   4. initial delay in milliseconds before solving starts
    ///
    /// # Panics
    /// Panics if the settings file is missing or malformed, or if any mesh
    /// creation fails.
    pub fn new(context: &mut Context) -> Self {
        let game_not_finished: bool = true;
        // --- Parse settings.conf ---
        let mut settings = String::new();
        File::open("./settings.conf").expect("Error opening the settings.conf file").read_to_string(&mut settings).expect("Error reading to string");
        let settings_rows = settings.split('\n').collect::<Vec<&str>>();
        // Line 1: "...:RxC" -> [rows, columns].
        let table_size = settings_rows.get(0)
            .unwrap()
            .split(':').collect::<Vec<&str>>().get(1).unwrap().split('x').collect::<Vec<&str>>();
        let mut table: [u8; 2] = [0; 2];
        table[0] = table_size.get(0).unwrap().parse::<u8>().unwrap();
        table[1] = table_size.get(1).unwrap().parse::<u8>().unwrap();
        // Line 2: exclusive upper bound for the random cell numbers.
        let max_number = settings_rows
            .get(1)
            .unwrap()
            .split(':')
            .collect::<Vec<&str>>();
        let max_number = max_number.get(1).unwrap();
        let max_number = max_number.parse::<u8>().unwrap();
        // Line 3: milliseconds between solver steps.
        let refresh_rate_in_miliseconds = settings_rows
            .get(2)
            .unwrap()
            .split(':')
            .collect::<Vec<&str>>();
        let refresh_rate_in_miliseconds = refresh_rate_in_miliseconds.get(1).unwrap();
        let refresh_rate_in_miliseconds: u64 = refresh_rate_in_miliseconds.parse::<u64>().unwrap();
        // Line 4: milliseconds to wait before the solver's first step.
        let wait_then_solve_in_miliseconds = settings_rows
            .get(3)
            .unwrap()
            .split(':')
            .collect::<Vec<&str>>();
        let wait_then_solve_in_miliseconds = wait_then_solve_in_miliseconds.get(1).unwrap();
        let wait_then_solve_in_miliseconds: u64 = wait_then_solve_in_miliseconds.parse::<u64>().unwrap();
        // --- Layout: 720x720 grid area to the right of a 560px-wide menu ---
        let cell_width = 720.0 / table[1] as f32;
        let cell_height = 720.0 / table[0] as f32;
        let rect = graphics::Rect::new(0.0, 0.0, 560.0, 720.0);
        let menu_background = graphics::Mesh::new_rectangle(context, graphics::DrawMode::fill(), rect, graphics::Color::new(0.2, 0.2, 0.2, 1.0)).unwrap();
        let mut menu_texts: Vec<NumberText> = Vec::new();
        let mut menu_sum_texts: Vec<NumberText> = Vec::new();
        let menu_max_text: NumberText = NumberText::new(
            graphics::Text::new(
                graphics::TextFragment::new(
                    "Final Maximum : "
                ).
                font(graphics::Font::default()).
                color(graphics::Color::new(1.0, 1.0, 0.0, 1.0)).
                scale(graphics::PxScale::from(15.0))
            )
        );
        let mut menu_sum_numbers: Vec<u8> = Vec::new();
        let mut menu_lines: Vec<graphics::Mesh> = Vec::new();
        let mut cells: Vec<Vec<Cell>> = Vec::new();
        let mut cell_rect = graphics::Rect::new(0.0, 0.0, cell_width, cell_height);
        let mut borders: Vec<Vec<Cell>> = Vec::new();
        let mut numbers: Vec<Vec<u8>> = Vec::new();
        let mut texts: Vec<Vec<NumberText>> = Vec::new();
        // One "Max : 0 ->" label per row, in that row's signature color
        // (rows beyond 9 fall back to red via the initial value).
        for i in 0..table[0] {
            let mut color = graphics::Color::new(1.0, 0.0, 0.0, 1.0);
            match i {
                0 => {
                    color = graphics::Color::new(1.0, 0.0, 0.0, 1.0);
                },
                1 => {
                    color = graphics::Color::new(1.0, 0.5, 0.0, 1.0);
                },
                2 => {
                    color = graphics::Color::new(0.7, 0.7, 0.0, 1.0);
                },
                3 => {
                    color = graphics::Color::new(0.0, 1.0, 0.0, 1.0);
                },
                4 => {
                    color = graphics::Color::new(0.0, 0.0, 1.0, 1.0);
                },
                5 => {
                    color = graphics::Color::new(0.5, 0.0, 1.0, 1.0);
                },
                6 => {
                    color = graphics::Color::new(1.0, 0.0, 1.0, 1.0);
                },
                7 => {
                    color = graphics::Color::new(1.0, 1.0, 1.0, 1.0);
                },
                8 => {
                    color = graphics::Color::new(0.5, 0.5, 0.5, 1.0);
                },
                9 => {
                    color = graphics::Color::new(0.0, 0.0, 0.0, 1.0);
                },
                _ => {}
            }
            menu_texts.push(NumberText::new(
                graphics::Text::new(
                    graphics::TextFragment::new(
                        "Max : 0 ->"
                    ).
                    font(graphics::Font::default()).
                    color(color).
                    scale(graphics::PxScale::from(15.0)))
            ));
        }
        // Horizontal menu separators (one per row boundary) plus the panel's
        // left edge.
        for i in 0..table[0] + 1 {
            menu_lines.push(graphics::Mesh::new_line(context, &[Vector2D::new(0.0, i as f32 * 720.0 / table[0] as f32), Vector2D::new(560.0, i as f32 * 720.0 / table[0] as f32)], 2.0, graphics::Color::WHITE).unwrap());
        }
        menu_lines.push(graphics::Mesh::new_line(context, &[Vector2D::new(0.0, 0.0), Vector2D::new(0.0, 720.0)], 2.0, graphics::Color::WHITE).unwrap());
        // Filled cell backgrounds; the grid starts at x = 560.
        for i in 0..table[0] as usize {
            cells.push(Vec::new());
            cell_rect.y = i as f32 * cell_height;
            for j in 0..table[1] as usize {
                cell_rect.x = 560.0 + j as f32 * cell_width;
                cells.get_mut(i).unwrap().push(Cell::new(
                    graphics::Mesh::new_rectangle(context,
                        graphics::DrawMode::fill(),
                        cell_rect,
                        graphics::Color::new(0.2, 0.2, 0.2, 1.0)
                    ).unwrap()
                ));
            }
        }
        // Cell outlines, same geometry as the backgrounds.
        for i in 0..table[0] as usize {
            borders.push(Vec::new());
            cell_rect.y = i as f32 * cell_height;
            for j in 0..table[1] as usize {
                cell_rect.x = 560.0 + j as f32 * cell_width;
                borders.get_mut(i).unwrap().push(Cell::new(
                    graphics::Mesh::new_rectangle(context,
                        graphics::DrawMode::stroke(2.0),
                        cell_rect,
                        graphics::Color::WHITE
                    ).unwrap()
                ));
            }
        }
        // Roll a random number for every cell and build its text.
        for i in 0..table[0] as usize {
            texts.push(Vec::new());
            numbers.push(Vec::new());
            for _j in 0..table[1] as usize {
                let random_number: u8 = rand::thread_rng().gen_range(0..max_number);
                numbers.get_mut(i).unwrap().push(random_number);
                texts.get_mut(i).unwrap().push(NumberText::new(
                    graphics::Text::new(
                        graphics::TextFragment::new(
                            random_number.
                            to_string()
                        ).
                        font(graphics::Font::default()).
                        color(graphics::Color::new(1.0, 1.0, 0.0, 1.0)).
                        scale(graphics::PxScale::from(40.0)))
                ));
            }
        }
        // The solver's marker, positioned inside the top-left cell.
        let head_node = HeadNode::new(
            graphics::Mesh::new_circle(
                context,
                graphics::DrawMode::fill(),
                Vector2D::new(576.0 + cell_width / 2.5, cell_height / 1.2),
                3.0,
                0.5,
                graphics::Color::new(1.0, 0.0, 0.0, 1.0)
            ).unwrap()
        );
        // Per-row best-path bookkeeping, plus each row's starting cell.
        let mut maximum_in_rows: Vec<u16> = Vec::new();
        let mut current_in_rows_cells_location: Vec<Vec<[u8; 2]>> = Vec::new();
        let mut maximum_in_rows_cells_location: Vec<Vec<[u8; 2]>> = Vec::new();
        for _index in 0..table[0] as usize {
            maximum_in_rows.push(0);
            current_in_rows_cells_location.push(Vec::new());
            maximum_in_rows_cells_location.push(Vec::new());
        }
        for index in 0..table[0] as usize {
            current_in_rows_cells_location[index].push([index as u8, 0]);
        }
        // Highlight the starting cell's number in red and seed the running
        // sum display with it.
        texts.
        get_mut(0).
        unwrap().
        get_mut(0).
        unwrap().
        text = graphics::Text::new(
            graphics::TextFragment::new(numbers.
                get_mut(0).
                unwrap().
                get_mut(0).
                unwrap().
                to_string()
            ).
            font(graphics::Font::default()).
            color(graphics::Color::new(1.0, 0.0, 0.0, 1.0)).
            scale(graphics::PxScale::from(40.0))
        );
        menu_sum_texts.push(NumberText::new(
            graphics::Text::new(
                graphics::TextFragment::new(
                    format!("{} + ", numbers.get(0).unwrap().get(0).unwrap().to_string())
                ).
                font(graphics::Font::default()).
                color(graphics::Color::new(1.0, 0.0, 0.0, 1.0)).
                scale(graphics::PxScale::from(15.0)))
        ));
        menu_sum_numbers.push(*numbers.get(0).unwrap().get(0).unwrap());
        Self {
            table,
            menu_background,
            menu_texts,
            menu_sum_texts,
            menu_max_text,
            menu_sum_numbers,
            menu_lines,
            cells,
            cell_width,
            cell_height,
            borders,
            numbers,
            texts,
            head_node,
            maximum_in_rows,
            current_in_rows_cells_location,
            maximum_in_rows_cells_location,
            // The first solver step only happens after the configured delay.
            current_milisec: wait_then_solve_in_miliseconds,
            next_milisec: 0,
            refresh_rate_in_miliseconds,
            game_not_finished
        }
    }
    /// Runs one rate-limited solver step.
    ///
    /// Executes at most once per `refresh_rate_in_miliseconds`: advances the
    /// head, refreshes the active row's max label and the stepped-on cell's
    /// text, moves the head marker, and rebuilds the running-sum label. When
    /// a path spanning the full table width completes, its sum is compared
    /// against the row's best so far. Once the head reports the search done,
    /// the winning path and the final maximum are rendered in green.
    fn update_logic(&mut self, context: &mut Context) {
        self.next_milisec = ggez::timer::time_since_start(context).as_millis() as u64;
        if self.current_milisec < self.next_milisec {
            // Schedule the next step one refresh interval from now.
            self.current_milisec = self.next_milisec + self.refresh_rate_in_miliseconds;
            if self.game_not_finished {
                if self.head_node.game_not_finished {
                    let new_location = self.head_node.new_move(self.table, &mut self.numbers, &mut self.menu_sum_numbers, &mut self.current_in_rows_cells_location);
                    // Per-start-row color table.
                    // NOTE(review): row 2 uses (1.0, 1.0, 0.0) here but
                    // (0.7, 0.7, 0.0) in `new` — confirm which is intended.
                    let mut color = graphics::Color::new(1.0, 0.0, 0.0, 1.0);
                    match self.head_node.start_row {
                        0 => {
                            color = graphics::Color::new(1.0, 0.0, 0.0, 1.0);
                        },
                        1 => {
                            color = graphics::Color::new(1.0, 0.5, 0.0, 1.0);
                        },
                        2 => {
                            color = graphics::Color::new(1.0, 1.0, 0.0, 1.0);
                        },
                        3 => {
                            color = graphics::Color::new(0.0, 1.0, 0.0, 1.0);
                        },
                        4 => {
                            color = graphics::Color::new(0.0, 0.0, 1.0, 1.0);
                        },
                        5 => {
                            color = graphics::Color::new(0.5, 0.0, 1.0, 1.0);
                        },
                        6 => {
                            color = graphics::Color::new(1.0, 0.0, 1.0, 1.0);
                        },
                        7 => {
                            color = graphics::Color::new(1.0, 1.0, 1.0, 1.0);
                        },
                        8 => {
                            color = graphics::Color::new(0.5, 0.5, 0.5, 1.0);
                        },
                        9 => {
                            color = graphics::Color::new(0.0, 0.0, 0.0, 1.0);
                        },
                        _ => {}
                    }
                    // Refresh the "Max : N ->" label of the active row.
                    let mut maximum = String::new();
                    maximum.push_str("Max : ");
                    maximum.push_str(self.maximum_in_rows[self.head_node.start_row as usize].to_string().as_str());
                    maximum.push_str(" ->");
                    self.menu_texts.
                    get_mut(self.head_node.start_row as usize).
                    unwrap().
                    text = graphics::Text::new(
                        graphics::TextFragment::new(
                            maximum
                        ).
                        font(graphics::Font::default()).
                        color(color).
                        scale(graphics::PxScale::from(15.0))
                    );
                    // Repaint the number the head just stepped onto in red.
                    self.texts.
                    get_mut(new_location[0] as usize).
                    unwrap().
                    get_mut(new_location[1] as usize).
                    unwrap().
                    text = graphics::Text::new(
                        graphics::TextFragment::new(self.numbers.
                            get_mut(new_location[0] as usize).
                            unwrap().
                            get_mut(new_location[1] as usize).
                            unwrap().
                            to_string()
                        ).
                        font(graphics::Font::default()).
                        color(graphics::Color::new(1.0, 0.0, 0.0, 1.0)).
                        scale(graphics::PxScale::from(40.0))
                    );
                    // Move the head marker into the new cell.
                    self.head_node.mesh = graphics::Mesh::new_circle(
                        context,
                        graphics::DrawMode::fill(),
                        Vector2D::new(576.0 + new_location[1] as f32 * self.cell_width + self.cell_width / 2.5, new_location[0] as f32 * self.cell_height + self.cell_height / 1.2),
                        3.0,
                        0.5,
                        color
                    ).unwrap();
                    // Rebuild the "a + b + ..." running-sum label from scratch.
                    self.menu_sum_texts.pop();
                    let mut new_menu_sum_text = String::new();
                    for number in self.menu_sum_numbers.iter() {
                        new_menu_sum_text.push_str(number.to_string().as_str());
                        new_menu_sum_text.push_str(" + ");
                    }
                    // A full-width path is complete: drop the trailing '+',
                    // append "= sum", and update the row's best if improved.
                    if self.menu_sum_numbers.len() == self.table[1] as usize {
                        new_menu_sum_text.remove(new_menu_sum_text.len() - 2);
                        new_menu_sum_text.push_str("= ");
                        let mut sum_for_menu_sum_numbers: u16 = 0;
                        for number in self.menu_sum_numbers.iter() {
                            sum_for_menu_sum_numbers += *number as u16;
                        }
                        let max = self.maximum_in_rows.get(self.head_node.start_row as usize).unwrap();
                        if sum_for_menu_sum_numbers > *max {
                            self.maximum_in_rows_cells_location[self.head_node.start_row as usize] = self.current_in_rows_cells_location[self.head_node.start_row as usize].clone();
                            self.maximum_in_rows[self.head_node.start_row as usize] = sum_for_menu_sum_numbers;
                        }
                        new_menu_sum_text.push_str(sum_for_menu_sum_numbers.to_string().as_str());
                    }
                    self.menu_sum_texts.push(NumberText::new(
                        graphics::Text::new(
                            graphics::TextFragment::new(
                                new_menu_sum_text
                            ).
                            font(graphics::Font::default()).
                            color(color).
                            scale(graphics::PxScale::from(15.0))
                        )));
                } else {
                    // Search finished: find the best row overall...
                    self.game_not_finished = false;
                    println!("Finished :D");
                    let mut max_of_max: u16 = 0;
                    let mut i: u8 = 0;
                    let mut j: u8 = 0;
                    for max in self.maximum_in_rows.iter() {
                        if *max > max_of_max {
                            max_of_max = *max;
                            j = i;
                        }
                        i += 1;
                    }
                    // ...announce it in the menu...
                    let mut final_max = String::new();
                    final_max.push_str("Final Maximum : ");
                    final_max.push_str(max_of_max.to_string().as_str());
                    self.menu_max_text.text = graphics::Text::new(
                        graphics::TextFragment::new(
                            final_max
                        ).
                        font(graphics::Font::default()).
                        color(graphics::Color::new(0.0, 1.0, 0.0, 1.0)).
                        scale(graphics::PxScale::from(15.0))
                    );
                    // ...and repaint the winning path's numbers in green.
                    let final_paths = self.maximum_in_rows_cells_location.get(j as usize).unwrap();
                    for final_path in final_paths.iter() {
                        self.texts.get_mut(final_path[0] as usize).unwrap().get_mut(final_path[1] as usize).unwrap().text =
                        graphics::Text::new(
                            graphics::TextFragment::new(
                                self.numbers.get(final_path[0] as usize).unwrap().get(final_path[1] as usize).unwrap().to_string()
                            ).
                            font(graphics::Font::default()).
                            color(graphics::Color::new(0.0, 1.0, 0.0, 1.0)).
                            scale(graphics::PxScale::from(40.0))
                        );
                    }
                }
            }
        }
    }
fn update_graphic(&self, context: &mut Context) {
graphics::draw(context, &self.menu_background, graphics::DrawParam::default()).expect("Error can't draw menu_background");
let mut i: u8 = 0;
for row in self.menu_texts.iter() {
graphics::draw(context, &row.text, (Vector2D::new(20.0, 30.0 + i as f32 * self.cell_height), graphics::Color::WHITE)).expect("Error updating graphic");
i += 1;
}
let mut i: u8 = 0;
for row in self.menu_sum_texts.iter() {
graphics::draw(context, &row.text, (Vector2D::new(120.0 + i as f32 * self.cell_width, 30.0 + self.head_node.start_row as f32 * self.cell_height), graphics::Color::WHITE)).expect("Error updating graphic");
i += 1;
}
for row in self.menu_lines.iter() {
graphics::draw(context, row, graphics::DrawParam::default()).expect("Error updating graphic");
}
for row in self.cells.iter() {
for column in row.iter() {
graphics::draw(context, &column.mesh, graphics::DrawParam::default()).expect("Error updating graphic");
}
}
for row in self.borders.iter() {
for column in row.iter() {
graphics::draw(context, &column.mesh, graphics::DrawParam::default()).expect("Error updating graphic");
}
}
i = 0;
let mut j: u8 = 0;
for row in self.texts.iter() {
for column in row.iter() {
graphics::draw(context, &column.text, (Vector2D::new(560.0 + j as f32 * self.cell_width + self.cell_width / 3.0, i as f32 * self.cell_height + self.cell_height / 4.0), graphics::Color::WHITE)).expect("Error updating graphic");
j += 1;
}
j = 0;
i += 1;
}
graphics::draw(context, &self.menu_max_text.text, (Vector2D::new(380.0, 680.0), graphics::Color::WHITE)).expect("Can't update final max graphic");
graphics::draw(context, &self.head_node.mesh, graphics::DrawParam::default()).expect("Error can't draw head_node mesh");
}
}
impl EventHandler for GoldMiner {
    /// Per-frame logic: advance the solver (rate-limited internally).
    fn update(&mut self, context: &mut Context) -> GameResult<()> {
        self.update_logic(context);
        Ok(())
    }
    /// Per-frame rendering: clear to white, draw everything, present.
    fn draw(&mut self, context: &mut Context) -> GameResult<()> {
        graphics::clear(context, graphics::Color::WHITE);
        self.update_graphic(context);
        graphics::present(context)
    }
} |
// q0039_combination_sum
struct Solution;
impl Solution {
    /// LeetCode 39 — Combination Sum.
    ///
    /// Returns every unique combination of `candidates` (each candidate may
    /// be reused any number of times) whose sum equals `target`. Candidates
    /// are sorted first so the search can stop as soon as a partial sum
    /// reaches or overshoots the target.
    pub fn combination_sum(candidates: Vec<i32>, target: i32) -> Vec<Vec<i32>> {
        let mut candidates = candidates;
        candidates.sort_unstable();
        let mut ret = vec![];
        let mut tmp = vec![];
        Solution::solve(&candidates[..], target, &mut tmp, &mut ret);
        ret
    }
    /// Depth-first search over the sorted candidates.
    ///
    /// `tmp` holds the combination built so far. Recursing on `&cddt[index..]`
    /// (rather than `index + 1`) allows reuse of the current candidate while
    /// never revisiting smaller ones, which prevents duplicate combinations.
    fn solve(cddt: &[i32], target: i32, tmp: &mut Vec<i32>, ret: &mut Vec<Vec<i32>>) {
        for (index, i) in cddt.iter().enumerate() {
            tmp.push(*i);
            let sum: i32 = tmp.iter().sum();
            if sum < target {
                Solution::solve(&cddt[index..], target, tmp, ret);
            } else if sum == target {
                ret.push(tmp.clone());
                tmp.pop();
                // Input is sorted: every later candidate overshoots too.
                return;
            } else {
                tmp.pop();
                // Input is sorted: once we overshoot, the rest overshoot too.
                return;
            }
            tmp.pop();
        }
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;
    #[test]
    fn it_works() {
        // Smaller sample cases are kept for reference but disabled.
        // NOTE(review): confirm whether they were disabled because the
        // solver's result ordering differs from the expected vectors, or
        // because they genuinely fail.
        // assert_eq!(
        //     Solution::combination_sum(vec![2, 3, 6, 7], 7),
        //     vec![vec![7], vec![2, 2, 3]]
        // );
        // assert_eq!(
        //     Solution::combination_sum(vec![2, 3, 7], 18),
        //     vec![vec![2,2,2,2,2,2,2,2,2],vec![2,2,2,2,2,2,3,3],vec![2,2,2,2,3,7],vec![2,2,2,3,3,3,3],vec![2,2,7,7],vec![2,3,3,3,7],vec![3,3,3,3,3,3]]
        // );
        // assert_eq!(
        //     Solution::combination_sum(vec![2, 3, 5], 8),
        //     vec![vec![2, 2, 2, 2], vec![2, 3, 3], vec![3, 5]]
        // );
        // Regression case: 23 unsorted candidates, target 69.
        assert_eq!(
            Solution::combination_sum(
                vec![
                    48, 22, 49, 24, 26, 47, 33, 40, 37, 39, 31, 46, 36, 43, 45, 34, 28, 20, 29, 25,
                    41, 32, 23
                ],
                69
            ),
            vec![
                vec![20, 20, 29],
                vec![20, 23, 26],
                vec![20, 24, 25],
                vec![20, 49],
                vec![22, 22, 25],
                vec![22, 23, 24],
                vec![22, 47],
                vec![23, 23, 23],
                vec![23, 46],
                vec![24, 45],
                vec![26, 43],
                vec![28, 41],
                vec![29, 40],
                vec![32, 37],
                vec![33, 36]
            ]
        );
    }
    // Disabled: exercises a `combine` helper from an earlier implementation.
    // #[test]
    // #[ignore]
    // fn test_conbine() {
    //     let mut ret = vec![];
    //     let src = vec![1, 2, 3, 4, 5];
    //     let tmp = vec![];
    //     Solution::combine(&src, 2, tmp, &mut ret);
    //     assert_eq!(
    //         ret,
    //         vec![
    //             vec![1, 2],
    //             vec![1, 3],
    //             vec![1, 4],
    //             vec![1, 5],
    //             vec![2, 3],
    //             vec![2, 4],
    //             vec![2, 5],
    //             vec![3, 4],
    //             vec![3, 5],
    //             vec![4, 5],
    //         ]
    //     );
    //     let mut ret = vec![];
    //     let src = vec![1,2,3,4,5];
    //     let tmp = vec![];
    //     Solution::combine(&src, 1, tmp, &mut ret);
    //     assert_eq!(ret, vec![
    //         vec![2],
    //         vec![3],
    //         vec![4],
    //         vec![5],
    //         vec![1],
    //     ]);
    // }
}
|
//A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 × 99.
//
//Find the largest palindrome made from the product of two 3-digit numbers.
// Loop through from 999 to 100 for both multipliers, testing if result is palindrome, return the maximum result
/// Project Euler #4: the largest palindrome that is a product of two
/// 3-digit numbers (100..=999; the original loops stopped at 101,
/// wrongly excluding 100).
///
/// Iterates pairs from the top down, skipping symmetric duplicates
/// (`b <= a`) and bailing out of the inner loop as soon as the products
/// can no longer beat the best palindrome found so far.
fn solve() -> u64 {
    let mut best: u64 = 0;
    for a in (100..=999u64).rev() {
        for b in (100..=a).rev() {
            let product = a * b;
            // Products only shrink as `b` decreases, so nothing further in
            // this row can improve on `best`.
            if product <= best {
                break;
            }
            // A palindrome reads the same forwards and backwards; comparing
            // the digit bytes directly avoids the old string->parse round trip.
            let digits = product.to_string();
            if digits.bytes().eq(digits.bytes().rev()) {
                best = product;
            }
        }
    }
    best
}
/// Entry point: print the answer.
fn main() {
    // `{}` already formats via Display; the former `.to_string()` was a
    // redundant extra allocation.
    println!("{}", solve());
}
#[cfg(test)]
mod tests {
    // Known Project Euler #4 answer: 906609 = 913 * 993.
    #[test]
    fn max_palindrome_3_digits() {
        assert_eq!(906609, super::solve());
    }
}
|
use crate::grammar::ast::eq::{ast_eq, AstEq};
use crate::grammar::ast::BooleanLit;
use crate::grammar::ast::CharLit;
use crate::grammar::ast::Identifier;
use crate::grammar::ast::NumLitPattern;
use crate::grammar::ast::Pattern;
use crate::grammar::ast::StringLit;
use crate::grammar::ast::Underscore;
use crate::grammar::model::{HasSourceReference, WrightInput};
use crate::grammar::tracing::{parsers::map, trace_result};
use std::mem::discriminant;
use crate::grammar::tracing::parsers::alt;
use nom::IResult;
use std::fmt::Debug;
/// Underscore Pattern.
pub(crate) mod underscore;
/// Numerical literal pattern.
mod num_lit;
impl<T: Debug + Clone> Pattern<T> {
    /// The name of this parser that appears in tracing.
    // Referenced by `Pattern::parse` when entering/leaving the tracer.
    pub const TRACE_NAME: &'static str = "Pattern";
}
impl<I: WrightInput> Pattern<I> {
    /// Parse a numerical-literal pattern.
    fn parse_num_lit(input: I) -> IResult<I, Self> {
        map(NumLitPattern::parse, Pattern::NumLit)(input)
    }
    /// Parse a character-literal pattern.
    fn parse_char_lit(input: I) -> IResult<I, Self> {
        map(CharLit::parse, Pattern::CharLit)(input)
    }
    /// Parse a string-literal pattern.
    fn parse_string_lit(input: I) -> IResult<I, Self> {
        map(StringLit::parse, Pattern::StringLit)(input)
    }
    /// Parse a boolean-literal pattern.
    fn parse_boolean_lit(input: I) -> IResult<I, Self> {
        map(BooleanLit::parse, Pattern::BooleanLit)(input)
    }
    /// Parse an identifier pattern.
    fn parse_identifier(input: I) -> IResult<I, Self> {
        map(Identifier::parse, Pattern::Identifier)(input)
    }
    /// Parse an underscore (wildcard) pattern.
    fn parse_underscore(input: I) -> IResult<I, Self> {
        map(Underscore::parse, Pattern::Underscore)(input)
    }
    /// Parse a pattern
    ///
    /// Alternatives are tried in the listed order; the first that succeeds
    /// wins, so ordering matters for inputs several branches could accept.
    /// NOTE(review): `Pattern::ScopedName` has no branch here — confirm
    /// whether that variant is produced elsewhere or is not yet parseable.
    pub fn parse(input: I) -> IResult<I, Self> {
        trace_result(
            Self::TRACE_NAME,
            alt((
                Self::parse_num_lit,
                Self::parse_char_lit,
                Self::parse_string_lit,
                Self::parse_boolean_lit,
                Self::parse_identifier,
                Self::parse_underscore,
            ))(input.trace_start_clone(Self::TRACE_NAME)),
        )
    }
}
impl<I: Debug + Clone> HasSourceReference<I> for Pattern<I> {
    /// Delegates to the source reference of whichever variant this is.
    fn get_source_ref(&self) -> &I {
        use Pattern::*;
        match self {
            NumLit(p) => p.get_source_ref(),
            CharLit(p) => p.get_source_ref(),
            StringLit(p) => p.get_source_ref(),
            BooleanLit(p) => p.get_source_ref(),
            Identifier(p) => p.get_source_ref(),
            Underscore(p) => p.get_source_ref(),
            ScopedName(p) => p.get_source_ref(),
        }
    }
}
impl<I: Debug + Clone + PartialEq> AstEq for Pattern<I> {
    /// Two patterns are AST-equal iff they are the same variant and their
    /// inner nodes are AST-equal.
    fn ast_eq(fst: &Self, snd: &Self) -> bool {
        // Different variants can never be equal; this check also guarantees
        // the match below only ever sees same-variant pairs.
        if discriminant(fst) != discriminant(snd) {
            return false;
        }
        use Pattern::*;
        match (fst, snd) {
            (Underscore(a), Underscore(b)) => ast_eq(a, b),
            (NumLit(a), NumLit(b)) => ast_eq(a, b),
            (CharLit(a), CharLit(b)) => ast_eq(a, b),
            (StringLit(a), StringLit(b)) => ast_eq(a, b),
            (BooleanLit(a), BooleanLit(b)) => ast_eq(a, b),
            (Identifier(a), Identifier(b)) => ast_eq(a, b),
            // NOTE(review): a (ScopedName, ScopedName) pair passes the
            // discriminant check but falls through to this arm and panics.
            // Likely a missing `(ScopedName(a), ScopedName(b))` arm — confirm
            // and fix rather than relying on unreachable!().
            _ => unreachable!(),
        }
    }
}
|
#![doc(html_logo_url = "https://avatars0.githubusercontent.com/u/54989751?s=200&v=4")]
//! # wapc
//!
//! The `wapc` crate provides a WebAssembly host runtime that conforms to an RPC mechanism
//! called **waPC**. waPC is designed specifically to prevent either side of the call from having
//! to know anything about _how_ or _when_ memory is allocated or freed. The interface may at first appear more
//! "chatty" than other protocols, but the cleanliness, ease of use, and simplified developer experience
//! is worth the few extra nanoseconds of latency.
//!
//! To use `wapc`, first you'll need a waPC-compliant WebAssembly module (referred to as the _guest_) to load
//! and interpret. You can find a number of these samples available in the GitHub repository,
//! and anything compiled with the [wascc](https://github.com/wascc) actor SDK can also be invoked
//! via waPC as it is 100% waPC compliant.
//!
//! To make function calls, first set your `host_callback` function, a function invoked by the _guest_.
//! Then execute `call` on the `WapcHost` instance.
//! # Example
//! ```
//! extern crate wapc;
//! use wapc::prelude::*;
//!
//! # fn load_file() -> Vec<u8> {
//! # include_bytes!("../.assets/hello.wasm").to_vec()
//! # }
//! # fn load_wasi_file() -> Vec<u8> {
//! # include_bytes!("../.assets/hello_wasi.wasm").to_vec()
//! # }
//! pub fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
//! let module_bytes = load_file();
//! let host = WapcHost::new(|id: u64, bd: &str, ns: &str, op: &str, payload: &[u8]| {
//! println!("Guest {} invoked '{}->{}:{}' with payload of {} bytes", id, bd, ns, op, payload.len());
//! Ok(vec![])
//! }, &module_bytes, None)?;
//!
//! let res = host.call("wapc:sample!Hello", b"this is a test")?;
//! assert_eq!(res, b"hello world!");
//!
//! Ok(())
//! }
//! ```
//!
//! # Notes
//! waPC is _reactive_. Guest modules cannot initiate host calls without first handling a call
//! initiated by the host. waPC will not automatically invoke any start functions--that decision
//! is up to the waPC library consumer. Guest modules can synchronously make as many host calls
//! as they like, but keep in mind that if a host call takes too long or fails, it'll cause the original
//! guest call to also fail.
//!
//! In summary, keep `host_callback` functions fast and resilient, and do not spawn new threads
//! within `host_callback` unless you must (and can synchronize memory access) because waPC
//! assumes a single-threaded execution environment. The `host_callback` function intentionally
//! has no references to the WebAssembly module bytes or the running instance.
#[macro_use]
extern crate log;
mod callbacks;
pub mod errors;
mod modreg;
pub mod prelude;
/// A result type for errors that occur within the wapc library
pub type Result<T> = std::result::Result<T, errors::Error>;
use crate::modreg::ModuleRegistry;
use std::sync::atomic::{AtomicU64, Ordering};
use wasmtime::Func;
use wasmtime::Instance;
use std::cell::RefCell;
use crate::callbacks::ModuleState;
use std::rc::Rc;
use wasmtime::*;
/// Invokes a guest export with the given arguments and coerces the single
/// return value to `i32`. Any invocation failure is logged and mapped to 0,
/// which callers treat as "guest call failed".
macro_rules! call {
    ($func:expr, $($p:expr),*) => {
        match $func.call(&[$($p.into()),*]) {
            Ok(result) => {
                // NOTE(review): indexes result[0] — this panics if the export
                // returns no values; assumes waPC exports always return
                // exactly one i32. Confirm against the waPC spec.
                let result: i32 = result[0].i32().unwrap();
                result
            }
            Err(e) => {
                error!("Failure invoking guest module handler: {:?}", e);
                0
            }
        }
    }
}
// Monotonically increasing id source for WapcHost instances; starts at 1 so
// 0 is never a valid module id.
static GLOBAL_MODULE_COUNT: AtomicU64 = AtomicU64::new(1);
// Import namespace the guest uses to reach the host's waPC functions.
const HOST_NAMESPACE: &str = "wapc";
// -- Functions called by guest, exported by host
const HOST_CONSOLE_LOG: &str = "__console_log";
const HOST_CALL: &str = "__host_call";
const GUEST_REQUEST_FN: &str = "__guest_request";
const HOST_RESPONSE_FN: &str = "__host_response";
const HOST_RESPONSE_LEN_FN: &str = "__host_response_len";
const GUEST_RESPONSE_FN: &str = "__guest_response";
const GUEST_ERROR_FN: &str = "__guest_error";
const HOST_ERROR_FN: &str = "__host_error";
const HOST_ERROR_LEN_FN: &str = "__host_error_len";
// -- Functions called by host, exported by guest
const GUEST_CALL: &str = "__guest_call";
// namespace needed for some language support
const WASI_UNSTABLE_NAMESPACE: &str = "wasi_unstable";
const WASI_SNAPSHOT_PREVIEW1_NAMESPACE: &str = "wasi_snapshot_preview1";
/// Host-call handler invoked by the guest:
/// (module id, binding, namespace, operation, payload) -> response bytes.
type HostCallback = dyn Fn(u64, &str, &str, &str, &[u8]) -> std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>
    + Sync
    + Send
    + 'static;
/// Logging handler invoked for guest console output: (module id, message).
type LogCallback = dyn Fn(u64, &str) -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>>
    + Sync
    + Send
    + 'static;
/// A single guest invocation: the operation name plus its opaque payload.
#[derive(Debug, Clone)]
struct Invocation {
    operation: String,
    msg: Vec<u8>,
}
impl Invocation {
    /// Packages an operation name and payload for delivery to the guest.
    fn new(op: &str, msg: Vec<u8>) -> Invocation {
        Invocation {
            operation: String::from(op),
            msg,
        }
    }
}
/// Stores the parameters required to create a WASI instance
#[derive(Debug, Default)]
pub struct WasiParams {
    argv: Vec<String>,
    map_dirs: Vec<(String, String)>,
    env_vars: Vec<(String, String)>,
    preopened_dirs: Vec<String>,
}
impl WasiParams {
    /// Bundles the argv list, directory mappings, environment variables and
    /// pre-opened directories used when instantiating a WASI module.
    pub fn new(
        argv: Vec<String>,
        map_dirs: Vec<(String, String)>,
        env_vars: Vec<(String, String)>,
        preopened_dirs: Vec<String>,
    ) -> Self {
        // Field-init shorthand, listed in declaration order.
        WasiParams { argv, map_dirs, env_vars, preopened_dirs }
    }
}
/// A WebAssembly host runtime for waPC-compliant WebAssembly modules
///
/// Use an instance of this struct to provide a means of invoking procedure calls by
/// specifying an operation name and a set of bytes representing the opaque operation payload.
/// `WapcHost` makes no assumptions about the contents or format of either the payload or the
/// operation name.
pub struct WapcHost {
    // Per-module mutable state (request/response/error slots and callbacks).
    state: Rc<RefCell<ModuleState>>,
    // The live wasmtime instance; swapped wholesale by `replace_module`.
    instance: Rc<RefCell<Option<Instance>>>,
    // WASI settings captured at construction and reused on hot swap.
    wasidata: Option<WasiParams>,
    // Cached handle to the guest's `__guest_call` export.
    guest_call_fn: Func,
}
impl WapcHost {
    /// Creates a new instance of a waPC-compliant WebAssembly host runtime.
    ///
    /// `host_callback` is invoked whenever the guest performs a host call;
    /// `buf` is the raw WebAssembly module and `wasi` optionally configures a
    /// WASI environment for it. The module is instantiated and initialized
    /// before this returns.
    ///
    /// # Errors
    /// Fails if the module cannot be instantiated, if the guest-call export
    /// cannot be resolved, or if initialization fails.
    pub fn new(
        host_callback: impl Fn(
            u64,
            &str,
            &str,
            &str,
            &[u8],
        ) -> std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>
            + 'static
            + Sync
            + Send,
        buf: &[u8],
        wasi: Option<WasiParams>,
    ) -> Result<Self> {
        // Unique id for this module (lets a shared host callback tell
        // multiple modules apart).
        let id = GLOBAL_MODULE_COUNT.fetch_add(1, Ordering::SeqCst);
        let state = Rc::new(RefCell::new(ModuleState::new(id, Box::new(host_callback))));
        // The instance cell starts empty and is populated once
        // instantiation succeeds.
        let instance_ref = Rc::new(RefCell::new(None));
        let instance = WapcHost::instance_from_buffer(buf, &wasi, state.clone())?;
        instance_ref.replace(Some(instance));
        // Resolve and cache the guest's __guest_call export.
        let gc = guest_call_fn(instance_ref.clone())?;
        let mh = WapcHost {
            state,
            instance: instance_ref,
            wasidata: wasi,
            guest_call_fn: gc,
        };
        mh.initialize()?;
        Ok(mh)
    }
    /// Creates a new instance of a waPC-compliant WebAssembly host runtime with a callback handler
    /// for logging
    ///
    /// Identical to [`WapcHost::new`] except that guest console output is
    /// routed through `logger` instead of the default handler.
    pub fn new_with_logger(
        host_callback: impl Fn(
            u64,
            &str,
            &str,
            &str,
            &[u8],
        ) -> std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>
            + 'static
            + Sync
            + Send,
        buf: &[u8],
        logger: impl Fn(u64, &str) -> std::result::Result<(), Box<dyn std::error::Error + Send + Sync>>
            + Sync
            + Send
            + 'static,
        wasi: Option<WasiParams>,
    ) -> Result<Self> {
        // Same construction sequence as `new`, but with a logger-aware state.
        let id = GLOBAL_MODULE_COUNT.fetch_add(1, Ordering::SeqCst);
        let state = Rc::new(RefCell::new(ModuleState::new_with_logger(
            id,
            Box::new(host_callback),
            Box::new(logger),
        )));
        let instance_ref = Rc::new(RefCell::new(None));
        let instance = WapcHost::instance_from_buffer(buf, &wasi, state.clone())?;
        instance_ref.replace(Some(instance));
        let gc = guest_call_fn(instance_ref.clone())?;
        let mh = WapcHost {
            state,
            instance: instance_ref,
            wasidata: wasi,
            guest_call_fn: gc,
        };
        mh.initialize()?;
        Ok(mh)
    }
    /// Returns the unique identifier of this module instance. If a parent
    /// process has instantiated multiple `WapcHost`s, the single static host
    /// call function may use this id to differentiate between modules.
    pub fn id(&self) -> u64 {
        // Assigned once from GLOBAL_MODULE_COUNT at construction.
        self.state.borrow().id
    }
    /// Invokes the `__guest_call` function within the guest module as per the waPC specification.
    /// Provide an operation name and an opaque payload of bytes and the function returns a `Result`
    /// containing either an error or an opaque reply of bytes.
    ///
    /// It is worth noting that the _first_ time `call` is invoked, the WebAssembly module
    /// will be JIT-compiled. This can take up to a few seconds on debug .wasm files, but
    /// all subsequent calls will be "hot" and run at near-native speeds.
    pub fn call(&self, op: &str, payload: &[u8]) -> Result<Vec<u8>> {
        let inv = Invocation::new(op, payload.to_vec());
        {
            // Stage the request and clear any stale response/error. Scoped so
            // the mutable borrow is released before the guest — which may
            // call back into this state — runs.
            let mut state = self.state.borrow_mut();
            state.guest_response = None;
            state.guest_request = Some((inv).clone());
            state.guest_error = None;
        }
        // __guest_call(op_len, msg_len); nonzero return signals success.
        let callresult: i32 = call!(
            self.guest_call_fn,
            inv.operation.len() as i32,
            inv.msg.len() as i32
        );
        if callresult == 0 {
            // invocation failed
            match self.state.borrow().guest_error {
                Some(ref s) => Err(errors::new(errors::ErrorKind::GuestCallFailure(s.clone()))),
                None => Err(errors::new(errors::ErrorKind::GuestCallFailure(
                    "No error message set for call failure".to_string(),
                ))),
            }
        } else {
            // invocation succeeded; a missing response is still an error
            match self.state.borrow().guest_response {
                Some(ref e) => Ok(e.clone()),
                None => match self.state.borrow().guest_error {
                    Some(ref s) => Err(errors::new(errors::ErrorKind::GuestCallFailure(s.clone()))),
                    None => Err(errors::new(errors::ErrorKind::GuestCallFailure(
                        "No error message OR response set for call success".to_string(),
                    ))),
                },
            }
        }
    }
    /// Performs a live "hot swap" of the WebAssembly module. Since execution is assumed to be
    /// single-threaded within the environment of the `WapcHost`, this will not cause any pending function
    /// calls to be lost. This will replace the currently executing WebAssembly module with the new
    /// bytes.
    ///
    /// **Note**: you will lose all JITted functions for this module, so the first `call` after a
    /// hot swap will be "cold" and take longer than regular calls. There are an enormous number of
    /// ways in which a hot swap could go horribly wrong, so please ensure you have the proper guards
    /// in place before invoking it. Libraries that build upon this one can (and likely should) implement
    /// some form of security to protect against malicious swaps.
    ///
    /// If you perform a hot swap of a WASI module, you cannot alter the parameters used to create the WASI module
    /// like the environment variables, mapped directories, pre-opened files, etc. Not abiding by this could lead
    /// to privilege escalation attacks or non-deterministic behavior after the swap.
    pub fn replace_module(&self, module: &[u8]) -> Result<()> {
        info!(
            "HOT SWAP - Replacing existing WebAssembly module with new buffer, {} bytes",
            module.len()
        );
        // The shared state (and therefore the registered callbacks) carries
        // over to the new instance; only the wasm code is replaced.
        let state = self.state.clone();
        let new_instance = WapcHost::instance_from_buffer(module, &self.wasidata, state)?;
        self.instance.borrow_mut().replace(new_instance);
        // Re-run `_start` so the new module initializes itself.
        self.initialize()
    }
    /// Build a wasmtime `Instance` for `buf`, wiring the waPC host callbacks
    /// and the WASI module registry (defaults are used when `wasi` is `None`).
    fn instance_from_buffer(
        buf: &[u8],
        wasi: &Option<WasiParams>,
        state: Rc<RefCell<ModuleState>>,
    ) -> Result<Instance> {
        let engine = Engine::default();
        let store = Store::new(&engine);
        // NOTE(review): the unwraps below turn invalid wasm or bad WASI
        // configuration into panics instead of an Err; consider mapping
        // them into the crate's `errors` type.
        let module = Module::new(&engine, buf).unwrap();
        let d = WasiParams::default();
        let wasi = match wasi {
            Some(w) => w,
            None => &d,
        };
        // Make wasi available by default.
        let preopen_dirs =
            modreg::compute_preopen_dirs(&wasi.preopened_dirs, &wasi.map_dirs).unwrap();
        let argv = vec![]; // TODO: add support for argv (if applicable)
        let module_registry =
            ModuleRegistry::new(&store, &preopen_dirs, &argv, &wasi.env_vars).unwrap();
        // Imports must be supplied in the module's own declared order.
        let imports = arrange_imports(&module, state.clone(), store.clone(), &module_registry);
        Ok(wasmtime::Instance::new(&store, &module, imports?.as_slice()).unwrap())
    }
    /// Invoke the module's exported `_start` function, if present. Modules
    /// without a `_start` export initialize trivially.
    fn initialize(&self) -> Result<()> {
        if let Some(ext) = self
            .instance
            .borrow()
            .as_ref()
            .unwrap()
            .get_export("_start")
        {
            ext.into_func()
                .unwrap()
                .call(&[])
                .map(|_| ())
                .map_err(|_err| {
                    // The underlying trap detail is discarded here.
                    errors::new(errors::ErrorKind::GuestCallFailure(
                        "Error invoking _start function!".to_string(),
                    ))
                })
        } else {
            Ok(())
        }
    }
}
// Called once, then the result is cached. This returns a `Func` that corresponds
// to the `__guest_call` export.
//
// Panics if the instance slot is still empty — callers fill it before
// invoking this function.
fn guest_call_fn(instance: Rc<RefCell<Option<Instance>>>) -> Result<Func> {
    if let Some(func) = instance.borrow().as_ref().unwrap().get_func(GUEST_CALL) {
        Ok(func)
    } else {
        Err(errors::new(errors::ErrorKind::GuestCallFailure(
            "Guest module did not export __guest_call function!".to_string(),
        )))
    }
}
/// wasmtime requires that the list of callbacks be "zippable" with the list
/// of module imports. In order to ensure that both lists are in the same
/// order, we have to loop through the module imports and instantiate the
/// corresponding callback. We **cannot** rely on a predictable import order
/// in the wasm module
///
/// Non-function imports (memories, globals, tables) are silently skipped by
/// the `filter_map`.
fn arrange_imports(
    module: &Module,
    state: Rc<RefCell<ModuleState>>,
    store: Store,
    mod_registry: &ModuleRegistry,
) -> Result<Vec<Extern>> {
    Ok(module
        .imports()
        .filter_map(|imp| {
            if let ExternType::Func(_) = imp.ty() {
                match imp.module() {
                    // waPC host functions are backed by our callbacks.
                    HOST_NAMESPACE => Some(callback_for_import(
                        imp.name(),
                        state.clone(),
                        store.clone(),
                    )),
                    // TODO: to forcibly block the use of WASI, these should error
                    // rather than looking up WASI modules.
                    WASI_UNSTABLE_NAMESPACE => {
                        let f = Extern::from(
                            mod_registry
                                .wasi_unstable
                                .get_export(imp.name())
                                .unwrap()
                                .clone(),
                        );
                        Some(f)
                    }
                    WASI_SNAPSHOT_PREVIEW1_NAMESPACE => {
                        let f: Extern = Extern::from(
                            mod_registry
                                .wasi_snapshot_preview1
                                .get_export(imp.name())
                                .unwrap()
                                .clone(),
                        );
                        Some(f)
                    }
                    other => panic!("import module `{}` was not found", other), //TODO: get rid of panic
                }
            } else {
                None
            }
        })
        .collect())
}
/// Map a single waPC host-namespace import name to its callback `Extern`.
///
/// NOTE(review): an unrecognized name under the host namespace panics via
/// `unreachable!` — a hostile guest module could trigger this.
fn callback_for_import(import: &str, state: Rc<RefCell<ModuleState>>, store: Store) -> Extern {
    match import {
        HOST_CONSOLE_LOG => callbacks::console_log_func(&store, state.clone()).into(),
        HOST_CALL => callbacks::host_call_func(&store, state.clone()).into(),
        GUEST_REQUEST_FN => callbacks::guest_request_func(&store, state.clone()).into(),
        HOST_RESPONSE_FN => callbacks::host_response_func(&store, state.clone()).into(),
        HOST_RESPONSE_LEN_FN => callbacks::host_response_len_func(&store, state.clone()).into(),
        GUEST_RESPONSE_FN => callbacks::guest_response_func(&store, state.clone()).into(),
        GUEST_ERROR_FN => callbacks::guest_error_func(&store, state.clone()).into(),
        HOST_ERROR_FN => callbacks::host_error_func(&store, state.clone()).into(),
        HOST_ERROR_LEN_FN => callbacks::host_error_len_func(&store, state.clone()).into(),
        _ => unreachable!(),
    }
}
|
extern crate clap;
extern crate kvs;
use clap::{App, AppSettings, Arg, SubCommand};
use kvs::{KvStore, Result};
use std::env;
use std::process::exit;
/// CLI entry point: parses `get`/`set`/`rm` subcommands and runs them
/// against a `KvStore` opened in the current working directory.
fn main() -> Result<()> {
    let matches = App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .author(env!("CARGO_PKG_AUTHORS"))
        .about(env!("CARGO_PKG_DESCRIPTION"))
        .setting(AppSettings::DisableHelpSubcommand)
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .setting(AppSettings::VersionlessSubcommands)
        .subcommands(vec![
            SubCommand::with_name("get").about("Get the value").arg(
                Arg::with_name("key")
                    .takes_value(true)
                    .value_name("KEY")
                    .required(true)
                    .index(1),
            ),
            SubCommand::with_name("set")
                .about("set the value")
                .arg(
                    Arg::with_name("key")
                        .takes_value(true)
                        .value_name("KEY")
                        .required(true)
                        .index(1),
                )
                .arg(
                    Arg::with_name("value")
                        .takes_value(true)
                        .value_name("VALUE")
                        .required(true)
                        .index(2),
                ),
            SubCommand::with_name("rm").about("Remove a value").arg(
                Arg::with_name("key")
                    .takes_value(true)
                    .value_name("KEY")
                    .required(true)
                    .index(1),
            ),
        ])
        .get_matches();
    match matches.subcommand() {
        ("get", Some(get_matches)) => {
            let key = get_matches.value_of("key").unwrap();
            let mut store = KvStore::open(env::current_dir()?)?;
            // Missing keys are reported on stdout, not as an error.
            match store.get(key.to_string())? {
                Some(x) => println!("{}", x),
                None => println!("Key not found"),
            };
        }
        ("set", Some(set_matches)) => {
            let key = set_matches.value_of("key").unwrap();
            let value = set_matches.value_of("value").unwrap();
            let mut store = KvStore::open(env::current_dir()?)?;
            store.set(key.to_string(), value.to_string())?;
        }
        ("rm", Some(rm_matches)) => {
            let key = rm_matches.value_of("key").unwrap();
            let mut store = KvStore::open(env::current_dir()?)?;
            // Removing a missing key prints a message and exits non-zero.
            if store.remove(key.to_string()).is_err() {
                println!("Key not found");
                exit(1);
            };
        }
        ("", None) => panic!("No subcommand was used"), // If no subcommand was used it'll match the tuple ("", None)
        _ => unreachable!(),
    }
    Ok(())
}
|
//! Safe abstractions for an I2C bus.
// use core::iter::TrustedLen;
use core::marker::PhantomData;
use core::ops::Deref;
use embedded_hal;
use stm32f7::stm32f7x6::{self as device, i2c1, RCC};
/// This trait marks all valid I2C types. Used to provide generic interfaces.
///
/// TODO: replace by trait alias when they're fully implemented
pub trait I2cTrait: Deref<Target = i2c1::RegisterBlock> {}
impl I2cTrait for device::I2C1 {}
impl I2cTrait for device::I2C2 {}
impl I2cTrait for device::I2C3 {}
/// Represents an I2C (Inter-Integrated Circuit) bus.
///
/// Newtype over the peripheral; all register access goes through field `.0`.
pub struct I2C<I: I2cTrait>(I);
/// Errors that can happen while accessing the I2C bus.
#[derive(Debug)]
pub enum Error {
    /// A NACK flag (negative acknowledgement) was detected.
    Nack,
}
/// An I2C address.
///
/// Currently only 7 bit addresses are supported.
#[derive(Debug, Clone, Copy)]
pub struct Address(u16);
impl Address {
    /// Create a 7 bit I2C address.
    ///
    /// NOTE(review): the address is shifted left by one, presumably to align
    /// with the SADD register's 7-bit address field — confirm against the
    /// reference manual.
    pub const fn bits_7(addr: u8) -> Address {
        Address((addr as u16) << 1)
    }
}
/// Set every clear-flag bit in the ICR write proxy, wiping all latched
/// status/error flags in one register write.
fn icr_clear_all(w: &mut i2c1::icr::W) -> &mut i2c1::icr::W {
    w.alertcf().set_bit(); // alert clear flag
    w.timoutcf().set_bit(); // timeout detection clear flag
    w.peccf().set_bit(); // PEC error clear flag
    w.ovrcf().set_bit(); // overrun/underrun clear flag
    w.arlocf().set_bit(); // arbitration loss clear flag
    w.berrcf().set_bit(); // bus error clear flag
    w.stopcf().set_bit(); // stop detection clear flag
    w.nackcf().set_bit(); // not acknowledge clear flag
    w.addrcf().set_bit(); // address matched clear flag
    w
}
/// An active connection to a device on the I2C bus.
///
/// Allows reading and writing the registers of the device.
pub struct I2cConnection<'a, I: I2cTrait, T: RegisterType> {
    // Exclusive borrow of the bus for the lifetime of the connection.
    i2c: &'a mut I2C<I>,
    device_address: Address,
    // Marker for the register width (u8 or u16) used by this connection.
    register_type: PhantomData<T>,
}
/// Valid register types of I2C devices.
///
/// This trait is implemented for the `u8` and `u16` types.
pub trait RegisterType: Sized {
    /// Convert the register type into a byte slice and pass it to the specified closure.
    fn write<F>(&self, f: F) -> Result<(), Error>
    where
        F: FnOnce(&[u8]) -> Result<(), Error>;
    /// Call the specified closure with a mutable reference to a byte slice and then convert it
    /// to the register type.
    fn read<F>(f: F) -> Result<Self, Error>
    where
        F: FnOnce(&mut [u8]) -> Result<(), Error>;
}
/// Single-byte registers pass through unchanged.
impl RegisterType for u8 {
    fn write<F>(&self, f: F) -> Result<(), Error>
    where
        F: FnOnce(&[u8]) -> Result<(), Error>,
    {
        f(&[*self])
    }
    fn read<F>(f: F) -> Result<Self, Error>
    where
        F: FnOnce(&mut [u8]) -> Result<(), Error>,
    {
        let mut buf = [0];
        f(&mut buf)?;
        Ok(buf[0])
    }
}
/// Two-byte registers are serialized big-endian (most significant byte
/// first), matching the on-wire order used by `read` below.
impl RegisterType for u16 {
    fn write<F>(&self, f: F) -> Result<(), Error>
    where
        F: FnOnce(&[u8]) -> Result<(), Error>,
    {
        f(&[(*self >> 8) as u8, *self as u8])
    }
    fn read<F>(f: F) -> Result<Self, Error>
    where
        F: FnOnce(&mut [u8]) -> Result<(), Error>,
    {
        let mut buf = [0, 0];
        f(&mut buf)?;
        // Recombine big-endian: buf[0] is the high byte.
        Ok(u16::from(buf[0]) << 8 | u16::from(buf[1]))
    }
}
impl<'a, I: I2cTrait, T: RegisterType> I2cConnection<'a, I, T> {
    /// Program CR2 and issue a START for a transfer of `bytes` bytes.
    fn start(&mut self, read: bool, bytes: u8) {
        self.i2c.0.cr2.write(|w| {
            w.sadd().bits(self.device_address.0); // slave_address
            w.start().set_bit(); // start_generation
            w.rd_wrn().bit(read); // read_transfer
            w.nbytes().bits(bytes); // number_of_bytes
            w.autoend().clear_bit(); // automatic_end_mode
            w
        })
    }
    /// Transmit every byte of the iterator as a single transfer.
    ///
    /// Panics when the iterator's size hint is unbounded or its lower bound
    /// does not fit in a u8 (transfers > 255 bytes are unimplemented). The
    /// lower bound of the size hint is used as the transfer length.
    fn write_bytes<ITER>(&mut self, bytes: ITER) -> Result<(), Error>
    where
        ITER: Iterator<Item = u8> // + TrustedLen,
    {
        assert!(bytes.size_hint().1.is_some());
        assert_eq!(
            bytes.size_hint().0 as u8 as usize,
            bytes.size_hint().0,
            "transfers > 255 bytes are not implemented yet"
        );
        self.start(false, bytes.size_hint().0 as u8);
        for b in bytes {
            self.i2c.wait_for_txis()?;
            self.i2c.0.txdr.modify(|_, w| w.txdata().bits(b)); // transmit_data
        }
        self.i2c.wait_for_transfer_complete()?;
        self.clear_status_flags();
        // reset cr2
        self.i2c.0.cr2.write(|w| w);
        Ok(())
    }
    /// Receive into every `&mut u8` the iterator yields, as one transfer.
    ///
    /// Same size-hint restrictions as `write_bytes`.
    fn read_bytes_raw<'b, ITER>(&mut self, buffer: ITER) -> Result<(), Error>
    where
        ITER: Iterator<Item = &'b mut u8> // + TrustedLen,
    {
        assert!(buffer.size_hint().1.is_some());
        assert_eq!(
            buffer.size_hint().0 as u8 as usize,
            buffer.size_hint().0,
            "transfers > 255 bytes are not implemented yet"
        );
        self.start(true, buffer.size_hint().0 as u8);
        // read data from receive data register
        for b in buffer {
            self.i2c.wait_for_rxne()?;
            *b = self.i2c.0.rxdr.read().rxdata().bits(); // receive_data
        }
        self.i2c.wait_for_transfer_complete()?;
        self.clear_status_flags();
        // reset cr2
        self.i2c.0.cr2.write(|w| w);
        Ok(())
    }
    /// Clear stale status flags and flush TXDR before a new operation.
    fn pre(&mut self) {
        self.clear_status_flags();
        // flush transmit data register
        self.i2c.0.isr.modify(|_, w| w.txe().set_bit()); // flush_txdr
    }
    /// Clear every latched status flag via the ICR register.
    fn clear_status_flags(&mut self) {
        self.i2c.0.icr.write(|w| icr_clear_all(w));
    }
    /// Read the current value from the specified register.
    pub fn read(&mut self, register_address: T) -> Result<T, Error> {
        self.pre();
        // Write the register address, then read the value back.
        register_address.write(|addr_bytes| self.write_bytes(addr_bytes.iter().cloned()))?;
        T::read(|val_bytes| self.read_bytes_raw(val_bytes.iter_mut()))
    }
    /// Read bytes from the specified register into the specified buffer.
    pub fn read_bytes(&mut self, register_address: T, bytes: &mut [u8]) -> Result<(), Error> {
        self.pre();
        register_address.write(|addr_bytes| self.write_bytes(addr_bytes.iter().cloned()))?;
        self.read_bytes_raw(bytes.iter_mut())
    }
    /// Write the specified bytes into to specified register.
    pub fn write(&mut self, register_address: T, value: T) -> Result<(), Error> {
        self.pre();
        // Address and value are sent back-to-back in a single transfer.
        register_address.write(|addr_bytes| {
            value.write(|val_bytes| {
                self.write_bytes(addr_bytes.iter().cloned().chain(val_bytes.iter().cloned()))
            })
        })
    }
}
impl<I: I2cTrait> I2C<I> {
    /// Connects to the specified device and run the closure `f` with the connection as argument.
    ///
    /// This function takes an exclusive reference to the `I2C` type because it blocks the I2C
    /// bus. The connection is active until the closure `f` returns.
    pub fn connect<T, F>(&mut self, device_address: Address, f: F) -> Result<(), Error>
    where
        T: RegisterType,
        F: FnOnce(I2cConnection<I, T>) -> Result<(), Error>,
    {
        {
            let conn = I2cConnection {
                i2c: self,
                device_address,
                register_type: PhantomData,
            };
            f(conn)?;
        }
        // Always terminate the transaction with a STOP on success.
        self.stop()
    }
    /// Stop the active connection by sending a stop symbol.
    pub fn stop(&mut self) -> Result<(), Error> {
        self.0.cr2.modify(|_, w| w.stop().set_bit());
        // reset cr2
        self.0.cr2.write(|w| w);
        self.wait_for_stop()
    }
    /// Update a device register.
    ///
    /// Read-modify-write: reads the register, lets `f` mutate the value,
    /// then writes it back within the same connection.
    pub fn update<F>(
        &mut self,
        device_address: Address,
        register_address: u16,
        f: F,
    ) -> Result<(), Error>
    where
        F: FnOnce(&mut u16),
    {
        self.connect(device_address, |mut conn| {
            let mut value = conn.read(register_address)?;
            f(&mut value);
            conn.write(register_address, value)
        })
    }
    /// Wait for “transmit interrupt status” flag
    ///
    /// Busy-polls ISR; returns `Error::Nack` if a NACK arrives first.
    fn wait_for_txis(&self) -> Result<(), Error> {
        loop {
            let isr = self.0.isr.read();
            if isr.nackf().bit_is_set() {
                // nack_received
                return Err(Error::Nack);
            }
            if isr.txis().bit_is_set() {
                return Ok(());
            }
        }
    }
    /// Wait for "receive data register not empty" flag
    fn wait_for_rxne(&self) -> Result<(), Error> {
        loop {
            let isr = self.0.isr.read();
            if isr.nackf().bit_is_set() {
                // nack_received
                return Err(Error::Nack);
            }
            if isr.rxne().bit_is_set() {
                return Ok(());
            }
        }
    }
    /// Wait for “transfer complete” flag
    fn wait_for_transfer_complete(&self) -> Result<(), Error> {
        loop {
            let isr = self.0.isr.read();
            if isr.nackf().bit_is_set() {
                // nack_received
                return Err(Error::Nack);
            }
            if isr.tc().bit_is_set() {
                // transfer_complete
                return Ok(());
            }
        }
    }
    /// Wait for automatically generated stop flag
    fn wait_for_stop(&self) -> Result<(), Error> {
        loop {
            let isr = self.0.isr.read();
            if isr.nackf().bit_is_set() {
                // nack_received
                return Err(Error::Nack);
            }
            if isr.stopf().bit_is_set() {
                // stop_detected
                return Ok(());
            }
        }
    }
    /// Provokes a NACK and checks if the response is as expected. Panics otherwise.
    pub fn test_1(&mut self) {
        let i2c = &mut self.0;
        // Address 0b101_0101 is assumed unoccupied: the zero-byte transfer
        // must end with a NACK, never a premature STOP.
        i2c.cr2.modify(|_, w| {
            w.sadd().bits(Address::bits_7(0b101_0101).0); // slave_address
            w.start().set_bit(); // start_generation
            w.nbytes().bits(0); // number_of_bytes
            w.autoend().set_bit(); // automatic_end_mode
            w
        });
        loop {
            let isr = i2c.isr.read();
            if isr.nackf().bit_is_set() {
                // nack_received
                break;
            }
            assert!(isr.stopf().bit_is_clear()); // stop_detected
        }
        // clear status flags
        i2c.icr.write(|w| icr_clear_all(w));
    }
    /// Try to access all I2C addresses. Panics on test failure.
    #[allow(dead_code)]
    pub fn test_2(&mut self) {
        let i2c = &mut self.0;
        let mut addr = 0;
        loop {
            i2c.cr2.modify(|_, w| {
                w.sadd().bits(Address::bits_7(addr).0); // slave_address
                w.start().set_bit(); // start_generation
                w.nbytes().bits(0); // number_of_bytes
                w.autoend().set_bit(); // automatic_end_mode
                w
            });
            let mut isr = i2c.isr.read();
            loop {
                if isr.nackf().bit_is_set() || isr.stopf().bit_is_set() {
                    // nack_received or stop_detected
                    break;
                }
                isr = i2c.isr.read();
            }
            if !isr.nackf().bit_is_set() {
                // a device acknowledged this address
                let _x = addr;
            } else {
                while i2c.isr.read().busy().bit_is_set() {}
                // clear status flags
                i2c.icr.write(|w| icr_clear_all(w));
            }
            addr += 1;
            // 7-bit address space: scan 0x00..=0x7f.
            if addr >= 0x80 {
                return;
            }
        }
    }
}
/// `embedded-hal` blocking read: raw byte transfer (no register addressing)
/// to the given 7-bit device address.
impl<I: I2cTrait> embedded_hal::blocking::i2c::Read for I2C<I> {
    type Error = Error;
    fn read(&mut self, address: u8, buffer: &mut [u8]) -> Result<(), Self::Error> {
        self.connect(
            Address::bits_7(address),
            |mut connection: I2cConnection<I, u8>| connection.read_bytes_raw(buffer.iter_mut()),
        )
    }
}
/// `embedded-hal` blocking write: raw byte transfer to the device.
impl<I: I2cTrait> embedded_hal::blocking::i2c::Write for I2C<I> {
    type Error = Error;
    fn write(&mut self, address: u8, bytes: &[u8]) -> Result<(), Self::Error> {
        self.connect(
            Address::bits_7(address),
            |mut connection: I2cConnection<I, u8>| connection.write_bytes(bytes.iter().cloned()),
        )
    }
}
/// `embedded-hal` combined write-then-read within a single connection.
impl<I: I2cTrait> embedded_hal::blocking::i2c::WriteRead for I2C<I> {
    type Error = Error;
    fn write_read(
        &mut self,
        address: u8,
        bytes: &[u8],
        buffer: &mut [u8],
    ) -> Result<(), Self::Error> {
        self.connect(
            Address::bits_7(address),
            |mut connection: I2cConnection<I, u8>| {
                connection.write_bytes(bytes.iter().cloned())?;
                connection.read_bytes_raw(buffer.iter_mut())
            },
        )
    }
}
/// Initialize the I2C bus and return an `I2C` type.
///
/// NOTE(review): this always enables the **I2C3** clock (`i2c3en`) even
/// though the function is generic over I2C1/2/3 — confirm whether callers on
/// other instances enable their own peripheral clock elsewhere.
pub fn init<I: I2cTrait>(i2c: I, rcc: &mut RCC) -> I2C<I> {
    // enable clocks
    rcc.apb1enr.modify(|_, w| w.i2c3en().enabled());
    // disable I2C peripheral
    i2c.cr1.modify(|_, w| w.pe().clear_bit()); // peripheral_enable register
    // configure timing register TODO: check/understand values
    i2c.timingr.modify(|_, w| {
        w.presc().bits(0x4); // timing_prescaler
        w.scldel().bits(0x9); // data_setup_time
        w.sdadel().bits(0x1); // data_hold_time
        w.sclh().bits(0x27); // scl_high_period
        w.scll().bits(0x32); // scl_low_period
        w
    });
    // configure oar1
    i2c.oar1.modify(|_, w| w.oa1en().clear_bit()); // own_address_1_enable register
    i2c.oar1.modify(|_, w| {
        w.oa1().bits(0x00); // own_address_1
        w.oa1mode().clear_bit(); // 7 bit own-address mode (10-bit disabled)
        w.oa1en().clear_bit(); // TODO
        w
    });
    // configure cr2
    i2c.cr2.modify(|_, w| {
        w.add10().clear_bit(); // 10_bit_addressing mode
        w.autoend().clear_bit(); // automatic_end_mode
        w
    });
    // configure oar2
    i2c.oar2.modify(|_, w| {
        w.oa2en().clear_bit() // own_address_2_enable
    });
    // configure cr1
    i2c.cr1.modify(|_, w| {
        w.gcen().clear_bit(); // general_call
        w.nostretch().clear_bit(); // clock_stretching_disable
        w.pe().set_bit(); // peripheral_enable
        w
    });
    // wait that init can finish
    crate::system_clock::wait_ms(50);
    I2C(i2c)
}
|
use super::Control;
use gilrs;
use rov::RovCommand::{self, LightsOn, LightsOff};
/// Toggles the ROV lights on a rising edge of the configured gamepad button.
pub struct Lights {
    // info
    button: gilrs::Button,
    // state
    lights_state: bool,   // current on/off state of the lights
    was_pressed: bool,    // button state seen on the previous update
    need_to_write: bool,  // whether the next write_commands should emit a command
}
impl Lights {
    /// Create a lights control bound to `button`. Lights start off, and the
    /// initial state is flushed on the first `write_commands` call.
    pub fn new(button: gilrs::Button) -> Self {
        Lights {
            button,
            lights_state: false,
            was_pressed: false,
            need_to_write: true,
        }
    }
}
impl Control for Lights {
    /// Sample the button and toggle the light state on a rising edge
    /// (released -> pressed). A command is queued only when the state flips.
    fn update(&mut self, input: &gilrs::GamepadState, _delta: f64) {
        let is_pressed = input.is_pressed(self.button);
        if is_pressed && !self.was_pressed {
            self.lights_state = !self.lights_state;
            self.need_to_write = true;
        } else {
            self.need_to_write = false;
        }
        self.was_pressed = is_pressed;
    }
    /// Emit the command matching the current light state, if one is pending.
    fn write_commands(&self, output: &mut Vec<RovCommand>) {
        if !self.need_to_write {
            return;
        }
        let command = if self.lights_state { LightsOn } else { LightsOff };
        output.push(command);
    }
}
|
//! [Traits]: Defining Shared Behavior
//!
//! [traits]: https://doc.rust-lang.org/book/ch10-02-traits.html
//!
//! # Examples
//!
//! ```
//! use the_book::ch10::{Article, Tweet, Summary};
//!
//! let article = Article {
//! headline: String::from("Headline!"),
//! content: String::from("Article"),
//! };
//! let tweet = Tweet {
//! username: String::from("Sam I am"),
//! content: String::from("tweet, tweet, tweet!"),
//! };
//!
//! assert_eq!(String::from("(Read more...)"), article.summarize());
//! assert_eq!(String::from("tweet, tweet, tweet! @Sam I am"), tweet.summarize());
//! ```
//!
//! Traits as Parameter
//!
//! ```
//! use the_book::ch10::{notify, notify2, Article, Summary};
//!
//! let article = Article {
//! headline: String::from("Headline!"),
//! content: String::from("Article"),
//! };
//! assert_eq!(String::from("Breaking news!: (Read more...)"), notify(&article));
//! assert_eq!(notify(&article), notify2(&article));
//! ```
//!
//! Traits as Parameter with the multiple trait bounds.
//!
//! ```
//! use the_book::ch10::{detailed_notify, detailed_notify2, Summary, Tweet};
//!
//! let tweet = Tweet {
//! username: "I".to_string(),
//! content: "yep".to_string(),
//! };
//! assert_eq!(
//! String::from("Breaking news!: yep @I\nTweet { username: \"I\", content: \"yep\" }"),
//! detailed_notify(&tweet),
//! );
//! assert_eq!(detailed_notify(&tweet), detailed_notify2(&tweet));
//! ```
//!
//! Returning types that implements traits
//!
//! ```
//! use the_book::ch10::{summarizable, Summary};
//!
//! let summary = summarizable("sam i am", "yep, this is the tweet");
//! assert_eq!(
//! String::from("yep, this is the tweet @sam i am"),
//! summary.summarize(),
//! );
//! ```
//! Using trait bounds to conditionally implement methods
//!
//! ```
//! use the_book::ch10::{Pair, Tweet};
//!
//! let p = Pair::new(1, 2);
//! p.cmp_display();
//!
//! let p = Pair::new(
//! Tweet { username: "A".to_string(), content: "Some tweet".to_string() },
//! Tweet { username: "B".to_string(), content: "Another tweet".to_string() },
//! );
//! // you can't do this, as [`Tweet`] doesn't implement PartialOrd.
//! // p.cmp_display();
//! ```
//! [`tweet`]: struct.Tweet.html
use core::fmt::{Debug, Display};
/// `impl` based `notify`, which is just a syntax sugar of [`notify2`].
///
/// [`notify2`]: fn.notify2.html
pub fn notify(item: &impl Summary) -> String {
    let summary = item.summarize();
    format!("Breaking news!: {}", summary)
}
/// trait bound [`notify`].
///
/// Behaves identically to [`notify`]; generic form of the same function.
///
/// [`notify`]: fn.notify.html
pub fn notify2<T: Summary>(item: &T) -> String {
    format!("Breaking news!: {}", item.summarize())
}
/// `impl` based `detailed_notify`, which is just a syntax sugar
/// of [`detailed_notify2`].
///
/// Appends the item's `Debug` representation on a second line.
///
/// [`detailed_notify2`]: fn.detailed_notify2.html
pub fn detailed_notify(item: &(impl Summary + Debug)) -> String {
    format!("Breaking news!: {}\n{:?}", item.summarize(), *item)
}
/// trait bound [`detailed_notify`].
///
/// Behaves identically to [`detailed_notify`]; generic form of the same
/// function.
///
/// [`detailed_notify`]: fn.detailed_notify.html
pub fn detailed_notify2<T: Summary + Debug>(item: &T) -> String {
    format!("Breaking news!: {}\n{:?}", item.summarize(), *item)
}
/// Trait generator, which returns trait implementor, example.
///
/// Demonstrates `impl Trait` in return position: callers only see an opaque
/// `Summary` implementor, though concretely it is a [`Tweet`].
pub fn summarizable(username: &str, content: &str) -> impl Summary {
    Tweet {
        username: username.to_string(),
        content: content.to_string(),
    }
}
/// Default trait implementation.
///
/// Implementors get `summarize` for free and may override it.
pub trait Summary {
    fn summarize(&self) -> String {
        String::from("(Read more...)")
    }
}
/// [`Summary`] trait implementor.
///
/// [`summary`]: trait.Summary.html
pub struct Article {
    pub headline: String,
    pub content: String,
}
/// `Article` uses the default `summarize` method.
impl Summary for Article {}
/// [`Summary`] trait implementor.
///
/// [`summary`]: trait.Summary.html
#[derive(Debug)]
pub struct Tweet {
    pub username: String,
    pub content: String,
}
/// Overrides the default summary with "<content> @<username>".
impl Summary for Tweet {
    fn summarize(&self) -> String {
        format!("{} @{}", self.content, self.username)
    }
}
/// Conditional generic implementor with the trait bound.
pub struct Pair<T> {
    x: T,
    y: T,
}
impl<T> Pair<T> {
    /// Available for any `T`; no trait bounds on construction.
    pub fn new(x: T, y: T) -> Self {
        Self { x, y }
    }
}
/// `cmp_display` exists only when `T` is both printable and orderable.
impl<T: Display + PartialOrd> Pair<T> {
    pub fn cmp_display(&self) {
        if self.x >= self.y {
            println!("The largest member is x = {}", self.x);
        } else {
            println!("The largest member is y = {}", self.y);
        }
    }
}
|
// Copyright 2015 click2stream, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Ethernet packet definitions.
use std::io;
use std::mem;
use std::fmt;
use std::result;
use utils;
use std::io::Write;
use std::str::FromStr;
use std::error::Error;
use std::fmt::{Display, Formatter};
use utils::Serialize;
/// MacAddr parse error.
#[derive(Debug, Clone)]
pub struct AddrParseError {
    msg: String,
}
impl Error for AddrParseError {
    // NOTE(review): `description` is deprecated in modern Rust; kept for
    // compatibility with the rest of this crate's error style.
    fn description(&self) -> &str {
        &self.msg
    }
}
impl Display for AddrParseError {
    /// Formats the error as its plain message.
    fn fmt(&self, f: &mut Formatter) -> result::Result<(), fmt::Error> {
        // Write the message field directly; the previous indirection through
        // the deprecated `Error::description` returned the same `&str`.
        f.write_str(&self.msg)
    }
}
/// Build an error from a static message string.
impl<'a> From<&'a str> for AddrParseError {
    fn from(msg: &'a str) -> AddrParseError {
        AddrParseError { msg: msg.to_string() }
    }
}
/// MAC address type.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct MacAddr {
    bytes: [u8; 6],
}
impl MacAddr {
    /// Create a new MAC address.
    pub fn new(a: u8, b: u8, c: u8, d: u8, e: u8, f: u8) -> MacAddr {
        MacAddr { bytes: [a, b, c, d, e, f] }
    }
    /// Get address octets.
    pub fn octets(&self) -> [u8; 6] {
        self.bytes
    }
    /// Create address from slice.
    ///
    /// # Panics
    ///
    /// Panics when `bytes` is not exactly 6 bytes long.
    pub fn from_slice(bytes: &[u8]) -> MacAddr {
        assert_eq!(bytes.len(), 6);
        MacAddr::new(bytes[0], bytes[1], bytes[2],
            bytes[3], bytes[4], bytes[5])
    }
}
impl Display for MacAddr {
    /// Formats as colon-separated lowercase hex, e.g. `01:02:03:04:05:06`.
    fn fmt(&self, f: &mut Formatter) -> result::Result<(), fmt::Error> {
        // Format directly into the formatter; the original allocated an
        // intermediate String via format! + write_str for no benefit.
        write!(f, "{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}",
               self.bytes[0], self.bytes[1], self.bytes[2],
               self.bytes[3], self.bytes[4], self.bytes[5])
    }
}
impl FromStr for MacAddr {
    type Err = AddrParseError;
    /// Parse a MAC address of the form `aa:bb:cc:dd:ee:ff`.
    ///
    /// The group count is validated first, so a wrong number of groups is
    /// reported even when a group is also malformed — same error precedence
    /// as the original implementation, but without building an intermediate
    /// `Vec<Result<..>>` and cloning each element.
    fn from_str(s: &str) -> result::Result<Self, Self::Err> {
        let groups = s.split(':').collect::<Vec<_>>();
        if groups.len() != 6 {
            return Err(AddrParseError::from(
                "unable to parse a MAC address, invalid number of octets"));
        }
        let mut bytes = [0u8; 6];
        for (i, group) in groups.iter().enumerate() {
            bytes[i] = try!(u8::from_str_radix(group, 16)
                .or(Err(AddrParseError::from(
                    "unable to parse a MAC address, invalid octet"))));
        }
        Ok(MacAddr::from_slice(&bytes))
    }
}
/// Packet parser error.
#[derive(Debug, Clone)]
pub struct PacketParseError {
    msg: String,
}
impl Error for PacketParseError {
    // NOTE(review): `description` is deprecated in modern Rust; kept for
    // compatibility with the rest of this crate's error style.
    fn description(&self) -> &str {
        &self.msg
    }
}
impl Display for PacketParseError {
    fn fmt(&self, f: &mut Formatter) -> result::Result<(), fmt::Error> {
        f.write_str(&self.msg)
    }
}
/// Build an error from a static message string.
impl<'a> From<&'a str> for PacketParseError {
    fn from(msg: &'a str) -> PacketParseError {
        PacketParseError { msg: msg.to_string() }
    }
}
/// Type alias for parser results.
pub type Result<T> = result::Result<T, PacketParseError>;
// EtherType codes (host byte order) for the supported protocols.
pub const ETYPE_ARP: u16 = 0x0806;
pub const ETYPE_IPV4: u16 = 0x0800;
/// Ethernet packet header.
///
/// `etype` is kept in host byte order; conversion to network order happens
/// in `raw_header`.
#[derive(Debug, Copy, Clone)]
pub struct EtherPacketHeader {
    pub src: MacAddr,
    pub dst: MacAddr,
    pub etype: u16,
}
impl EtherPacketHeader {
    /// Create a new ethernet packet header.
    pub fn new(src: MacAddr, dst: MacAddr, etype: u16) -> EtherPacketHeader {
        EtherPacketHeader {
            src: src,
            dst: dst,
            etype: etype
        }
    }
    /// Get packet type.
    pub fn packet_type(&self) -> EtherPacketType {
        EtherPacketType::from(self.etype)
    }
    /// Get raw header.
    ///
    /// Converts `etype` to network byte order (big endian) for the wire.
    fn raw_header(&self) -> RawEtherPacketHeader {
        RawEtherPacketHeader {
            src: self.src.octets(),
            dst: self.dst.octets(),
            etype: self.etype.to_be()
        }
    }
    /// Read header from a given raw representation.
    ///
    /// # Panics
    ///
    /// Panics when `data` is not exactly the raw header size.
    fn parse(data: &[u8]) -> EtherPacketHeader {
        assert_eq!(data.len(), mem::size_of::<RawEtherPacketHeader>());
        let ptr = data.as_ptr();
        let ptr = ptr as *const RawEtherPacketHeader;
        // SAFETY: the length is asserted above and RawEtherPacketHeader is
        // #[repr(packed)] (alignment 1), so reinterpreting the byte pointer
        // is sound; the fields are copied out by value below.
        let rh = unsafe {
            &*ptr
        };
        EtherPacketHeader {
            src: MacAddr::from_slice(&rh.src),
            dst: MacAddr::from_slice(&rh.dst),
            etype: u16::from_be(rh.etype)
        }
    }
}
impl Serialize for EtherPacketHeader {
    /// Write the packed wire representation of the header.
    fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
        w.write_all(utils::as_bytes(&self.raw_header()))
    }
}
/// Packed representation of the Ethernet packet header.
///
/// Field order matches the wire format: destination first, then source,
/// then the big-endian ethertype.
#[repr(packed)]
#[derive(Debug, Copy, Clone)]
struct RawEtherPacketHeader {
    dst: [u8; 6],
    src: [u8; 6],
    etype: u16,
}
/// Ethernet packet types.
///
/// `UNKNOWN` covers every ethertype code this crate does not model.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum EtherPacketType {
    ARP,
    IPv4,
    UNKNOWN
}
impl EtherPacketType {
    /// Get system code of this packet type.
    ///
    /// # Panics
    ///
    /// Panics for `EtherPacketType::UNKNOWN`, which has no ethertype code.
    pub fn code(self) -> u16 {
        match self {
            EtherPacketType::ARP => ETYPE_ARP,
            EtherPacketType::IPv4 => ETYPE_IPV4,
            // Matching the variant explicitly (instead of `_`) makes the
            // compiler flag this match when a new variant is added.
            EtherPacketType::UNKNOWN => panic!("no etype code for unknown packet type")
        }
    }
}
impl From<u16> for EtherPacketType {
    /// Get ethernet packet type from a given code.
    ///
    /// Unrecognized codes map to `UNKNOWN` (never fails).
    fn from(code: u16) -> EtherPacketType {
        match code {
            ETYPE_ARP => EtherPacketType::ARP,
            ETYPE_IPV4 => EtherPacketType::IPv4,
            _ => EtherPacketType::UNKNOWN
        }
    }
}
/// Common trait for ethernet packet body implementations.
pub trait EtherPacketBody : Sized {
    /// Parse body from its raw representation.
    fn parse(data: &[u8]) -> Result<Self>;
    /// Serialize the packet body in-place using a given writer.
    ///
    /// The already-serialized header is passed so body formats that depend
    /// on header fields can use it.
    fn serialize<W: Write>(
        &self,
        eh: &EtherPacketHeader,
        w: &mut W) -> io::Result<()>;
    /// Get type of this body.
    fn packet_type(&self) -> EtherPacketType;
}
/// Raw-bytes body: accepts any payload and reports `UNKNOWN` type.
impl EtherPacketBody for Vec<u8> {
    fn parse(data: &[u8]) -> Result<Vec<u8>> {
        Ok(data.to_vec())
    }
    fn serialize<W: Write>(
        &self,
        _: &EtherPacketHeader,
        w: &mut W) -> io::Result<()> {
        w.write_all(self)
    }
    fn packet_type(&self) -> EtherPacketType {
        EtherPacketType::UNKNOWN
    }
}
/// Ethernet packet.
///
/// A header paired with a typed body implementing [`EtherPacketBody`].
#[derive(Debug, Clone)]
pub struct EtherPacket<B: EtherPacketBody> {
    pub header: EtherPacketHeader,
    pub body: B,
}
impl<B: EtherPacketBody> EtherPacket<B> {
    /// Create a new ethernet packet.
    pub fn new(header: EtherPacketHeader, body: B) -> EtherPacket<B> {
        EtherPacket {
            header: header,
            body: body
        }
    }
    /// Create a new ethernet packet.
    ///
    /// The ethertype is derived from the body; panics for `UNKNOWN` bodies
    /// (see `EtherPacketType::code`).
    pub fn create(
        src: MacAddr,
        dst: MacAddr,
        body: B) -> EtherPacket<B> {
        let pt = body.packet_type();
        let header = EtherPacketHeader::new(src, dst, pt.code());
        EtherPacket::new(header, body)
    }
    /// Parse a given ethernet packet.
    ///
    /// The body's declared type must match the header's ethertype, unless
    /// the body parses as `UNKNOWN` (raw bytes).
    pub fn parse(data: &[u8]) -> Result<EtherPacket<B>> {
        let hsize = mem::size_of::<RawEtherPacketHeader>();
        if data.len() < hsize {
            Err(PacketParseError::from("unable to parse ethernet packet, not enough data"))
        } else {
            let header = EtherPacketHeader::parse(&data[..hsize]);
            let body = try!(B::parse(&data[hsize..]));
            let btype = body.packet_type();
            if btype == EtherPacketType::UNKNOWN ||
                btype == EtherPacketType::from(header.etype) {
                Ok(EtherPacket::new(header, body))
            } else {
                Err(PacketParseError::from("expect and actual ethernet packet types do not match"))
            }
        }
    }
}
impl<B: EtherPacketBody> Serialize for EtherPacket<B> {
    /// Serialize header then body, in wire order.
    fn serialize<W: Write>(&self, w: &mut W) -> io::Result<()> {
        try!(self.header.serialize(w));
        self.body.serialize(&self.header, w)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use net::raw::arp::*;
    use utils::Serialize;
    use net::utils::WriteBuffer;
    use std::net::Ipv4Addr;
    #[test]
    fn test_mac_addr() {
        let addr = MacAddr::new(1, 2, 3, 4, 5, 6);
        let octets = addr.octets();
        assert_eq!([1, 2, 3, 4, 5, 6], octets);
        let addr2 = MacAddr::from_slice(&octets);
        assert_eq!(octets, addr2.octets());
    }
    // Round-trip: build an ARP-over-ethernet packet, serialize it, parse it
    // back and compare header fields.
    #[test]
    fn test_ether_packet() {
        let src = MacAddr::new(1, 2, 3, 4, 5, 6);
        let dst = MacAddr::new(6, 5, 4, 3, 2, 1);
        let sip = Ipv4Addr::new(192, 168, 3, 7);
        let dip = Ipv4Addr::new(192, 168, 8, 1);
        let arp = ArpPacket::ipv4_over_ethernet(ArpOperation::REQUEST,
            &src, &sip, &dst, &dip);
        let pkt = EtherPacket::create(src, dst, arp);
        let mut buf = WriteBuffer::new(0);
        pkt.serialize(&mut buf)
            .unwrap();
        let ep2 = EtherPacket::<ArpPacket>::parse(buf.as_bytes())
            .unwrap();
        assert_eq!(pkt.header.src.octets(), ep2.header.src.octets());
        assert_eq!(pkt.header.dst.octets(), ep2.header.dst.octets());
        assert_eq!(pkt.header.etype, ep2.header.etype);
    }
}
|
use std::cmp;
/// Greatest common divisor via the iterative Euclidean algorithm.
///
/// Same contract as the classic recursive form: `gcd(a, 0) == a`,
/// otherwise `gcd(a, b) == gcd(b, a % b)`.
fn gcd(a: i32, b: i32) -> i32 {
    let (mut x, mut y) = (a, b);
    while y != 0 {
        let r = x % y;
        x = y;
        y = r;
    }
    x
}
/// Exercise 2-6-1: given two points `p1` and `p2` as `[x, y]` pairs, return
/// `gcd(|dx|, |dy|) - 1` — presumably the number of lattice points strictly
/// between the two endpoints (TODO confirm against the exercise statement).
///
/// Parameters now take `&[i32]` instead of `&Vec<i32>`; existing
/// `act2_6_1(&vec![...], ...)` callers keep working via deref coercion.
fn act2_6_1(p1: &[i32], p2: &[i32]) -> i32 {
    // max - min yields the absolute coordinate difference; `wrapping_sub`
    // is kept from the original to preserve behavior at i32 extremes.
    let dx = cmp::max(p1[0], p2[0]).wrapping_sub(cmp::min(p1[0], p2[0]));
    let dy = cmp::max(p1[1], p2[1]).wrapping_sub(cmp::min(p1[1], p2[1]));
    // The original branched on the (wrapped) signed differences only to pick
    // the argument order for `gcd`; gcd is symmetric, so the branch was dead
    // weight and the result is identical without it.
    gcd(dx, dy) - 1
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Points (1,11) and (5,3): |dx| = 4, |dy| = 8, gcd = 4, result 3.
    #[test]
    fn act2_6_1_test(){
        assert_eq!(3, act2_6_1(&vec![1,11], &vec![5,3]));
    }
}
|
// Copyright 2022 Alibaba Cloud. All rights reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use std::sync::Arc;
use tokio::sync::Notify;
use tokio::time::{error::Elapsed, timeout, Duration};
/// State shared between the [`Notifier`] and all [`Waiter`]s.
#[derive(Debug)]
struct Shared {
    // Set once by `Notifier::shutdown()`; never cleared.
    shutdown: AtomicBool,
    // Wakes tasks blocked in `Waiter::wait_shutdown()`.
    notify_shutdown: Notify,
    // Count of live `Waiter`s; initialized to 1 for the `Waiter` returned
    // alongside the `Notifier` (see `_with_timeout`).
    waiters: AtomicUsize,
    // Fired by the last `Waiter`'s `Drop`; awaited in `Notifier::wait_all_exit()`.
    notify_exit: Notify,
}
impl Shared {
    /// Whether shutdown has been signalled (relaxed load; the flag is
    /// monotonic, it only ever goes false -> true).
    fn is_shutdown(&self) -> bool {
        self.shutdown.load(Ordering::Relaxed)
    }
}
/// Wait for the shutdown notification.
#[derive(Debug)]
pub struct Waiter {
    // Shared state; the live-waiter count is adjusted in `from_shared` and `Drop`.
    shared: Arc<Shared>,
}
/// Used to Notify all [`Waiter`s](Waiter) shutdown.
///
/// No `Clone` is provided. If you want multiple instances, you can use Arc<Notifier>.
/// Notifier will automatically call shutdown when dropping.
#[derive(Debug)]
pub struct Notifier {
    shared: Arc<Shared>,
    // Optional timeout applied by `wait_all_exit()`; `None` waits forever.
    wait_time: Option<Duration>,
}
/// Create a new shutdown pair([`Notifier`], [`Waiter`]) without timeout.
///
/// The [`Notifier`]'s `wait_all_exit()` will wait indefinitely for all
/// [`Waiter`]s to drop, since no timeout is configured here.
pub fn new() -> (Notifier, Waiter) {
    _with_timeout(None)
}
/// Create a new shutdown pair with the specified [`Duration`].
///
/// The [`Duration`] is used to specify the timeout of the [`Notifier::wait_all_exit()`]:
/// if some [`Waiter`] is still alive when it expires, `wait_all_exit()`
/// returns `Err(Elapsed)`.
///
/// [`Duration`]: tokio::time::Duration
pub fn with_timeout(wait_time: Duration) -> (Notifier, Waiter) {
    _with_timeout(Some(wait_time))
}
/// Shared constructor: builds the state block and hands back the linked pair.
fn _with_timeout(wait_time: Option<Duration>) -> (Notifier, Waiter) {
    let shared = Arc::new(Shared {
        shutdown: AtomicBool::new(false),
        // Starts at 1: the `Waiter` returned below is already counted.
        waiters: AtomicUsize::new(1),
        notify_shutdown: Notify::new(),
        notify_exit: Notify::new(),
    });
    let waiter = Waiter {
        shared: Arc::clone(&shared),
    };
    (Notifier { shared, wait_time }, waiter)
}
impl Waiter {
    /// Return `true` if the [`Notifier::shutdown()`] has been called.
    ///
    /// [`Notifier::shutdown()`]: Notifier::shutdown()
    pub fn is_shutdown(&self) -> bool {
        self.shared.is_shutdown()
    }
    /// Waiting for the [`Notifier::shutdown()`] to be called.
    pub async fn wait_shutdown(&self) {
        while !self.is_shutdown() {
            // Create the `Notified` future *before* re-checking the flag.
            // Per tokio's `Notify` docs, a `Notified` future created before a
            // `notify_waiters()` call receives the wakeup even if it has not
            // been polled yet, so a shutdown signalled between the check
            // below and the `.await` is not lost.
            let shutdown = self.shared.notify_shutdown.notified();
            // Flag may have been set since the loop condition; bail early
            // rather than await a wakeup that already happened.
            if self.is_shutdown() {
                return;
            }
            shutdown.await;
        }
    }
    // Construct a waiter over existing state, bumping the live-waiter count.
    fn from_shared(shared: Arc<Shared>) -> Self {
        shared.waiters.fetch_add(1, Ordering::Relaxed);
        Self { shared }
    }
}
impl Clone for Waiter {
fn clone(&self) -> Self {
Self::from_shared(self.shared.clone())
}
}
impl Drop for Waiter {
    fn drop(&mut self) {
        // `fetch_sub` returns the *previous* value, so `1` means this was the
        // last live waiter: wake anyone blocked in `Notifier::wait_all_exit()`.
        if 1 == self.shared.waiters.fetch_sub(1, Ordering::Relaxed) {
            self.shared.notify_exit.notify_waiters();
        }
    }
}
impl Notifier {
    /// Return `true` if the [`Notifier::shutdown()`] has been called.
    ///
    /// [`Notifier::shutdown()`]: Notifier::shutdown()
    pub fn is_shutdown(&self) -> bool {
        self.shared.is_shutdown()
    }
    /// Notify all [`Waiter`s](Waiter) shutdown.
    ///
    /// It will cause all calls blocking at `Waiter::wait_shutdown().await` to return.
    pub fn shutdown(&self) {
        // `swap` makes shutdown idempotent: only the first caller (of
        // possibly many, e.g. via `Arc<Notifier>`) broadcasts the wakeup.
        let is_shutdown = self.shared.shutdown.swap(true, Ordering::Relaxed);
        if !is_shutdown {
            self.shared.notify_shutdown.notify_waiters();
        }
    }
    /// Return the num of all [`Waiter`]s.
    pub fn waiters(&self) -> usize {
        self.shared.waiters.load(Ordering::Relaxed)
    }
    /// Create a new [`Waiter`].
    pub fn subscribe(&self) -> Waiter {
        Waiter::from_shared(self.shared.clone())
    }
    /// Wait for all [`Waiter`]s to drop.
    pub async fn wait_all_exit(&self) -> Result<(), Elapsed> {
        //debug_assert!(self.shared.is_shutdown());
        // Fast path: no live waiters, nothing to wait for.
        if self.waiters() == 0 {
            return Ok(());
        }
        let wait = self.wait();
        // Re-check after constructing the wait future to narrow the race
        // with the last waiter dropping in between.
        // NOTE(review): `wait()` is a lazy `async fn`, so nothing registers
        // with `notify_exit` until `wait.await` is polled below — confirm
        // this window is acceptable against tokio `Notify` semantics.
        if self.waiters() == 0 {
            return Ok(());
        }
        wait.await
    }
    // Await the exit notification, bounded by `wait_time` when configured.
    async fn wait(&self) -> Result<(), Elapsed> {
        if let Some(tm) = self.wait_time {
            timeout(tm, self.shared.notify_exit.notified()).await
        } else {
            self.shared.notify_exit.notified().await;
            Ok(())
        }
    }
}
/// Dropping the notifier signals shutdown, so waiters are never left hanging.
impl Drop for Notifier {
    fn drop(&mut self) {
        self.shutdown()
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// Happy path: shutdown releases a waiting task and the count drops to 0.
    #[tokio::test]
    async fn it_work() {
        let (notifier, waiter) = new();
        let task = tokio::spawn(async move {
            waiter.wait_shutdown().await;
        });
        assert_eq!(notifier.waiters(), 1);
        notifier.shutdown();
        task.await.unwrap();
        assert_eq!(notifier.waiters(), 0);
    }
    /// Dropping the notifier implicitly signals shutdown (via `Drop`).
    #[tokio::test]
    async fn notifier_drop() {
        let (notifier, waiter) = new();
        assert_eq!(notifier.waiters(), 1);
        assert!(!waiter.is_shutdown());
        drop(notifier);
        assert!(waiter.is_shutdown());
        assert_eq!(waiter.shared.waiters.load(Ordering::Relaxed), 1);
    }
    /// `clone()` and `subscribe()` both bump the waiter count; drops decrement.
    #[tokio::test]
    async fn waiter_clone() {
        let (notifier, waiter1) = new();
        assert_eq!(notifier.waiters(), 1);
        let waiter2 = waiter1.clone();
        assert_eq!(notifier.waiters(), 2);
        let waiter3 = notifier.subscribe();
        assert_eq!(notifier.waiters(), 3);
        drop(waiter2);
        assert_eq!(notifier.waiters(), 2);
        let task = tokio::spawn(async move {
            waiter3.wait_shutdown().await;
            assert!(waiter3.is_shutdown());
        });
        assert!(!waiter1.is_shutdown());
        notifier.shutdown();
        assert!(waiter1.is_shutdown());
        task.await.unwrap();
        assert_eq!(notifier.waiters(), 1);
    }
    /// Concurrent `shutdown()` calls through `Arc<Notifier>` are safe; only
    /// one broadcast happens and every waiter still gets released.
    #[tokio::test]
    async fn concurrency_notifier_shutdown() {
        let (notifier, waiter) = new();
        let arc_notifier = Arc::new(notifier);
        let notifier1 = arc_notifier.clone();
        let notifier2 = notifier1.clone();
        let task1 = tokio::spawn(async move {
            assert_eq!(notifier1.waiters(), 1);
            let waiter = notifier1.subscribe();
            assert_eq!(notifier1.waiters(), 2);
            notifier1.shutdown();
            waiter.wait_shutdown().await;
        });
        let task2 = tokio::spawn(async move {
            assert_eq!(notifier2.waiters(), 1);
            notifier2.shutdown();
        });
        waiter.wait_shutdown().await;
        assert!(arc_notifier.is_shutdown());
        task1.await.unwrap();
        task2.await.unwrap();
    }
    /// Two tasks can call `wait_all_exit()` concurrently; both complete once
    /// the last waiter drops.
    #[tokio::test]
    async fn concurrency_notifier_wait() {
        let (notifier, waiter) = new();
        let arc_notifier = Arc::new(notifier);
        let notifier1 = arc_notifier.clone();
        let notifier2 = notifier1.clone();
        let task1 = tokio::spawn(async move {
            notifier1.shutdown();
            notifier1.wait_all_exit().await.unwrap();
        });
        let task2 = tokio::spawn(async move {
            notifier2.shutdown();
            notifier2.wait_all_exit().await.unwrap();
        });
        waiter.wait_shutdown().await;
        drop(waiter);
        task1.await.unwrap();
        task2.await.unwrap();
    }
    /// `wait_all_exit()` returns only after all 100 spawned waiters dropped.
    #[tokio::test]
    async fn wait_all_exit() {
        let (notifier, waiter) = new();
        let mut tasks = Vec::with_capacity(100);
        for i in 0..100 {
            assert_eq!(notifier.waiters(), 1 + i);
            let waiter1 = waiter.clone();
            tasks.push(tokio::spawn(async move {
                waiter1.wait_shutdown().await;
            }));
        }
        drop(waiter);
        assert_eq!(notifier.waiters(), 100);
        notifier.shutdown();
        notifier.wait_all_exit().await.unwrap();
        for t in tasks {
            t.await.unwrap();
        }
    }
    /// A waiter that outlives the 100ms timeout makes `wait_all_exit()`
    /// return `Err(Elapsed)`.
    #[tokio::test]
    async fn wait_timeout() {
        let (notifier, waiter) = with_timeout(Duration::from_millis(100));
        let task = tokio::spawn(async move {
            waiter.wait_shutdown().await;
            tokio::time::sleep(Duration::from_millis(200)).await;
        });
        notifier.shutdown();
        // Elapsed
        assert!(matches!(notifier.wait_all_exit().await, Err(_)));
        task.await.unwrap();
    }
}
|
#![cfg_attr(target_os = "wasi", feature(wasi_ext))]
// Test-module tree: each submodule exercises one I/O area.
mod io {
    mod isatty;
    mod mmap;
    #[cfg(not(target_os = "redox"))] // redox doesn't have cwd/openat
    #[cfg(not(target_os = "wasi"))] // wasi support for S_IRUSR etc. submitted to libc in #2264
    mod readwrite;
}
|
use crate::item::Item;
use crate::markdown::{markdown_to_html, markdown_to_html_strip_one_paragraph};
use crate::paths::AbsPath;
use crate::{
content::PostItem,
item::{RenderContext, TeraItem},
markdown::find_markdown_files,
site_url::SiteUrl,
};
use chrono::NaiveDate;
use eyre::{eyre, Result};
use itemref_derive::ItemRef;
use rayon::iter::{IntoParallelRefMutIterator, ParallelIterator};
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::cmp::Reverse;
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::fs;
use tera::Context;
use tracing::warn;
use yaml_front_matter::{Document, YamlFrontMatter};
use super::posts::{PostRef, PostRefContext};
/// Load all series definitions from `dir` and link them with their posts.
///
/// Posts declare membership via `series_id`; each matched series receives
/// its posts (newest first, via `Reverse<PostRef>`), and each member post
/// gets a back-reference to its series.
///
/// # Errors
///
/// Fails if a series file cannot be parsed or if a post references a series
/// id with no matching definition file.
pub fn load_series(
    dir: AbsPath,
    posts: &mut BTreeMap<PostRef, PostItem>,
) -> Result<BTreeMap<SeriesRef, SeriesItem>> {
    // Group post refs by the series id they declare.
    let mut posts_in_series: HashMap<String, BTreeSet<Reverse<PostRef>>> = HashMap::new();
    for post in posts.values() {
        if let Some(id) = &post.series_id {
            // `or_default()` instead of `or_insert(BTreeSet::new())`:
            // avoids constructing a set eagerly on every call.
            posts_in_series
                .entry(id.to_string())
                .or_default()
                .insert(Reverse(post.post_ref()));
        }
    }
    // Parse every series markdown file in parallel, keyed by id.
    let mut series = find_markdown_files(dir)
        .par_iter_mut()
        .map(|path| SeriesItem::from_file(path.abs_path()).map(|serie| (serie.id.clone(), serie)))
        .collect::<Result<HashMap<_, _>>>()?;
    for (id, series_posts) in posts_in_series.into_iter() {
        let serie = series
            .get_mut(id.as_str())
            .ok_or_else(|| eyre!("Missing series `{id}`"))?;
        // A grouping only exists because some post referenced the id, so an
        // empty set should be unreachable; kept as a defensive warning.
        if series_posts.is_empty() {
            warn!("Series definition without post: `{}`", serie.id);
            continue;
        }
        serie.posts = series_posts;
        // Give each member post a back-reference to its series.
        for post in serie.posts.iter() {
            let post = posts.get_mut(&post.0).expect("Should have post");
            post.series = Some(serie.series_ref());
        }
    }
    Ok(series
        .into_values()
        .map(|serie| (serie.series_ref(), serie))
        .collect())
}
/// Lightweight, orderable reference to a [`SeriesItem`].
#[derive(ItemRef, Debug, Clone)]
#[item(SeriesItem)]
pub struct SeriesRef {
    pub id: String,
    // Creation date of the newest post in the series; drives ordering.
    #[order]
    pub last_created: NaiveDate,
}
/// A post series: parsed from a markdown file with yaml front matter.
#[derive(Debug)]
pub struct SeriesItem {
    // Derived from the file stem; also the url slug.
    pub id: String,
    pub title: String,
    pub completed: bool,
    pub path: AbsPath,
    pub url: SiteUrl,
    // Markdown body rendered to html.
    pub description: String,
    // Optional note rendered to html (one paragraph stripped).
    pub post_note: Option<String>,
    // Newest-first (`Reverse`); populated by `load_series`.
    pub posts: BTreeSet<Reverse<PostRef>>,
}
impl SeriesItem {
    /// Load and parse a series definition from a markdown file on disk.
    pub fn from_file(path: AbsPath) -> Result<Self> {
        let raw_content = fs::read_to_string(&path)?;
        Self::from_string(path, raw_content)
    }
    /// Parse a series definition from raw file content.
    ///
    /// The file stem becomes the series id (and url slug), the yaml front
    /// matter supplies title/completed/post_note, and the markdown body is
    /// rendered into the html description.
    pub fn from_string(path: AbsPath, raw_content: String) -> Result<Self> {
        let SeriesDirMetadata { id } = SeriesDirMetadata::from_path(&path)?;
        let Document { metadata, content } = YamlFrontMatter::parse::<SeriesMetadata>(&raw_content)
            .map_err(|err| eyre!("Failed to parse metadata for serie: {}\n{}", path, err))?;
        let url =
            SiteUrl::parse(&format!("/series/{id}/")).expect("Should be able to create a url");
        let description = markdown_to_html(&content);
        let post_note = metadata
            .post_note
            .as_ref()
            .map(|note| markdown_to_html_strip_one_paragraph(note).into_owned());
        Ok(Self {
            id,
            title: metadata.title,
            completed: metadata.completed,
            path,
            url,
            description,
            post_note,
            // Filled in later by `load_series`.
            posts: BTreeSet::new(),
        })
    }
    /// Build the [`SeriesRef`] for this series.
    ///
    /// # Panics
    ///
    /// Panics if the series has no posts, so only call this after
    /// `load_series` has attached them.
    pub fn series_ref(&self) -> SeriesRef {
        let newest = self
            .posts
            .iter()
            .next()
            .expect("Should have posts");
        SeriesRef {
            id: self.id.to_owned(),
            last_created: newest.0.created,
        }
    }
}
impl TeraItem for SeriesItem {
    /// Template context for rendering this series page.
    fn context(&self, ctx: &RenderContext) -> Context {
        // `expect` with the invariant instead of a bare `unwrap`: the context
        // struct is plain serializable data, so failure would be a bug.
        Context::from_serialize(SeriesContext::from_series(self, ctx))
            .expect("SeriesContext should always serialize")
    }
    /// Tera template used for series pages.
    fn template(&self) -> &str {
        "series.html"
    }
    fn url(&self) -> &SiteUrl {
        &self.url
    }
}
/// Serializable view of a series, handed to the Tera template.
#[derive(Debug, Clone, Serialize)]
pub struct SeriesContext<'a> {
    // Html-escaped title (may be owned if escaping allocated).
    title: Cow<'a, str>,
    url: Cow<'a, str>,
    description: &'a str,
    completed: bool,
    posts: Vec<PostRefContext<'a>>,
    post_note: Option<&'a str>,
}
impl<'a> SeriesContext<'a> {
    /// Resolve `series_ref` against the site content and build its context.
    pub fn from_ref(series_ref: &SeriesRef, ctx: &'a RenderContext) -> Self {
        let series = ctx.content.get_series(series_ref).expect("Should have series");
        Self::from_series(series, ctx)
    }
    /// Build the render context directly from a series item.
    pub fn from_series(series: &'a SeriesItem, ctx: &'a RenderContext) -> Self {
        let posts = series
            .posts
            .iter()
            .map(|post| PostRefContext::from_ref(&post.0, ctx))
            .collect();
        Self {
            title: html_escape::encode_text(&series.title),
            url: series.url.href(),
            description: &series.description,
            completed: series.completed,
            posts,
            post_note: series.post_note.as_deref(),
        }
    }
}
/// Yaml front matter of a series file.
#[derive(Deserialize, Debug)]
struct SeriesMetadata {
    title: String,
    completed: bool,
    // Optional markdown note shown with each member post.
    post_note: Option<String>,
}
/// Metadata derived from the series file's location (just the id, taken
/// from the file stem).
pub struct SeriesDirMetadata {
    pub id: String,
}
impl SeriesDirMetadata {
    /// Derive the series id from the path's file stem; error if the path
    /// has no stem.
    pub fn from_path(path: &AbsPath) -> Result<Self> {
        path.file_stem()
            .map(|stem| Self {
                id: stem.to_string(),
            })
            .ok_or_else(|| eyre!("Bad series: {}", path))
    }
}
|
//! A bitmask generator for enum scoped bit flags.
//!
//! The `bitmask!` macro creates a struct and an enum that holds your flags. The enum contains all the
//! bit flag variants and the struct is a mixture of those bit flags called a bitmask.
//! Its syntax is as follows:
//!
//! ```ignore
//! bitmask! {
//! pub mask <struct_name>: <struct_type> where flags <enum_name> {
//! <flag_name> = <value>,
//! ...
//! }
//! }
//! ```
//!
//! where `pub` is optional and `struct_type` can be one of the primitive integer types
//! (`i8-64`, `u8-64`, `isize`, `usize`).
//!
//! # Application
//!
//! Sometimes you might want to wrap some lib that ports `C` or some other code through FFI
//! which exposes numerous defines/constants as `const`. Let's take a look at this example module:
//!
//! ```ignore
//! mod tex {
//! ...
//! pub const TEXTURE_2D: u32 = 1;
//! pub const TEXTURE_3D: u32 = 2;
//! pub const FLIP: u32 = 4;
//! ...
//! pub fn set_options(mask: u32) { ... }
//! }
//! ```
//!
//! To avoid collisions you would use these through the mod scope like so:
//!
//! ```ignore
//! tex::set_options(tex::TEXTURE_2D | tex::FLIP);
//! ```
//!
//! But that does not guarantee you that you won't use invalid flag values.
//! For example you could do:
//!
//! ```ignore
//! set_options(3 | 8);
//! ```
//!
//! Now imagine you had an enum to hold all of those flags and a common type that does not accept
//! any types other than the enum variants and itself. This is exactly what `bitmask!` does for you!
//! It generates an enum with the variants (flags) you supply and a struct that
//! holds a mask which is a mixture of these variants. So now our example would look like this:
//!
//! ```
//! # mod tex {
//! # pub const TEXTURE_2D: u32 = 1;
//! # pub const TEXTURE_3D: u32 = 2;
//! # pub const FLIP: u32 = 4;
//! # pub fn set_options(mask: u32) {}
//! # }
//! # #[macro_use] extern crate bitmask; fn main() {
//! bitmask! {
//! pub mask TexMask: u32 where flags TexOption {
//! Texture2d = tex::TEXTURE_2D,
//! Texture3d = tex::TEXTURE_3D,
//! Flip = tex::FLIP
//! }
//! }
//!
//! fn set_options(mask: TexMask) {
//! tex::set_options(*mask);
//! }
//!
//! // Single flag
//! set_options(TexOption::Texture2d.into());
//! set_options(TexMask::from(TexOption::Texture3d));
//!
//! // Multiple flags
//! set_options(TexOption::Texture2d | TexOption::Flip);
//! # }
//! ```
//!
//! # Things that are doable but _can change_ with time:
//!
//! If for some reason you want to define the enum and the struct yourself you can do so and use the
//! `@IMPL` branch of the macro to implement the methods. The only restrictions are that your
//! struct's inner field must be named `mask` and the enum should have the same size as the struct
//! which can be achieved through the `#[repr()]` modifier with the same integer type as the field `mask`.
//!
//! Implementing `Into<struct_name>` and `Deref` for your own custom type is possible if you want to
//! use it with the preimplemented methods for the mask, but this does not apply to the trait
//! implementations such as `BitOr`, for example.
//!
//! # Examples:
//!
//! ```
//! # #[macro_use] extern crate bitmask; fn main() {
//! bitmask! {
//! mask BitMask: u32 where flags Flags {
//! Flag1 = 0x00000001,
//! Flag2 = 0x000000F0,
//! Flag3 = 0x00000800,
//! Flag123 = 0x000008F1,
//! // Note that function calls like `isize::min_value()`
//! // can't be used for enum discriminants in Rust.
//! FlagMax = ::std::u32::MAX
//! }
//! }
//!
//! let mut mask = BitMask::none();
//!
//! mask.set(Flags::Flag1 | Flags::Flag2);
//! assert_eq!(*mask, 0x000000F1);
//!
//! mask.unset(Flags::Flag1);
//! assert_eq!(*mask, 0x000000F0);
//!
//! mask.set(Flags::Flag123);
//! assert_eq!(*mask, 0x000008F1);
//! # }
//! ```
//!
//! You can add meta attributes like documentation (`#[doc = "..."]`) to each element of the macro:
//!
//! ```
//! # #[macro_use] extern crate bitmask; fn main() {
//! bitmask! {
//! /// Doc comment for the struct
//! pub mask SomeOtherMask: isize where
//! /// Doc comment for the enum
//! flags SomeOtherFlags {
//! /// Doc comment for the flag
//! FlagZero = 0,
//! FlagOne = 1
//! }
//! }
//! # }
//! ```
//!
//! Maybe not the best example but still... Cake is love!
//!
//! ```
//! # #[macro_use] extern crate bitmask; fn main() {
//! bitmask! {
//! mask Cake: u8 where flags Ingredients {
//! Sugar = 0b00000001,
//! Eggs = 0b00000010,
//! Flour = 0b00000100,
//! Milk = 0b00001000
//! }
//! }
//!
//! let quality_cake = Cake::all();
//! assert_eq!(*quality_cake, 0b00001111);
//! # }
//! ```
#![cfg_attr(not(feature = "std"), no_std)]
// Re-export libcore using an alias so that the macro can work in `no_std` crates while remaining
// compatible with normal crates
#[doc(hidden)]
pub extern crate core as __core;
/// The macro that generates a bitmask and it's associated bit flags.
///
/// See the crate level docs for more info and examples.
///
/// # Methods
///
/// * `none` - Create a new mask with all flags unset.
/// * `all` - Create a new mask with all flags set.
/// * `set` - Set a single flag if enum flag variant is passed or multiple if mask is passed.
/// * `unset` - Unset a single flag if enum flag variant is passed or multiple if mask is passed.
/// * `toggle` - Same as set/unset but always negates the flags (1 -> 0 and 0 -> 1).
/// * `contains` - Check if the mask contains a flag or a whole mask.
/// * `intersects` - Check if the mask intersects with a flag or a whole mask.
/// * `is_all` - Check if all flag variants are set.
/// * `is_none` - Check if all flag variants are unset.
///
/// # Operators
///
/// * `BitOr`
/// * `BitXor`
/// * `BitAnd`
/// * `BitOrAssign`
/// * `BitXorAssign`
/// * `BitAndAssign`
/// * `Not` - Same as toggle but creates a new mask with the resulting value.
///
/// # Misc
///
/// * `From`< _enum_ > for _struct_
/// * `Deref` for _struct_
/// * `Deref` for _enum_
// TODO: simplify the parsing when https://github.com/rust-lang/rust/issues/24189 is resolved
#[macro_export]
macro_rules! bitmask {
    // Entry point (private visibility).
    // Parse struct meta attributes, its name and its type.
    (
        $(#[$st_attr: meta])* mask $st_name: ident : $T: tt where
        $(#[$en_attr: meta])* flags $en_name: ident { $($token: tt)+ }
    ) => {
        bitmask! {
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                // The trailing `[]` accumulates meta attributes for the
                // *next* flag while the token-muncher consumes input.
                []
            ],
            $($token)+
        }
    };
    // Parse flag meta attributes.
    // Moves one `#[...]` from the unparsed tail into the pending-attr bucket.
    (
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )*
            [ $(#[$prev_attr: meta])* ]
        ],
        #[$next_attr: meta] $($token: tt)*
    ) => {
        bitmask! {
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                $(
                    meta: [ $(#[$flag_attr])* ]
                    flag: $flag_name = $flag_value;
                )*
                [ $(#[$prev_attr])* #[$next_attr] ]
            ],
            $($token)*
        }
    };
    // Parse the flag itself.
    // Handles the case with trailing comma.
    (
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )*
            [ $(#[$next_attr: meta])* ]
        ],
        $next_name: ident = $next_value: expr, $($token: tt)*
    ) => {
        bitmask! {
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                $(
                    meta: [ $(#[$flag_attr])* ]
                    flag: $flag_name = $flag_value;
                )*
                meta: [ $(#[$next_attr])* ]
                flag: $next_name = $next_value;
                []
            ],
            $($token)*
        }
    };
    // Parse the last flag if missing trailing comma.
    (
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )*
            [ $(#[$next_attr: meta])* ]
        ],
        $next_name: ident = $next_value: expr
    ) => {
        bitmask! {
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                $(
                    meta: [ $(#[$flag_attr])* ]
                    flag: $flag_name = $flag_value;
                )*
                meta: [ $(#[$next_attr])* ]
                flag: $next_name = $next_value;
                []
            ],
        }
    };
    // End of the line. Time to declare the struct and enum.
    (
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )+
            []
        ],
    ) => {
        // `repr($T)` keeps the enum the same size as the struct's `mask`
        // field, which the enum's `Deref` transmute below relies on.
        #[repr($T)]
        #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
        #[cfg_attr(feature = "std", derive(Debug, Hash))]
        #[allow(dead_code)]
        $(#[$en_attr])*
        enum $en_name {
            $(
                $(#[$flag_attr])*
                $flag_name = $flag_value
            ),+
        }
        #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
        #[cfg_attr(feature = "std", derive(Debug, Hash))]
        #[allow(dead_code)]
        $(#[$st_attr])*
        struct $st_name {
            mask: $T
        }
        bitmask!(@IMPL $st_name $T $en_name, {
            $($flag_name = $flag_value),+
        });
    };
    // The `pub` rules below mirror the private ones verbatim;
    // `macro_rules!` cannot abstract over an optional visibility token here.
    // Parse struct meta attributes, its name and its type.
    (
        $(#[$st_attr: meta])* pub mask $st_name: ident : $T: tt where
        $(#[$en_attr: meta])* flags $en_name: ident { $($token: tt)+ }
    ) => {
        bitmask! {
            pub
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                []
            ],
            $($token)+
        }
    };
    // Parse flag meta attributes.
    (
        pub
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )*
            [ $(#[$prev_attr: meta])* ]
        ],
        #[$next_attr: meta] $($token: tt)*
    ) => {
        bitmask! {
            pub
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                $(
                    meta: [ $(#[$flag_attr])* ]
                    flag: $flag_name = $flag_value;
                )*
                [ $(#[$prev_attr])* #[$next_attr] ]
            ],
            $($token)*
        }
    };
    // Parse the flag itself.
    // Handles the case with trailing comma.
    (
        pub
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )*
            [ $(#[$next_attr: meta])* ]
        ],
        $next_name: ident = $next_value: expr, $($token: tt)*
    ) => {
        bitmask! {
            pub
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                $(
                    meta: [ $(#[$flag_attr])* ]
                    flag: $flag_name = $flag_value;
                )*
                meta: [ $(#[$next_attr])* ]
                flag: $next_name = $next_value;
                []
            ],
            $($token)*
        }
    };
    // Parse the last flag if missing trailing comma.
    (
        pub
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )*
            [ $(#[$next_attr: meta])* ]
        ],
        $next_name: ident = $next_value: expr
    ) => {
        bitmask! {
            pub
            st_meta: [ $(#[$st_attr])* ],
            st_name: $st_name,
            mask_type: $T,
            en_meta: [ $(#[$en_attr])* ],
            en_name: $en_name,
            flags: [
                $(
                    meta: [ $(#[$flag_attr])* ]
                    flag: $flag_name = $flag_value;
                )*
                meta: [ $(#[$next_attr])* ]
                flag: $next_name = $next_value;
                []
            ],
        }
    };
    // End of the line. Time to declare the struct and enum.
    (
        pub
        st_meta: [ $(#[$st_attr: meta])* ],
        st_name: $st_name: ident,
        mask_type: $T: tt,
        en_meta: [ $(#[$en_attr: meta])* ],
        en_name: $en_name: ident,
        flags: [
            $(
                meta: [ $(#[$flag_attr: meta])* ]
                flag: $flag_name: ident = $flag_value: expr;
            )+
            []
        ],
    ) => {
        #[repr($T)]
        #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
        #[cfg_attr(feature = "std", derive(Debug, Hash))]
        #[allow(dead_code)]
        $(#[$en_attr])*
        pub enum $en_name {
            $(
                $(#[$flag_attr])*
                $flag_name = $flag_value
            ),+
        }
        #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
        #[cfg_attr(feature = "std", derive(Debug, Hash))]
        #[allow(dead_code)]
        $(#[$st_attr])*
        pub struct $st_name {
            mask: $T
        }
        bitmask!(@IMPL $st_name $T $en_name, {
            $($flag_name = $flag_value),+
        });
    };
    // Generate the inherent methods plus `From`/`Deref` for both types.
    (@IMPL $st_name: ident $T: tt $en_name: ident, {
        $($flag_name: ident = $flag_val: expr),+
    }) => {
        #[allow(dead_code)]
        impl $st_name {
            /// Create a new mask with all flags unset.
            #[inline]
            pub fn none() -> Self {
                $st_name {
                    mask: 0
                }
            }
            /// Create a new mask with all flags set.
            #[inline]
            pub fn all() -> Self {
                $st_name {
                    // OR of every declared flag value, not `!0`: bits no flag
                    // covers stay unset.
                    mask: $($flag_val)|+
                }
            }
            /// Set all `other` flags.
            ///
            /// `other` can be either a single flag or another mask.
            #[inline]
            pub fn set<T>(&mut self, other: T)
                where T: Into<$st_name> + $crate::__core::ops::Deref<Target = $T> {
                self.mask |= *other;
            }
            /// Unset all `other` flags.
            ///
            /// `other` can be either a single flag or another mask.
            #[inline]
            pub fn unset<T>(&mut self, other: T)
                where T: Into<$st_name> + $crate::__core::ops::Deref<Target = $T> {
                self.mask &= Self::all().mask ^ *other;
            }
            /// Toggle all `other` flags.
            ///
            /// `other` can be either a single flag or another mask.
            #[inline]
            pub fn toggle<T>(&mut self, other: T)
                where T: Into<$st_name> + $crate::__core::ops::Deref<Target = $T> {
                self.mask ^= *other;
            }
            /// Check if the mask contains all of `other`'s flags.
            ///
            /// `other` can be either a single flag or another mask.
            #[inline]
            pub fn contains<T>(&self, other: T) -> bool
                where T: Into<$st_name> + $crate::__core::ops::Deref<Target = $T> {
                self.mask & *other == *other
            }
            /// Check if the mask has common flags with `other`.
            ///
            /// `other` can be either a single flag or another mask.
            #[inline]
            pub fn intersects<T>(&self, other: T) -> bool
                where T: Into<$st_name> + $crate::__core::ops::Deref<Target = $T> {
                self.mask & *other != 0
            }
            /// Check if all flags are set.
            pub fn is_all(&self) -> bool {
                self.mask == Self::all().mask
            }
            /// Check if all flags are unset.
            pub fn is_none(&self) -> bool {
                self.mask == 0
            }
        }
        impl $crate::__core::convert::From<$en_name> for $st_name {
            /// Create a mask from a single flag.
            ///
            /// When creating a mask from multiple flags or another mask just use the `clone` method
            /// or the `copy` semantics.
            #[inline]
            fn from(flag: $en_name) -> Self {
                $st_name {
                    mask: flag as $T
                }
            }
        }
        impl $crate::__core::ops::Deref for $st_name {
            type Target = $T;
            /// Deref to the internal type.
            ///
            /// Useful for FFI.
            #[inline]
            fn deref(&self) -> &$T {
                &self.mask as &$T
            }
        }
        impl $crate::__core::ops::Deref for $en_name {
            type Target = $T;
            /// Deref to the internal type.
            ///
            /// Useful for FFI.
            #[inline]
            fn deref(&self) -> &$T {
                // SAFETY-style note: relies on `#[repr($T)]` above making the
                // enum layout identical to `$T`.
                unsafe { $crate::__core::mem::transmute(self) }
            }
        }
        // TODO: when `concat_idents!` is stable, replace the `IMPL`s with a single impl on a common trait
        // and use static dispatch. `T: Into + Deref` cannot be used in `impl` because that's too generic.
        bitmask! { @IMPL_BITOR
            $st_name, $st_name, $st_name;
            $st_name, $en_name, $st_name;
            $en_name, $st_name, $st_name;
            $en_name, $en_name, $st_name;
        }
        bitmask! { @IMPL_BITAND
            $st_name, $st_name, $st_name;
            $st_name, $en_name, $st_name;
            $en_name, $st_name, $st_name;
            $en_name, $en_name, $st_name;
        }
        bitmask! { @IMPL_BITXOR
            $st_name, $st_name, $st_name;
            $st_name, $en_name, $st_name;
            $en_name, $st_name, $st_name;
            $en_name, $en_name, $st_name;
        }
        bitmask! { @IMPL_BITOR_ASSIGN
            $st_name, $st_name;
            $st_name, $en_name;
        }
        bitmask! { @IMPL_BITAND_ASSIGN
            $st_name, $st_name;
            $st_name, $en_name;
        }
        bitmask! { @IMPL_BITXOR_ASSIGN
            $st_name, $st_name;
            $st_name, $en_name;
        }
        bitmask! { @IMPL_NOT
            $st_name, $st_name;
            $en_name, $st_name;
        }
    };
    // Each helper below stamps out one operator impl per `target, other` pair.
    (@IMPL_BITOR $($target: ty, $other: ty, $st_name: ident);*;) => {
        $(impl $crate::__core::ops::BitOr<$other> for $target {
            type Output = $st_name;
            #[inline]
            fn bitor(self, other: $other) -> Self::Output {
                $st_name {
                    mask: *self | *other
                }
            }
        })*
    };
    (@IMPL_BITAND $($target: ty, $other: ty, $st_name: ident);*;) => {
        $(impl $crate::__core::ops::BitAnd<$other> for $target {
            type Output = $st_name;
            #[inline]
            fn bitand(self, other: $other) -> Self::Output {
                $st_name {
                    mask: *self & *other
                }
            }
        })*
    };
    (@IMPL_BITXOR $($target: ty, $other: ty, $st_name: ident);*;) => {
        $(impl $crate::__core::ops::BitXor<$other> for $target {
            type Output = $st_name;
            #[inline]
            fn bitxor(self, other: $other) -> Self::Output {
                $st_name {
                    mask: *self ^ *other
                }
            }
        })*
    };
    (@IMPL_BITOR_ASSIGN $($target: ty, $other: ty);*;) => {
        $(impl $crate::__core::ops::BitOrAssign<$other> for $target {
            #[inline]
            fn bitor_assign(&mut self, other: $other) {
                self.mask |= *other
            }
        })*
    };
    (@IMPL_BITAND_ASSIGN $($target: ty, $other: ty);*;) => {
        $(impl $crate::__core::ops::BitAndAssign<$other> for $target {
            #[inline]
            fn bitand_assign(&mut self, other: $other) {
                self.mask &= *other
            }
        })*
    };
    (@IMPL_BITXOR_ASSIGN $($target: ty, $other: ty);*;) => {
        $(impl $crate::__core::ops::BitXorAssign<$other> for $target {
            #[inline]
            fn bitxor_assign(&mut self, other: $other) {
                self.mask ^= *other
            }
        })*
    };
    (@IMPL_NOT $($target: ty, $st_name: ident);*;) => {
        $(impl $crate::__core::ops::Not for $target {
            type Output = $st_name;
            #[inline]
            fn not(self) -> Self::Output {
                // Negate only within the declared flag bits, not all of `$T`.
                let all_flags = $st_name::all();
                $st_name {
                    mask: *all_flags ^ *self
                }
            }
        })*
    }
}
#[cfg(test)]
mod tests {
extern crate std;
bitmask! {
/// Doc comment
mask BitMask: isize where
/// Doc comment
flags Flags {
Flag1 = 0b00000001,
/// Doc comment
Flag2 = 0b00000010,
Flag3 = 0b00000100,
FlagMin = std::isize::MIN,
/// Doc comment
Flag123 = 0b00000111
}
}
bitmask! {
mask A: u8 where flags B {
Trailing = 1,
}
}
use self::Flags::*;
#[test]
fn test_set_unset() {
let mut bm = BitMask::none();
assert_eq!(*bm, 0b00000000);
bm.set(Flag2);
assert_eq!(*bm, Flag2 as isize);
bm.set(Flag3);
assert_eq!(*bm, 0b00000110);
bm.unset(Flag123);
assert_eq!(*bm, 0b00000000);
bm.set(FlagMin);
assert_eq!(*bm, isize::min_value());
}
#[test]
fn test_toggle() {
let mut bm = BitMask::none();
bm.toggle(Flag2);
assert_eq!(*bm, 0b00000010);
bm.toggle(BitMask::from(Flag3));
assert_eq!(*bm, 0b00000110);
bm.toggle(Flag123);
assert_eq!(*bm, 0b00000001);
}
#[test]
fn test_contains() {
let mut bm = BitMask::from(Flag2 | Flag3);
assert_eq!(bm.contains(Flag1), false);
assert_eq!(bm.contains(Flag2), true);
assert_eq!(bm.contains(Flag3), true);
assert_eq!(bm.contains(Flag123), false);
bm.set(Flag123);
assert_eq!(bm.contains(Flag123), true);
bm.set(FlagMin);
assert_eq!(bm.contains(Flag123), true);
}
#[test]
fn test_intersects() {
let bm = BitMask::from(Flag2 | Flag3);
assert_eq!(bm.intersects(Flag1), false);
assert_eq!(bm.intersects(Flag2), true);
assert_eq!(bm.intersects(Flag3), true);
assert_eq!(bm.intersects(Flag1 | Flag3), true);
assert_eq!(bm.intersects(Flag123), true);
}
#[test]
fn test_is_all() {
assert_eq!(BitMask::all().is_all(), true);
assert_eq!(BitMask::none().is_all(), false);
assert_eq!(BitMask::from(Flag1).is_all(), false);
assert_eq!(BitMask::from(Flag123 | FlagMin).is_all(), true);
}
#[test]
fn test_is_none() {
assert_eq!(BitMask::all().is_none(), false);
assert_eq!(BitMask::none().is_none(), true);
assert_eq!(BitMask::from(Flag1).is_none(), false);
assert_eq!(BitMask::from(Flag123 | FlagMin).is_none(), false);
}
#[test]
fn test_bitor() {
let bm = Flag1 | Flag3;
assert_eq!(*bm, 0b00000101);
let bm = BitMask::from(Flag1) | BitMask::from(Flag3);
assert_eq!(*bm, 0b00000101);
let bm = Flag1 | BitMask::from(Flag3);
assert_eq!(*bm, 0b00000101);
let bm = BitMask::from(Flag1) | Flag3;
assert_eq!(*bm, 0b00000101);
}
#[test]
fn test_bitand() {
let bm = Flag1 & Flag3;
assert_eq!(*bm, 0);
let bm = BitMask::from(Flag1) & BitMask::from(Flag3);
assert_eq!(*bm, 0);
let bm = Flag1 & BitMask::from(Flag3);
assert_eq!(*bm, 0);
let bm = BitMask::from(Flag1) & Flag3;
assert_eq!(*bm, 0);
}
#[test]
fn test_bitxor() {
    // XOR removes the shared Flag3 bit from Flag123 in every pairing.
    let flags_only = Flag123 ^ Flag3;
    let masks_only = BitMask::from(Flag123) ^ BitMask::from(Flag3);
    let flag_mask = Flag123 ^ BitMask::from(Flag3);
    let mask_flag = BitMask::from(Flag123) ^ Flag3;
    assert_eq!(*flags_only, 0b00000011);
    assert_eq!(*masks_only, 0b00000011);
    assert_eq!(*flag_mask, 0b00000011);
    assert_eq!(*mask_flag, 0b00000011);
}
#[test]
fn test_bitor_assign() {
    // `|=` accepts either a bare flag or another mask on the right-hand side.
    let mut flag_rhs = BitMask::from(Flag1);
    flag_rhs |= Flag3;
    assert_eq!(*flag_rhs, 0b00000101);
    let mut mask_rhs = BitMask::from(Flag1);
    mask_rhs |= BitMask::from(Flag3);
    assert_eq!(*mask_rhs, 0b00000101);
}
#[test]
fn test_bitand_assign() {
    // `&=` with a disjoint flag (or mask) clears everything.
    let mut flag_rhs = BitMask::from(Flag1);
    flag_rhs &= Flag3;
    assert_eq!(*flag_rhs, 0);
    let mut mask_rhs = BitMask::from(Flag1);
    mask_rhs &= BitMask::from(Flag3);
    assert_eq!(*mask_rhs, 0);
}
#[test]
fn test_bitxor_assign() {
    // `^=` toggles the shared Flag3 bit out of Flag123 for both RHS forms.
    let mut flag_rhs = BitMask::from(Flag123);
    flag_rhs ^= Flag3;
    assert_eq!(*flag_rhs, 0b00000011);
    let mut mask_rhs = BitMask::from(Flag123);
    mask_rhs ^= BitMask::from(Flag3);
    assert_eq!(*mask_rhs, 0b00000011);
}
#[test]
fn test_bitnot() {
    // `!` inverts every bit of the underlying `isize`, so the sign bit is set;
    // `isize::MIN` contributes exactly that high bit, and the low bits are the
    // complement of the flag within the declared flag range.
    // (Uses the `isize::MIN` associated constant instead of the
    // soft-deprecated `isize::min_value()`.)
    let res = !Flag2;
    assert_eq!(*res, isize::MIN + 0b00000101);
    let res = !BitMask::from(Flag1);
    assert_eq!(*res, isize::MIN + 0b00000110);
}
#[test]
fn test_pub_mask() {
    // Regression test: the `pub mask` form of the macro must generate items
    // that are visible from outside the defining module.
    mod inner {
        bitmask! {
            pub mask InnerMask: u8 where flags InnerFlags {
                InnerFlag1 = 0
            }
        }
    }
    // Both the mask type and the flags enum must be reachable via `inner::`;
    // this test only needs to compile, so the values are discarded.
    let _ = inner::InnerMask::none();
    let _ = inner::InnerFlags::InnerFlag1;
}
}
|
use std::fs ;
use std::fs::File;
use std::io::Read;
use std::io::{BufRead, BufReader};
/// Demonstrates several ways of reading a file in Rust.
/// Each section reads `sample.txt`; the `expect`-based sections panic if the
/// file is missing, the `Result`-based sections report or skip instead.
fn main() {
    // Read the whole file into a String in one call.
    let path = "sample.txt";
    println!("read all lines.");
    if let Ok(data) = fs::read_to_string(path) {
        println!("data is {}", data);
    } else {
        println!("cannot open {}", path);
    }
    // Same read expressed with `match` instead of `if let`.
    match fs::read_to_string(path) {
        Ok(data) => println!("data is {}", data),
        _ => println!("cannot open {}", path),
    }

    // Read through an explicit File handle, panicking on failure.
    let path = "sample.txt";
    println!("read all lines by buffer.");
    let mut file = File::open(path).expect("file not found.");
    let mut data = String::new();
    file.read_to_string(&mut data).expect("read error");
    println!("data is {}", data);

    // Read through a File handle, using Results instead of panicking.
    let path = "sample.txt";
    println!("read all lines by buffer.");
    if let Ok(mut file) = File::open(path) {
        let mut data = String::new();
        if file.read_to_string(&mut data).is_ok() {
            println!("data is {}", data);
        }
    }

    // Read byte by byte. Check the byte count returned by `read` and stop at
    // end-of-file instead of re-printing a stale buffer (the original ignored
    // the count, so a short file would repeat its last byte).
    let path = "sample.txt";
    println!("read 16 bytes by buffer.");
    let mut file = File::open(path).expect("file not found.");
    let mut buf: [u8; 1] = [0; 1];
    for i in 0..16 {
        let n = file.read(&mut buf).expect("read error");
        if n == 0 {
            break; // EOF reached before 16 bytes
        }
        println!("buf is {}: {}", i, buf[0] as char);
    }

    // Read one line at a time through a buffered reader.
    println!("read every one line.");
    let file = File::open(path).expect("file not found.");
    for line in BufReader::new(file).lines() {
        if let Ok(l) = line {
            println!("line is {}", l);
        }
    }
}
/// Line-by-line read of `sample.txt` with `?`-based error propagation:
/// both the open failure and any per-line read error bubble up to the caller.
fn _main() -> std::io::Result<()> {
    let reader = BufReader::new(File::open("sample.txt")?);
    for line in reader.lines() {
        println!("line is {}", line?);
    }
    Ok(())
}
|
//! ## deno_bindgen
//! This tool aims to simplify type &amp; glue code generation for FFI
//! libraries written in Rust.
//!
//! ### Usage
//! Add `serde` and `deno_bindgen` dependency to your crate.
//!
//! ```
//! use deno_bindgen::deno_bindgen;
//!
//! #[deno_bindgen]
//! pub struct Input {
//! /// Doc comments are transformed into
//! /// jsdocs.
//! a: Vec<Vec<String>>,
//! }
//!
//! #[deno_bindgen(non_blocking)]
//! pub fn say_hello(message: &str) {
//! println!("{}", message);
//! }
//! ```
//!
//! Generated bindings will look like this:
//! ```
//! // bindings/binding.ts
//!
//! // ... <init code here>
//!
//! type Input = {
//! /**
//! * Doc comments are transformed into
//! * jsdocs.
//! **/
//! a: Array<Array<string>>;
//! };
//!
//! export async function say_hello(message: string) {
//! // ... <glue code for symbol here>
//! }
//! ```
//! These bindings contain the necessary code to open the shared library,
//! define symbols and expose type definitions.
//! They can be simply imported into Deno code:
//! ```
//! import { say_hello } from "./bindings/bindings.ts";
//! await say_hello("Demn!")
//! ```
//!
pub use ::serde_json;
pub use deno_bindgen_macro::deno_bindgen;
|
use std::path::PathBuf;
use crate::os::{SysFreeString, SysStringLen, BSTR, HRESULT, LPSTR, LPWSTR, WCHAR};
use crate::wrapper::*;
use thiserror::Error;
/// Convert a Rust string into a NUL-terminated wide-character (`WCHAR`) buffer.
///
/// Replaces the bare `unwrap()` with an `expect` that states the only way the
/// conversion can fail: an interior NUL byte in `msg`.
pub(crate) fn to_wide(msg: &str) -> Vec<WCHAR> {
    widestring::WideCString::from_str(msg)
        .expect("string passed to to_wide must not contain interior NUL bytes")
        .into_vec_with_nul()
}
/// Decode a NUL-terminated wide string pointer into an owned `String`.
/// Panics if the buffer is not valid UTF-16.
pub(crate) fn from_wide(wide: LPWSTR) -> String {
    // SAFETY: assumes `wide` points at a valid, NUL-terminated wide-string
    // buffer — TODO confirm every caller guarantees this.
    unsafe {
        widestring::WideCStr::from_ptr_str(wide)
            .to_string()
            .expect("widestring decode failed")
    }
}
/// Decode a `BSTR` into an owned `String`, releasing the `BSTR` afterwards.
/// This function takes ownership of `string`: it is freed before returning.
pub(crate) fn from_bstr(string: BSTR) -> String {
    // SAFETY: assumes `string` is a valid BSTR so that `SysStringLen` and
    // `SysFreeString` apply — TODO confirm at call sites.
    unsafe {
        // BSTRs carry an explicit length; use it instead of scanning for NUL.
        let len = SysStringLen(string) as usize;
        let result = widestring::WideStr::from_ptr(string, len)
            .to_string()
            .expect("widestring decode failed");
        // Free the BSTR once decoded — callers must not use it afterwards.
        SysFreeString(string);
        result
    }
}
/// Decode a NUL-terminated narrow C string (`LPSTR`) into an owned `String`.
/// Panics if the bytes are not valid UTF-8.
pub(crate) fn from_lpstr(string: LPSTR) -> String {
    // SAFETY: assumes `string` points at a valid NUL-terminated buffer; the
    // scan below determines its length — TODO confirm callers uphold this.
    unsafe {
        // Walk forward until the terminating NUL to find the length.
        let len = (0..).take_while(|&i| *string.offset(i) != 0).count();
        let slice: &[u8] = std::slice::from_raw_parts(string as *const u8, len);
        std::str::from_utf8(slice).map(|s| s.to_owned()).unwrap()
    }
}
/// Fallback include handler that resolves include paths directly from disk.
struct DefaultIncludeHandler {}

impl DxcIncludeHandler for DefaultIncludeHandler {
    /// Return the file's contents, or `None` if it cannot be read.
    fn load_source(&self, filename: String) -> Option<String> {
        // `read_to_string` covers open + read in one call. The original
        // panicked (`unwrap`) if the read failed *after* a successful open;
        // mapping every failure to `None` matches the handler's intent of
        // "include not resolvable here".
        std::fs::read_to_string(filename).ok()
    }
}
/// Errors produced by this crate's DXC wrapper functions.
#[derive(Error, Debug)]
pub enum HassleError {
    /// Raw `HRESULT` from a failed underlying API call.
    #[error("Win32 error: {0:X}")]
    Win32Error(HRESULT),
    /// Diagnostics text from a failed compilation.
    #[error("Compile error: {0}")]
    CompileError(String),
    /// Diagnostics text from a failed validation.
    #[error("Validation error: {0}")]
    ValidationError(String),
    /// The dynamic library at `filename` could not be loaded.
    #[error("Failed to load library {filename:?}: {inner:?}")]
    LoadLibraryError {
        filename: PathBuf,
        #[source]
        inner: libloading::Error,
    },
    /// Any other `libloading` failure (converted via `#[from]`).
    #[error("LibLoading error: {0:?}")]
    LibLoadingError(#[from] libloading::Error),
    /// The requested operation is only supported on Windows.
    #[error("Windows only")]
    WindowsOnly(String),
}
/// Helper function to directly compile a HLSL shader to an intermediate language,
/// this function expects `dxcompiler.dll` to be available in the current
/// executable environment.
///
/// Specify -spirv as one of the `args` to compile to SPIR-V
/// `dxc_path` can point to a library directly or the directory containing the library,
/// in which case the appended filename depends on the platform.
///
/// # Errors
/// Returns `HassleError::Win32Error` for API failures and
/// `HassleError::CompileError` with DXC's diagnostics when compilation fails.
pub fn compile_hlsl(
    source_name: &str,
    shader_text: &str,
    entry_point: &str,
    target_profile: &str,
    args: &[&str],
    defines: &[(&str, Option<&str>)],
) -> Result<Vec<u8>, HassleError> {
    // Load the DXC library and create the compiler/library interfaces.
    let dxc = Dxc::new(None)?;
    let compiler = dxc.create_compiler()?;
    let library = dxc.create_library()?;
    // Wrap the shader source text in a DXC blob.
    let blob = library
        .create_blob_with_encoding_from_str(shader_text)
        .map_err(HassleError::Win32Error)?;
    // Includes are resolved from disk via the default handler.
    let result = compiler.compile(
        &blob,
        source_name,
        entry_point,
        target_profile,
        args,
        Some(Box::new(DefaultIncludeHandler {})),
        defines,
    );
    match result {
        Err(result) => {
            // On failure, surface DXC's diagnostics as a CompileError string.
            let error_blob = result
                .0
                .get_error_buffer()
                .map_err(HassleError::Win32Error)?;
            Err(HassleError::CompileError(
                library.get_blob_as_string(&error_blob),
            ))
        }
        Ok(result) => {
            // On success, return the compiled output bytes.
            let result_blob = result.get_result().map_err(HassleError::Win32Error)?;
            Ok(result_blob.to_vec())
        }
    }
}
/// Helper function to validate a DXIL binary independent from the compilation process,
/// this function expects `dxcompiler.dll` and `dxil.dll` to be available in the current
/// execution environment.
///
/// `dxil.dll` is only available on Windows.
///
/// # Errors
/// Returns `HassleError::Win32Error` for API failures and
/// `HassleError::ValidationError` with the validator's diagnostics on failure.
pub fn validate_dxil(data: &[u8]) -> Result<Vec<u8>, HassleError> {
    // Load both libraries: DXC for blob handling, DXIL for the validator.
    let dxc = Dxc::new(None)?;
    let dxil = Dxil::new(None)?;
    let validator = dxil.create_validator()?;
    let library = dxc.create_library()?;
    // Wrap the raw DXIL bytes in a blob the validator can consume.
    let blob_encoding = library
        .create_blob_with_encoding(data)
        .map_err(HassleError::Win32Error)?;
    match validator.validate(blob_encoding.into()) {
        // Validation succeeded: return the resulting blob's bytes.
        Ok(blob) => Ok(blob.to_vec()),
        Err(result) => {
            // Surface the validator's diagnostics as a ValidationError string.
            let error_blob = result
                .0
                .get_error_buffer()
                .map_err(HassleError::Win32Error)?;
            Err(HassleError::ValidationError(
                library.get_blob_as_string(&error_blob),
            ))
        }
    }
}
|
// Moving a variable leaves its heap data in place but hands it to a new owner.
// To end up with the same text twice on the heap, make an explicit deep copy.
// `clone` is a METHOD.
fn main() {
    let s1 = String::from("Hello");
    // Deep-copies the heap buffer, so both bindings stay usable below.
    let s2 = String::clone(&s1);
    println!("Voici s1 : {}, \net voici s2 : {}", s1, s2);
}
|
pub mod storage;
pub mod query;
pub mod csv;
pub mod failpoints;
pub mod buffering;
pub mod restapi; |
//! Tracer structure.
use arctk::{
access, clone,
geom::Ray,
math::{Dir3, Pos3},
};
/// Colouring tracer: a ray paired with a statistical weight and the total
/// distance it has travelled so far.
#[derive(Clone)]
pub struct Tracer {
    /// Internal ray (current position and direction).
    ray: Ray,
    /// Weighting power.
    weight: f64,
    /// Cumulative distance travelled.
    dist_travelled: f64,
}
impl Tracer {
    // Accessors generated by the arctk macros: `access!` yields a borrowed
    // getter for `ray`; `clone!` yields by-value getters (and, for `weight`,
    // an additional mutable accessor named `weight_mut`).
    // NOTE(review): exact generated signatures are defined by arctk — verify there.
    access!(ray, Ray);
    clone!(dist_travelled, f64);
    clone!(weight, weight_mut, f64);
    /// Construct a new instance.
    /// Starts with full weight (1.0) and zero distance travelled.
    #[inline]
    #[must_use]
    pub const fn new(ray: Ray) -> Self {
        Self {
            ray,
            weight: 1.0,
            dist_travelled: 0.0,
        }
    }
    /// Access the position.
    #[inline]
    #[must_use]
    pub const fn pos(&self) -> &Pos3 {
        self.ray.pos()
    }
    /// Access the direction.
    #[inline]
    #[must_use]
    pub const fn dir(&self) -> &Dir3 {
        self.ray.dir()
    }
    /// Set the tracer direction.
    #[inline]
    pub fn set_dir(&mut self, dir: Dir3) {
        *self.ray.dir_mut() = dir;
    }
    /// Move along the direction of travel a given distance.
    /// Also accumulates `dist` into the running total.
    #[inline]
    pub fn travel(&mut self, dist: f64) {
        debug_assert!(dist > 0.0); // travelling a non-positive distance is a logic error
        self.ray.travel(dist);
        self.dist_travelled += dist;
    }
}
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
// Aggregate error type generated by AutoRust (see the file header):
// one variant per operation, each wrapping that operation's
// module-local error via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    Service_ListFileSystems(#[from] service::list_file_systems::Error),
    #[error(transparent)]
    FileSystem_Create(#[from] file_system::create::Error),
    #[error(transparent)]
    FileSystem_SetProperties(#[from] file_system::set_properties::Error),
    #[error(transparent)]
    FileSystem_Delete(#[from] file_system::delete::Error),
    #[error(transparent)]
    FileSystem_GetProperties(#[from] file_system::get_properties::Error),
    #[error(transparent)]
    FileSystem_ListPaths(#[from] file_system::list_paths::Error),
    #[error(transparent)]
    FileSystem_ListBlobHierarchySegment(#[from] file_system::list_blob_hierarchy_segment::Error),
    #[error(transparent)]
    Path_Read(#[from] path::read::Error),
    #[error(transparent)]
    Path_Lease(#[from] path::lease::Error),
    #[error(transparent)]
    Path_Create(#[from] path::create::Error),
    #[error(transparent)]
    Path_Update(#[from] path::update::Error),
    #[error(transparent)]
    Path_Delete(#[from] path::delete::Error),
    #[error(transparent)]
    Path_GetProperties(#[from] path::get_properties::Error),
    #[error(transparent)]
    Path_SetAccessControl(#[from] path::set_access_control::Error),
    #[error(transparent)]
    Path_SetAccessControlRecursive(#[from] path::set_access_control_recursive::Error),
    #[error(transparent)]
    Path_FlushData(#[from] path::flush_data::Error),
    #[error(transparent)]
    Path_AppendData(#[from] path::append_data::Error),
    #[error(transparent)]
    Path_SetExpiry(#[from] path::set_expiry::Error),
    #[error(transparent)]
    Path_Undelete(#[from] path::undelete::Error),
}
// NOTE: generated by AutoRust (see the file header) — prefer regenerating
// over hand-editing. The order of query-pair appends and header insertions
// is observable on the wire, so it is preserved exactly.
pub mod service {
    use super::{models, API_VERSION};
    /// List the file systems in the account (`GET /`).
    /// Optional parameters become query pairs / headers only when `Some`.
    pub async fn list_file_systems(
        operation_config: &crate::OperationConfig,
        resource: &str,
        prefix: Option<&str>,
        continuation: Option<&str>,
        max_results: Option<i32>,
        x_ms_client_request_id: Option<&str>,
        timeout: Option<i64>,
        x_ms_version: &str,
    ) -> std::result::Result<models::FileSystemList, list_file_systems::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!("{}/", operation_config.base_path(),);
        let mut url = url::Url::parse(url_str).map_err(list_file_systems::Error::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        // Attach a bearer token only when the config supplies a credential.
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .map_err(list_file_systems::Error::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("resource", resource);
        if let Some(prefix) = prefix {
            url.query_pairs_mut().append_pair("prefix", prefix);
        }
        if let Some(continuation) = continuation {
            url.query_pairs_mut().append_pair("continuation", continuation);
        }
        if let Some(max_results) = max_results {
            url.query_pairs_mut().append_pair("maxResults", max_results.to_string().as_str());
        }
        if let Some(x_ms_client_request_id) = x_ms_client_request_id {
            req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
        }
        if let Some(timeout) = timeout {
            url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
        }
        req_builder = req_builder.header("x-ms-version", x_ms_version);
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).map_err(list_file_systems::Error::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .map_err(list_file_systems::Error::ExecuteRequestError)?;
        match rsp.status() {
            // 200: body is the FileSystemList payload.
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: models::FileSystemList = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_file_systems::Error::DeserializeError(source, rsp_body.clone()))?;
                Ok(rsp_value)
            }
            // Anything else: body is a StorageError payload.
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
                    .map_err(|source| list_file_systems::Error::DeserializeError(source, rsp_body.clone()))?;
                Err(list_file_systems::Error::DefaultResponse {
                    status_code,
                    value: rsp_value,
                })
            }
        }
    }
    pub mod list_file_systems {
        use super::{models, API_VERSION};
        /// Errors for `list_file_systems` (generated; `SerializeError` is
        /// unused here since the request has no body).
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::StorageError,
            },
            #[error("Failed to parse request URL: {0}")]
            ParseUrlError(url::ParseError),
            #[error("Failed to build request: {0}")]
            BuildRequestError(http::Error),
            #[error("Failed to execute request: {0}")]
            ExecuteRequestError(azure_core::HttpError),
            #[error("Failed to serialize request body: {0}")]
            SerializeError(serde_json::Error),
            #[error("Failed to deserialize response: {0}, body: {1:?}")]
            DeserializeError(serde_json::Error, bytes::Bytes),
            #[error("Failed to get access token: {0}")]
            GetTokenError(azure_core::Error),
        }
    }
}
pub mod file_system {
use super::{models, API_VERSION};
pub async fn create(
operation_config: &crate::OperationConfig,
filesystem: &str,
resource: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
x_ms_properties: Option<&str>,
) -> std::result::Result<(), create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), filesystem);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("resource", resource);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(x_ms_properties) = x_ms_properties {
req_builder = req_builder.header("x-ms-properties", x_ms_properties);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod create {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn set_properties(
operation_config: &crate::OperationConfig,
filesystem: &str,
resource: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
x_ms_properties: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
) -> std::result::Result<(), set_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), filesystem);
let mut url = url::Url::parse(url_str).map_err(set_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("resource", resource);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(x_ms_properties) = x_ms_properties {
req_builder = req_builder.header("x-ms-properties", x_ms_properties);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| set_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_properties::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod set_properties {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
filesystem: &str,
resource: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
) -> std::result::Result<(), delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), filesystem);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("resource", resource);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod delete {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn get_properties(
operation_config: &crate::OperationConfig,
filesystem: &str,
resource: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
) -> std::result::Result<(), get_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}", operation_config.base_path(), filesystem);
let mut url = url::Url::parse(url_str).map_err(get_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("resource", resource);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_properties::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod get_properties {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_paths(
operation_config: &crate::OperationConfig,
filesystem: &str,
resource: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
continuation: Option<&str>,
directory: Option<&str>,
recursive: bool,
max_results: Option<i32>,
upn: Option<bool>,
) -> std::result::Result<models::PathList, list_paths::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}?resource=filesystem", operation_config.base_path(), filesystem);
let mut url = url::Url::parse(url_str).map_err(list_paths::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_paths::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("resource", resource);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(continuation) = continuation {
url.query_pairs_mut().append_pair("continuation", continuation);
}
if let Some(directory) = directory {
url.query_pairs_mut().append_pair("directory", directory);
}
url.query_pairs_mut().append_pair("recursive", recursive.to_string().as_str());
if let Some(max_results) = max_results {
url.query_pairs_mut().append_pair("maxResults", max_results.to_string().as_str());
}
if let Some(upn) = upn {
url.query_pairs_mut().append_pair("upn", upn.to_string().as_str());
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_paths::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_paths::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::PathList =
serde_json::from_slice(rsp_body).map_err(|source| list_paths::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| list_paths::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_paths::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod list_paths {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn list_blob_hierarchy_segment(
operation_config: &crate::OperationConfig,
filesystem: &str,
restype: &str,
comp: &str,
prefix: Option<&str>,
delimiter: Option<&str>,
marker: Option<&str>,
max_results: Option<i32>,
include: &[&str],
showonly: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
x_ms_client_request_id: Option<&str>,
) -> std::result::Result<models::ListBlobsHierarchySegmentResponse, list_blob_hierarchy_segment::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}?restype=container&comp=list&hierarchy",
operation_config.base_path(),
filesystem
);
let mut url = url::Url::parse(url_str).map_err(list_blob_hierarchy_segment::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_blob_hierarchy_segment::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("restype", restype);
url.query_pairs_mut().append_pair("comp", comp);
if let Some(prefix) = prefix {
url.query_pairs_mut().append_pair("prefix", prefix);
}
if let Some(delimiter) = delimiter {
url.query_pairs_mut().append_pair("delimiter", delimiter);
}
if let Some(marker) = marker {
url.query_pairs_mut().append_pair("marker", marker);
}
if let Some(max_results) = max_results {
url.query_pairs_mut().append_pair("maxResults", max_results.to_string().as_str());
}
if let Some(showonly) = showonly {
url.query_pairs_mut().append_pair("showonly", showonly);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(list_blob_hierarchy_segment::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_blob_hierarchy_segment::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ListBlobsHierarchySegmentResponse = serde_json::from_slice(rsp_body)
.map_err(|source| list_blob_hierarchy_segment::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
.map_err(|source| list_blob_hierarchy_segment::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_blob_hierarchy_segment::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error types for the `list_blob_hierarchy_segment` operation defined above.
pub mod list_blob_hierarchy_segment {
use super::{models, API_VERSION};
/// Failure modes of `list_blob_hierarchy_segment`; `DefaultResponse` wraps
/// any unexpected HTTP status together with the service-reported
/// `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// Declared for symmetry with other operations; this GET sends no body.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
// Carries the raw response bytes alongside the serde error for diagnostics.
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
pub mod path {
use super::{models, API_VERSION};
/// Reads (downloads) the file at `{filesystem}/{path}` via HTTP GET.
///
/// Optional arguments map to query parameters (`timeout`) and request
/// headers: a byte `range`, a lease id, a transactional-MD5 request for
/// ranged reads (`x_ms_range_get_content_md5`), and the four standard
/// HTTP conditional headers (`if_*`).
///
/// # Errors
/// Returns `read::Error` when URL parsing, token acquisition, request
/// building/execution, or response deserialization fails, and
/// `DefaultResponse` for any status other than 200/206.
pub async fn read(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
range: Option<&str>,
x_ms_lease_id: Option<&str>,
x_ms_range_get_content_md5: Option<bool>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
) -> std::result::Result<read::Response, read::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(read::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(read::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
// Optional headers/query parameters; each is emitted only when supplied.
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(range) = range {
req_builder = req_builder.header("Range", range);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(x_ms_range_get_content_md5) = x_ms_range_get_content_md5 {
req_builder = req_builder.header("x-ms-range-get-content-md5", x_ms_range_get_content_md5.to_string());
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
// GET carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(read::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(read::Error::ExecuteRequestError)?;
// 200 = full content, 206 = partial content for ranged reads.
// NOTE(review): the payload is decoded as JSON; a read of raw/binary file
// content would fail deserialization here — confirm this is intended for
// the files this client targets.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: serde_json::Value =
serde_json::from_slice(rsp_body).map_err(|source| read::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(read::Response::Ok200(rsp_value))
}
http::StatusCode::PARTIAL_CONTENT => {
let rsp_body = rsp.body();
let rsp_value: serde_json::Value =
serde_json::from_slice(rsp_body).map_err(|source| read::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(read::Response::PartialContent206(rsp_value))
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| read::Error::DeserializeError(source, rsp_body.clone()))?;
Err(read::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for the `read` operation defined above.
pub mod read {
use super::{models, API_VERSION};
/// Success payloads of `read`: 200 (full content) and 206 (partial
/// content for ranged reads), both carried as raw JSON values.
#[derive(Debug)]
pub enum Response {
Ok200(serde_json::Value),
PartialContent206(serde_json::Value),
}
/// Failure modes of `read`; `DefaultResponse` wraps any unexpected HTTP
/// status together with the service-reported `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// Declared for symmetry with other operations; this GET sends no body.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Manages a lease on `{filesystem}/{path}` via HTTP POST.
///
/// The lease operation is selected by the required `x_ms_lease_action`
/// header; duration, break period, current and proposed lease ids, and the
/// standard conditional headers are attached only when provided.
///
/// # Errors
/// Returns `lease::Error` when URL parsing, token acquisition, request
/// building/execution, or error-body deserialization fails, and
/// `DefaultResponse` for any status other than 200/201/202.
pub async fn lease(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
x_ms_lease_action: &str,
x_ms_lease_duration: Option<i32>,
x_ms_lease_break_period: Option<i32>,
x_ms_lease_id: Option<&str>,
x_ms_proposed_lease_id: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
) -> std::result::Result<lease::Response, lease::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(lease::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token only when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(lease::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
// The lease action header is mandatory and selects the sub-operation.
req_builder = req_builder.header("x-ms-lease-action", x_ms_lease_action);
if let Some(x_ms_lease_duration) = x_ms_lease_duration {
req_builder = req_builder.header("x-ms-lease-duration", x_ms_lease_duration);
}
if let Some(x_ms_lease_break_period) = x_ms_lease_break_period {
req_builder = req_builder.header("x-ms-lease-break-period", x_ms_lease_break_period);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(x_ms_proposed_lease_id) = x_ms_proposed_lease_id {
req_builder = req_builder.header("x-ms-proposed-lease-id", x_ms_proposed_lease_id);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
// Empty body; Content-Length is set to 0 explicitly for this POST.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(lease::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(lease::Error::ExecuteRequestError)?;
// Success statuses carry no payload; only the status code is surfaced.
match rsp.status() {
http::StatusCode::OK => Ok(lease::Response::Ok200),
http::StatusCode::CREATED => Ok(lease::Response::Created201),
http::StatusCode::ACCEPTED => Ok(lease::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| lease::Error::DeserializeError(source, rsp_body.clone()))?;
Err(lease::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for the `lease` operation defined above.
pub mod lease {
use super::{models, API_VERSION};
/// Success statuses of `lease`; the service returns no payload, so the
/// variants only record which status was observed.
#[derive(Debug)]
pub enum Response {
Ok200,
Created201,
Accepted202,
}
/// Failure modes of `lease`; `DefaultResponse` wraps any unexpected HTTP
/// status together with the service-reported `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// Declared for symmetry with other operations; this POST sends no body.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Creates (or renames onto) the path `{filesystem}/{path}` via HTTP PUT.
///
/// `resource`, `continuation`, and `mode` are sent as query parameters;
/// cache/content metadata, rename source, lease ids, properties,
/// permissions, umask, and both local (`if_*`) and rename-source
/// (`x_ms_source_if_*`) conditional headers are attached only when
/// provided. Succeeds only on HTTP 201 Created and returns no payload.
///
/// # Errors
/// Returns `create::Error` when URL parsing, token acquisition, request
/// building/execution, or error-body deserialization fails, and
/// `DefaultResponse` for any status other than 201.
pub async fn create(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
resource: Option<&str>,
continuation: Option<&str>,
mode: Option<&str>,
x_ms_cache_control: Option<&str>,
x_ms_content_encoding: Option<&str>,
x_ms_content_language: Option<&str>,
x_ms_content_disposition: Option<&str>,
x_ms_content_type: Option<&str>,
x_ms_rename_source: Option<&str>,
x_ms_lease_id: Option<&str>,
x_ms_source_lease_id: Option<&str>,
x_ms_properties: Option<&str>,
x_ms_permissions: Option<&str>,
x_ms_umask: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
x_ms_source_if_match: Option<&str>,
x_ms_source_if_none_match: Option<&str>,
x_ms_source_if_modified_since: Option<&str>,
x_ms_source_if_unmodified_since: Option<&str>,
) -> std::result::Result<(), create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
// Query parameters selecting what is created and how.
if let Some(resource) = resource {
url.query_pairs_mut().append_pair("resource", resource);
}
if let Some(continuation) = continuation {
url.query_pairs_mut().append_pair("continuation", continuation);
}
if let Some(mode) = mode {
url.query_pairs_mut().append_pair("mode", mode);
}
// Optional metadata and conditional headers, emitted only when supplied.
if let Some(x_ms_cache_control) = x_ms_cache_control {
req_builder = req_builder.header("x-ms-cache-control", x_ms_cache_control);
}
if let Some(x_ms_content_encoding) = x_ms_content_encoding {
req_builder = req_builder.header("x-ms-content-encoding", x_ms_content_encoding);
}
if let Some(x_ms_content_language) = x_ms_content_language {
req_builder = req_builder.header("x-ms-content-language", x_ms_content_language);
}
if let Some(x_ms_content_disposition) = x_ms_content_disposition {
req_builder = req_builder.header("x-ms-content-disposition", x_ms_content_disposition);
}
if let Some(x_ms_content_type) = x_ms_content_type {
req_builder = req_builder.header("x-ms-content-type", x_ms_content_type);
}
if let Some(x_ms_rename_source) = x_ms_rename_source {
req_builder = req_builder.header("x-ms-rename-source", x_ms_rename_source);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(x_ms_source_lease_id) = x_ms_source_lease_id {
req_builder = req_builder.header("x-ms-source-lease-id", x_ms_source_lease_id);
}
if let Some(x_ms_properties) = x_ms_properties {
req_builder = req_builder.header("x-ms-properties", x_ms_properties);
}
if let Some(x_ms_permissions) = x_ms_permissions {
req_builder = req_builder.header("x-ms-permissions", x_ms_permissions);
}
if let Some(x_ms_umask) = x_ms_umask {
req_builder = req_builder.header("x-ms-umask", x_ms_umask);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
// Conditions evaluated against the rename source (used with
// x-ms-rename-source), not the destination path.
if let Some(x_ms_source_if_match) = x_ms_source_if_match {
req_builder = req_builder.header("x-ms-source-if-match", x_ms_source_if_match);
}
if let Some(x_ms_source_if_none_match) = x_ms_source_if_none_match {
req_builder = req_builder.header("x-ms-source-if-none-match", x_ms_source_if_none_match);
}
if let Some(x_ms_source_if_modified_since) = x_ms_source_if_modified_since {
req_builder = req_builder.header("x-ms-source-if-modified-since", x_ms_source_if_modified_since);
}
if let Some(x_ms_source_if_unmodified_since) = x_ms_source_if_unmodified_since {
req_builder = req_builder.header("x-ms-source-if-unmodified-since", x_ms_source_if_unmodified_since);
}
// PUT carries no payload here; content is uploaded by separate operations.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error types for the `create` operation defined above.
pub mod create {
use super::{models, API_VERSION};
/// Failure modes of `create`; `DefaultResponse` wraps any unexpected HTTP
/// status together with the service-reported `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// Declared for symmetry with other operations; this PUT sends no body.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Updates the path `{filesystem}/{path}` via HTTP PATCH with a JSON body.
///
/// The required `action` and `mode` query parameters select the update
/// sub-operation; pagination (`maxRecords`, `continuation`), flush/append
/// controls (`position`, `retainUncommittedData`, `close`), content
/// metadata, ownership/ACL headers, and conditional headers are attached
/// only when provided. `body` is serialized as JSON.
///
/// # Errors
/// Returns `update::Error` when URL parsing, token acquisition, body
/// serialization, request building/execution, or response deserialization
/// fails, and `DefaultResponse` for any status other than 200/202.
pub async fn update(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
action: &str,
max_records: Option<i32>,
continuation: Option<&str>,
mode: &str,
force_flag: Option<bool>,
position: Option<i64>,
retain_uncommitted_data: Option<bool>,
close: Option<bool>,
content_length: Option<i64>,
x_ms_content_md5: Option<&str>,
x_ms_lease_id: Option<&str>,
x_ms_cache_control: Option<&str>,
x_ms_content_type: Option<&str>,
x_ms_content_disposition: Option<&str>,
x_ms_content_encoding: Option<&str>,
x_ms_content_language: Option<&str>,
x_ms_properties: Option<&str>,
x_ms_owner: Option<&str>,
x_ms_group: Option<&str>,
x_ms_permissions: Option<&str>,
x_ms_acl: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
body: &serde_json::Value,
) -> std::result::Result<update::Response, update::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(update::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// Attach a bearer token only when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(update::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
// `action` and `mode` are mandatory query parameters.
url.query_pairs_mut().append_pair("action", action);
if let Some(max_records) = max_records {
url.query_pairs_mut().append_pair("maxRecords", max_records.to_string().as_str());
}
if let Some(continuation) = continuation {
url.query_pairs_mut().append_pair("continuation", continuation);
}
url.query_pairs_mut().append_pair("mode", mode);
if let Some(force_flag) = force_flag {
url.query_pairs_mut().append_pair("forceFlag", force_flag.to_string().as_str());
}
if let Some(position) = position {
url.query_pairs_mut().append_pair("position", position.to_string().as_str());
}
if let Some(retain_uncommitted_data) = retain_uncommitted_data {
url.query_pairs_mut()
.append_pair("retainUncommittedData", retain_uncommitted_data.to_string().as_str());
}
if let Some(close) = close {
url.query_pairs_mut().append_pair("close", close.to_string().as_str());
}
// Optional content, ownership/ACL, and conditional headers.
if let Some(content_length) = content_length {
req_builder = req_builder.header("Content-Length", content_length);
}
if let Some(x_ms_content_md5) = x_ms_content_md5 {
req_builder = req_builder.header("x-ms-content-md5", x_ms_content_md5);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(x_ms_cache_control) = x_ms_cache_control {
req_builder = req_builder.header("x-ms-cache-control", x_ms_cache_control);
}
if let Some(x_ms_content_type) = x_ms_content_type {
req_builder = req_builder.header("x-ms-content-type", x_ms_content_type);
}
if let Some(x_ms_content_disposition) = x_ms_content_disposition {
req_builder = req_builder.header("x-ms-content-disposition", x_ms_content_disposition);
}
if let Some(x_ms_content_encoding) = x_ms_content_encoding {
req_builder = req_builder.header("x-ms-content-encoding", x_ms_content_encoding);
}
if let Some(x_ms_content_language) = x_ms_content_language {
req_builder = req_builder.header("x-ms-content-language", x_ms_content_language);
}
if let Some(x_ms_properties) = x_ms_properties {
req_builder = req_builder.header("x-ms-properties", x_ms_properties);
}
if let Some(x_ms_owner) = x_ms_owner {
req_builder = req_builder.header("x-ms-owner", x_ms_owner);
}
if let Some(x_ms_group) = x_ms_group {
req_builder = req_builder.header("x-ms-group", x_ms_group);
}
if let Some(x_ms_permissions) = x_ms_permissions {
req_builder = req_builder.header("x-ms-permissions", x_ms_permissions);
}
if let Some(x_ms_acl) = x_ms_acl {
req_builder = req_builder.header("x-ms-acl", x_ms_acl);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
// JSON request body, unlike the other (empty-body) operations here.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(update::Error::SerializeError)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(update::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(update::Error::ExecuteRequestError)?;
// 200 carries a SetAccessControlRecursiveResponse payload; 202 has none.
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SetAccessControlRecursiveResponse =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(update::Response::Ok200(rsp_value))
}
http::StatusCode::ACCEPTED => Ok(update::Response::Accepted202),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| update::Error::DeserializeError(source, rsp_body.clone()))?;
Err(update::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Response and error types for the `update` operation defined above.
pub mod update {
use super::{models, API_VERSION};
/// Success payloads of `update`: 200 with a recursive-ACL result body,
/// or 202 Accepted with no payload.
#[derive(Debug)]
pub enum Response {
Ok200(models::SetAccessControlRecursiveResponse),
Accepted202,
}
/// Failure modes of `update`; `DefaultResponse` wraps any unexpected HTTP
/// status together with the service-reported `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn delete(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
recursive: Option<bool>,
continuation: Option<&str>,
x_ms_lease_id: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
) -> std::result::Result<(), delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(recursive) = recursive {
url.query_pairs_mut().append_pair("recursive", recursive.to_string().as_str());
}
if let Some(continuation) = continuation {
url.query_pairs_mut().append_pair("continuation", continuation);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error types for the `delete` operation defined above.
pub mod delete {
use super::{models, API_VERSION};
/// Failure modes of `delete`; `DefaultResponse` wraps any unexpected HTTP
/// status together with the service-reported `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// Declared for symmetry with other operations; this DELETE sends no body.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Fetches properties of `{filesystem}/{path}` via HTTP HEAD.
///
/// `action` and `upn` go into the query string; the lease id and the four
/// standard conditional headers are attached only when supplied.
///
/// NOTE(review): on success only the 200 status is inspected and `()` is
/// returned — the response headers that would carry the properties are
/// discarded by this function.
///
/// # Errors
/// Returns `get_properties::Error` when URL parsing, token acquisition,
/// request building/execution, or error-body deserialization fails, and
/// `DefaultResponse` for any status other than 200.
pub async fn get_properties(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
x_ms_client_request_id: Option<&str>,
timeout: Option<i64>,
x_ms_version: &str,
action: Option<&str>,
upn: Option<bool>,
x_ms_lease_id: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
) -> std::result::Result<(), get_properties::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(get_properties::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::HEAD);
// Attach a bearer token only when the configuration carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_properties::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(action) = action {
url.query_pairs_mut().append_pair("action", action);
}
if let Some(upn) = upn {
url.query_pairs_mut().append_pair("upn", upn.to_string().as_str());
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
// HEAD carries no payload.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_properties::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_properties::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| get_properties::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_properties::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error types for the `get_properties` operation defined above.
pub mod get_properties {
use super::{models, API_VERSION};
/// Failure modes of `get_properties`; `DefaultResponse` wraps any
/// unexpected HTTP status with the service-reported `StorageError` body.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
// Declared for symmetry with other operations; this HEAD sends no body.
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn set_access_control(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
action: &str,
timeout: Option<i64>,
x_ms_lease_id: Option<&str>,
x_ms_owner: Option<&str>,
x_ms_group: Option<&str>,
x_ms_permissions: Option<&str>,
x_ms_acl: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
x_ms_client_request_id: Option<&str>,
x_ms_version: &str,
) -> std::result::Result<(), set_access_control::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}?action=setAccessControl", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(set_access_control::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_access_control::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("action", action);
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(x_ms_owner) = x_ms_owner {
req_builder = req_builder.header("x-ms-owner", x_ms_owner);
}
if let Some(x_ms_group) = x_ms_group {
req_builder = req_builder.header("x-ms-group", x_ms_group);
}
if let Some(x_ms_permissions) = x_ms_permissions {
req_builder = req_builder.header("x-ms-permissions", x_ms_permissions);
}
if let Some(x_ms_acl) = x_ms_acl {
req_builder = req_builder.header("x-ms-acl", x_ms_acl);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_access_control::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_access_control::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
.map_err(|source| set_access_control::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_access_control::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod set_access_control {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Recursively applies POSIX access control to a filesystem path
/// (`PATCH {base_path}/{filesystem}/{path}?action=setAccessControlRecursive`).
/// On HTTP 200 the body is parsed as `SetAccessControlRecursiveResponse`;
/// any other status is decoded as a `StorageError` and returned through
/// `Error::DefaultResponse`.
/// NOTE(review): `action` is appended again as a query pair even though the
/// base URL already hard-codes `?action=setAccessControlRecursive`, yielding a
/// duplicate parameter — generated code; confirm the service tolerates it.
pub async fn set_access_control_recursive(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
action: &str,
timeout: Option<i64>,
continuation: Option<&str>,
mode: &str,
force_flag: Option<bool>,
max_records: Option<i32>,
x_ms_acl: Option<&str>,
x_ms_client_request_id: Option<&str>,
x_ms_version: &str,
) -> std::result::Result<models::SetAccessControlRecursiveResponse, set_access_control_recursive::Error> {
let http_client = operation_config.http_client();
let url_str = &format!(
"{}/{}/{}?action=setAccessControlRecursive",
operation_config.base_path(),
filesystem,
path
);
let mut url = url::Url::parse(url_str).map_err(set_access_control_recursive::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// A bearer token is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_access_control_recursive::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("action", action);
// Optional query parameters are appended only when supplied; `mode` is required.
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
if let Some(continuation) = continuation {
url.query_pairs_mut().append_pair("continuation", continuation);
}
url.query_pairs_mut().append_pair("mode", mode);
if let Some(force_flag) = force_flag {
url.query_pairs_mut().append_pair("forceFlag", force_flag.to_string().as_str());
}
if let Some(max_records) = max_records {
url.query_pairs_mut().append_pair("maxRecords", max_records.to_string().as_str());
}
// Optional request headers.
if let Some(x_ms_acl) = x_ms_acl {
req_builder = req_builder.header("x-ms-acl", x_ms_acl);
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
// The operation sends no body; all inputs travel via URL and headers.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.map_err(set_access_control_recursive::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_access_control_recursive::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::SetAccessControlRecursiveResponse = serde_json::from_slice(rsp_body)
.map_err(|source| set_access_control_recursive::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError = serde_json::from_slice(rsp_body)
.map_err(|source| set_access_control_recursive::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_access_control_recursive::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod set_access_control_recursive {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Flushes previously appended data to the file at `path`
/// (`PATCH {base_path}/{filesystem}/{path}?action=flush`).
/// Succeeds with `()` on HTTP 200; any other status is decoded as a
/// `StorageError` and returned through `Error::DefaultResponse`.
/// NOTE(review): `action` is appended again as a query pair even though the
/// base URL already hard-codes `?action=flush`, yielding a duplicate
/// parameter — generated code; confirm the service tolerates it.
pub async fn flush_data(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
action: &str,
timeout: Option<i64>,
position: Option<i64>,
retain_uncommitted_data: Option<bool>,
close: Option<bool>,
content_length: Option<i64>,
x_ms_content_md5: Option<&str>,
x_ms_lease_id: Option<&str>,
x_ms_cache_control: Option<&str>,
x_ms_content_type: Option<&str>,
x_ms_content_disposition: Option<&str>,
x_ms_content_encoding: Option<&str>,
x_ms_content_language: Option<&str>,
if_match: Option<&str>,
if_none_match: Option<&str>,
if_modified_since: Option<&str>,
if_unmodified_since: Option<&str>,
x_ms_client_request_id: Option<&str>,
x_ms_version: &str,
) -> std::result::Result<(), flush_data::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}?action=flush", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(flush_data::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// A bearer token is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(flush_data::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("action", action);
// Optional query parameters.
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
if let Some(position) = position {
url.query_pairs_mut().append_pair("position", position.to_string().as_str());
}
if let Some(retain_uncommitted_data) = retain_uncommitted_data {
url.query_pairs_mut()
.append_pair("retainUncommittedData", retain_uncommitted_data.to_string().as_str());
}
if let Some(close) = close {
url.query_pairs_mut().append_pair("close", close.to_string().as_str());
}
// Optional request headers: content metadata, lease, and HTTP conditional headers.
if let Some(content_length) = content_length {
req_builder = req_builder.header("Content-Length", content_length);
}
if let Some(x_ms_content_md5) = x_ms_content_md5 {
req_builder = req_builder.header("x-ms-content-md5", x_ms_content_md5);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
if let Some(x_ms_cache_control) = x_ms_cache_control {
req_builder = req_builder.header("x-ms-cache-control", x_ms_cache_control);
}
if let Some(x_ms_content_type) = x_ms_content_type {
req_builder = req_builder.header("x-ms-content-type", x_ms_content_type);
}
if let Some(x_ms_content_disposition) = x_ms_content_disposition {
req_builder = req_builder.header("x-ms-content-disposition", x_ms_content_disposition);
}
if let Some(x_ms_content_encoding) = x_ms_content_encoding {
req_builder = req_builder.header("x-ms-content-encoding", x_ms_content_encoding);
}
if let Some(x_ms_content_language) = x_ms_content_language {
req_builder = req_builder.header("x-ms-content-language", x_ms_content_language);
}
if let Some(if_match) = if_match {
req_builder = req_builder.header("If-Match", if_match);
}
if let Some(if_none_match) = if_none_match {
req_builder = req_builder.header("If-None-Match", if_none_match);
}
if let Some(if_modified_since) = if_modified_since {
req_builder = req_builder.header("If-Modified-Since", if_modified_since);
}
if let Some(if_unmodified_since) = if_unmodified_since {
req_builder = req_builder.header("If-Unmodified-Since", if_unmodified_since);
}
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
// Flush sends no body; the data being flushed was sent by prior appends.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(flush_data::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(flush_data::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| flush_data::Error::DeserializeError(source, rsp_body.clone()))?;
Err(flush_data::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod flush_data {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Appends data to the file at `path`
/// (`PATCH {base_path}/{filesystem}/{path}?action=append`).
/// Succeeds with `()` on HTTP 202 ACCEPTED; any other status is decoded as a
/// `StorageError` and returned through `Error::DefaultResponse`.
/// NOTE(review): the body is serialized as JSON with
/// `content-type: application/json`; for raw byte appends confirm this matches
/// the service contract. Also, `action` is appended as a query pair even
/// though the base URL already hard-codes `?action=append` (duplicate
/// parameter, generated code).
pub async fn append_data(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
action: &str,
position: Option<i64>,
timeout: Option<i64>,
content_length: Option<i64>,
content_md5: Option<&str>,
x_ms_content_crc64: Option<&str>,
x_ms_lease_id: Option<&str>,
body: &serde_json::Value,
x_ms_client_request_id: Option<&str>,
x_ms_version: &str,
) -> std::result::Result<(), append_data::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}?action=append", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(append_data::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PATCH);
// A bearer token is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(append_data::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("action", action);
// Optional query parameters.
if let Some(position) = position {
url.query_pairs_mut().append_pair("position", position.to_string().as_str());
}
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
// Optional request headers: payload length, integrity checksums, lease.
if let Some(content_length) = content_length {
req_builder = req_builder.header("Content-Length", content_length);
}
if let Some(content_md5) = content_md5 {
req_builder = req_builder.header("Content-MD5", content_md5);
}
if let Some(x_ms_content_crc64) = x_ms_content_crc64 {
req_builder = req_builder.header("x-ms-content-crc64", x_ms_content_crc64);
}
if let Some(x_ms_lease_id) = x_ms_lease_id {
req_builder = req_builder.header("x-ms-lease-id", x_ms_lease_id);
}
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(body).map_err(append_data::Error::SerializeError)?;
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(append_data::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(append_data::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::ACCEPTED => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| append_data::Error::DeserializeError(source, rsp_body.clone()))?;
Err(append_data::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod append_data {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Schedules expiry for the file at `path`
/// (`PUT {base_path}/{filesystem}/{path}?comp=expiry`).
/// Succeeds with `()` on HTTP 200; any other status is decoded as a
/// `StorageError` and returned through `Error::DefaultResponse`.
/// NOTE(review): `comp` is appended again as a query pair even though the base
/// URL already hard-codes `?comp=expiry` (duplicate parameter, generated
/// code). `x_ms_expiry_option` presumably selects how `x_ms_expiry_time` is
/// interpreted — confirm against the REST specification.
pub async fn set_expiry(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
comp: &str,
timeout: Option<i64>,
x_ms_version: &str,
x_ms_client_request_id: Option<&str>,
x_ms_expiry_option: &str,
x_ms_expiry_time: Option<&str>,
) -> std::result::Result<(), set_expiry::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}?comp=expiry", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(set_expiry::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// A bearer token is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_expiry::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("comp", comp);
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
req_builder = req_builder.header("x-ms-expiry-option", x_ms_expiry_option);
if let Some(x_ms_expiry_time) = x_ms_expiry_time {
req_builder = req_builder.header("x-ms-expiry-time", x_ms_expiry_time);
}
// The operation sends no body.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_expiry::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_expiry::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| set_expiry::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_expiry::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod set_expiry {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Restores a soft-deleted path
/// (`PUT {base_path}/{filesystem}/{path}?comp=undelete`).
/// Succeeds with `()` on HTTP 200; any other status is decoded as a
/// `StorageError` and returned through `Error::DefaultResponse`.
/// NOTE(review): `comp` is appended again as a query pair even though the base
/// URL already hard-codes `?comp=undelete` (duplicate parameter, generated
/// code); confirm the service tolerates it.
pub async fn undelete(
operation_config: &crate::OperationConfig,
filesystem: &str,
path: &str,
comp: &str,
timeout: Option<i64>,
x_ms_undelete_source: Option<&str>,
x_ms_version: &str,
x_ms_client_request_id: Option<&str>,
) -> std::result::Result<(), undelete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/{}/{}?comp=undelete", operation_config.base_path(), filesystem, path);
let mut url = url::Url::parse(url_str).map_err(undelete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// A bearer token is attached only when the config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(undelete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("comp", comp);
if let Some(timeout) = timeout {
url.query_pairs_mut().append_pair("timeout", timeout.to_string().as_str());
}
if let Some(x_ms_undelete_source) = x_ms_undelete_source {
req_builder = req_builder.header("x-ms-undelete-source", x_ms_undelete_source);
}
req_builder = req_builder.header("x-ms-version", x_ms_version);
if let Some(x_ms_client_request_id) = x_ms_client_request_id {
req_builder = req_builder.header("x-ms-client-request-id", x_ms_client_request_id);
}
// The operation sends no body.
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(undelete::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(undelete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::StorageError =
serde_json::from_slice(rsp_body).map_err(|source| undelete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(undelete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod undelete {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::StorageError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
#![allow(dead_code, unused_macros)]
extern crate env_logger;
use std::path::PathBuf;
// Generates a `test_prim` test that hashes a fixed password with the given
// primitive and checks both the expected hash prefix and round-trip
// verification. `$name` is a path whose `default()` yields the primitive;
// `$prefix` is the crypt-style prefix the produced hash must start with.
macro_rules! config_test {
($name:ident, $prefix:expr) => {
// NOTE(review): this expands `#[macro_use] extern crate log;` at the macro
// call site — legal only at the crate root, and only once per crate. Confirm
// each test crate invokes `config_test!` exactly once at top level.
#[macro_use]
extern crate log;
#[test]
fn test_prim() {
common::init_test();
let password = "hunter2";
let config = libpasta::Config::with_primitive($name::default());
trace!("config setup as: {}", config.to_string());
let password_hash = config.hash_password(password);
assert!(password_hash.starts_with($prefix));
assert!(libpasta::verify_password(&password_hash, password));
}
};
}
/// Build a path to `filename` located two directory levels above this source
/// file (as reported by `file!()`), e.g. the test-fixture directory relative
/// to the crate root.
pub fn get_test_path(filename: &str) -> PathBuf {
    let this_file = PathBuf::from(file!());
    // Strip "<dir>/<file>" — equivalent to two `pop()` calls on the PathBuf.
    let base = this_file
        .parent()
        .and_then(std::path::Path::parent)
        .unwrap_or_else(|| std::path::Path::new(""));
    base.join(filename)
}
pub fn init_test() {
self::env_logger::init();
}
|
use anyhow::Result;
use structopt::StructOpt;
use sparktop::{
event::{Event, EventStream, Next},
sprocs::SProcs,
view::View,
};
// CLI options. Plain `//` comments are used deliberately: `///` doc comments
// would become structopt help text and change the CLI's output.
#[derive(StructOpt)]
struct Opt {
// seconds between refresh ticks (drives the EventStream period in main).
#[structopt(short, default_value = "1.")]
delay: f64,
// weight given to new samples.
#[structopt(short, default_value = "0.5")]
ewma_weight: f64,
}
/// Entry point: wires the process sampler, terminal view, and tick/key event
/// loop together. Returns any draw error via `anyhow::Result`.
fn main() -> Result<()> {
    // TODO: do something with logs so they appear in special debug pane?
    // Provide a default log level, but respect an explicit RUST_LOG from the
    // caller's environment. (Fix: the previous code unconditionally overwrote
    // any user-supplied filter such as RUST_LOG=debug.)
    if std::env::var_os("RUST_LOG").is_none() {
        std::env::set_var("RUST_LOG", "info");
    }
    pretty_env_logger::init();
    let opt = Opt::from_args();
    let mut sprocs = SProcs::default();
    let mut view = View::default();
    // Tick events arrive every `opt.delay` seconds.
    let events = EventStream::new(std::time::Duration::from_secs_f64(opt.delay));
    // hmm, maybe can restructure so that quitting gets injected as an event,
    // which halts the EventStream iterator?
    for event in events {
        let next = match event {
            Event::Resize => Next::Continue,
            Event::Key(k) => view.handle_key(k),
            Event::Tick => {
                // Fold the newest sample into the running averages.
                sprocs.update(opt.ewma_weight);
                Next::Continue
            }
        };
        match next {
            Next::Continue => view.draw(&mut sprocs.get().collect())?,
            Next::Quit => break,
        }
    }
    Ok(())
}
|
#![feature(async_await)]
#[macro_use]
extern crate rental;
use async_std::sync::RwLock;
use async_std::{io, net};
use std::net::ToSocketAddrs;
use std::str;
use std::sync::Arc;
use async_trait::async_trait;
use path_tree::PathTree;
mod app;
mod date;
mod request;
mod response;
pub mod middleware;
pub use crate::app::*;
pub use crate::middleware::Middleware;
pub use crate::request::*;
pub use crate::response::*;
/// An async request handler: maps a request plus the route parameters
/// extracted by the router to a response. `Send + Sync` so boxed handlers can
/// be stored in the shared router.
#[async_trait]
pub trait Handler: Send + Sync {
async fn call(&self, request: Request<'_>, params: Params<'_>) -> io::Result<Response>;
}
/// HTTP/1 application builder.
#[derive(Default)]
pub struct H1 {
// Handlers keyed by "/<METHOD>/<route>" paths (see `get`).
router: PathTree<Box<dyn Handler>>,
// Middleware stored in the order added via `using`.
middleware: Vec<Box<dyn Middleware>>,
}
impl H1 {
    /// Register `handle` for GET requests matching `route`; the handler is
    /// stored under a "/GET/<route>" key in the path tree.
    pub fn get<H: Handler + Sized + 'static>(mut self, route: impl AsRef<str>, handle: H) -> Self {
        let key = format!("/GET/{}", route.as_ref());
        self.router.insert(&key, Box::new(handle));
        self
    }
    /// Add middleware.
    pub fn using<H: Middleware + Sized + 'static>(mut self, middleware: H) -> Self {
        self.middleware.push(Box::new(middleware));
        self
    }
    /// Bind a TCP listener on `addrs` and hand ownership of the router and
    /// middleware to the resulting `App`, wrapped for shared async access.
    pub async fn listen<A: ToSocketAddrs>(self, addrs: A) -> io::Result<App> {
        let listener = net::TcpListener::bind(addrs).await?;
        Ok(App {
            listener,
            middleware: Arc::new(RwLock::new(self.middleware)),
            router: Arc::new(RwLock::new(self.router)),
        })
    }
}
|
// Host-provided runtime interface imported by this Wasm test module.
// NOTE(review): parameter names suggest (pointer, length) pairs into the
// module's linear memory, with `out`/`result` naming destination buffers —
// confirm against the host's externals definition. No explicit ABI string is
// given, so this defaults to `extern "C"`.
extern {
// Debug printing.
fn ext_print_num(data: i64);
fn ext_print_utf8(offset: i32, size: i32);
fn ext_print_hex(offset: i32, size: i32);
// Host-side allocator.
fn ext_malloc(size: i32) -> i32;
fn ext_free(addr: i32);
// State storage access.
fn ext_get_storage_into(key_data: i32, key_len: i32, value_data: i32, value_len: i32, value_offset: i32) -> i32;
fn ext_set_storage(key_data: i32, key_len: i32, value_data: i32, value_len: i32);
fn ext_storage_root(result_ptr: i32);
fn ext_get_allocated_storage(key_data: i32, key_len: i32, written_out: i32) -> i32;
fn ext_clear_storage(key_data: i32, key_len: i32);
fn ext_clear_prefix(prefix_data: i32, prefix_len: i32);
// Hashing and trie-root helpers.
fn ext_blake2_256_enumerated_trie_root(values_data: i32, lens_data: i32, lens_len: i32, result: i32);
fn ext_blake2_128(data: i32, length: i32, out: i32);
fn ext_blake2_256(data: i32, length: i32, out: i32);
fn ext_twox_64(data: i32, length: i32, out: i32);
fn ext_twox_128(data: i32, length: i32, out: i32);
fn ext_keccak_256(data: i32, length: i32, out: i32);
// Key generation, signing, and signature verification.
fn ext_ed25519_generate(id_data: i32, seed: i32, seed_len: i32, out: i32);
fn ext_ed25519_verify(msg_data: i32, msg_len: i32, sig_data: i32, pubkey_data: i32) -> i32;
fn ext_sr25519_generate(id_data: i32, seed: i32, seed_len: i32, out: i32);
fn ext_sr25519_public_keys(id_data: i32, result_len: i32) -> i32;
fn ext_sr25519_sign(id_data: i32, pubkey_data: i32, msg_data: i32, msg_len: i32, out: i32) -> i32;
fn ext_sr25519_verify(msg_data: i32, msg_len: i32, sig_data: i32, pubkey_data: i32) -> i32;
fn ext_secp256k1_ecdsa_recover(msg_data: i32, sig_data: i32, pubkey_data: i32) -> i32;
// Node / offchain-worker facilities.
fn ext_is_validator() -> i32;
fn ext_local_storage_set(kind: i32, key: i32, key_len: i32, value: i32, value_len: i32);
fn ext_local_storage_get(kind: i32, key: i32, key_len: i32, value_len: i32) -> i32;
fn ext_local_storage_compare_and_set(kind: i32, key: i32, key_len: i32, old_value: i32, old_value_len: i32, new_value: i32, new_value_len: i32) -> i32;
fn ext_network_state(written_out: i32) -> i32;
fn ext_submit_transaction(data: i32, len: i32) -> i32;
}
// Thin `#[no_mangle]` exports forwarding to the `ext_*` host imports, so the
// host can call each by name from the instantiated module. Every body is a
// single FFI call; `unsafe` is required because the callees are foreign
// functions.
#[no_mangle]
pub extern fn test_ext_print_num(data: i64) {
unsafe {
ext_print_num(data);
}
}
#[no_mangle]
pub extern fn test_ext_print_utf8(offset: i32, size: i32) {
unsafe {
ext_print_utf8(offset, size);
}
}
#[no_mangle]
pub extern fn test_ext_print_hex(offset: i32, size: i32) {
unsafe {
ext_print_hex(offset, size);
}
}
#[no_mangle]
pub extern fn test_ext_malloc(size: i32) -> i32 {
unsafe {
ext_malloc(size)
}
}
#[no_mangle]
pub extern fn test_ext_free(addr: i32) {
unsafe {
ext_free(addr);
}
}
#[no_mangle]
pub extern fn test_ext_get_storage_into(key_data: i32, key_len: i32, value_data: i32, value_len: i32, value_offset: i32) -> i32 {
unsafe {
ext_get_storage_into(key_data, key_len, value_data, value_len, value_offset)
}
}
#[no_mangle]
pub extern fn test_ext_set_storage(key_data: i32, key_len: i32, value_data: i32, value_len: i32) {
unsafe {
ext_set_storage(key_data, key_len, value_data, value_len)
}
}
#[no_mangle]
pub extern fn test_ext_storage_root(result_ptr: i32) {
unsafe {
ext_storage_root(result_ptr)
}
}
#[no_mangle]
pub extern fn test_ext_get_allocated_storage(key_data: i32, key_len: i32, written_out: i32) -> i32 {
unsafe {
ext_get_allocated_storage(key_data, key_len, written_out)
}
}
#[no_mangle]
pub extern fn test_ext_clear_storage(key_data: i32, key_len: i32) {
unsafe {
ext_clear_storage(key_data, key_len)
}
}
#[no_mangle]
pub extern fn test_ext_clear_prefix(prefix_data: i32, prefix_len: i32) {
unsafe {
ext_clear_prefix(prefix_data, prefix_len)
}
}
// `#[no_mangle]` exports for the hashing, trie-root, and signature host
// functions; same single-call forwarding pattern as the wrappers above.
#[no_mangle]
pub extern fn test_ext_blake2_256_enumerated_trie_root(values_data: i32, lens_data: i32, lens_len: i32, result: i32) {
unsafe {
ext_blake2_256_enumerated_trie_root(values_data, lens_data, lens_len, result)
}
}
#[no_mangle]
pub extern fn test_ext_blake2_256(data: i32, length: i32, out: i32) {
unsafe {
ext_blake2_256(data, length, out)
}
}
#[no_mangle]
pub extern fn test_ext_blake2_128(data: i32, length: i32, out: i32) {
unsafe {
ext_blake2_128(data, length, out)
}
}
#[no_mangle]
pub extern fn test_ext_ed25519_generate(id_data: i32, seed: i32, seed_len: i32, out: i32) {
unsafe {
ext_ed25519_generate(id_data, seed, seed_len, out)
}
}
#[no_mangle]
pub extern fn test_ext_ed25519_verify(msg_data: i32, msg_len: i32, sig_data: i32, pubkey_data: i32) -> i32 {
unsafe {
ext_ed25519_verify(msg_data, msg_len, sig_data, pubkey_data)
}
}
#[no_mangle]
pub extern fn test_ext_twox_64(data: i32, length: i32, out: i32) {
unsafe {
ext_twox_64(data, length, out)
}
}
#[no_mangle]
pub extern fn test_ext_twox_128(data: i32, length: i32, out: i32) {
unsafe {
ext_twox_128(data, length, out)
}
}
#[no_mangle]
pub extern fn test_ext_keccak_256(data: i32, length: i32, out: i32) {
unsafe {
ext_keccak_256(data, length, out)
}
}
#[no_mangle]
pub extern fn test_ext_sr25519_generate(id_data: i32, seed: i32, seed_len: i32, out: i32) {
unsafe {
ext_sr25519_generate(id_data, seed, seed_len, out)
}
}
#[no_mangle]
pub extern fn test_ext_sr25519_public_keys(id_data: i32, result_len: i32) -> i32 {
unsafe {
ext_sr25519_public_keys(id_data, result_len)
}
}
#[no_mangle]
pub extern fn test_ext_sr25519_sign(id_data: i32, pubkey_data: i32, msg_data: i32, msg_len: i32, out: i32) -> i32 {
unsafe {
ext_sr25519_sign(id_data, pubkey_data, msg_data, msg_len, out)
}
}
#[no_mangle]
pub extern fn test_ext_sr25519_verify(msg_data: i32, msg_len: i32, sig_data: i32, pubkey_data: i32) -> i32 {
unsafe {
ext_sr25519_verify(msg_data, msg_len, sig_data, pubkey_data)
}
}
#[no_mangle]
pub extern fn test_ext_secp256k1_ecdsa_recover(msg_data: i32, sig_data: i32, pubkey_data: i32) -> i32 {
unsafe {
ext_secp256k1_ecdsa_recover(msg_data, sig_data, pubkey_data)
}
}
// `#[no_mangle]` exports for the node-status, local-storage, and network host
// functions; same single-call forwarding pattern as the wrappers above.
#[no_mangle]
pub extern fn test_ext_is_validator() -> i32 {
unsafe {
ext_is_validator()
}
}
#[no_mangle]
pub extern fn test_ext_local_storage_set(kind: i32, key: i32, key_len: i32, value: i32, value_len: i32) {
unsafe {
ext_local_storage_set(kind, key, key_len, value, value_len)
}
}
#[no_mangle]
pub extern fn test_ext_local_storage_get(kind: i32, key: i32, key_len: i32, value_len: i32) -> i32 {
unsafe {
ext_local_storage_get(kind, key, key_len, value_len)
}
}
#[no_mangle]
pub extern fn test_ext_local_storage_compare_and_set(kind: i32, key: i32, key_len: i32, old_value: i32, old_value_len: i32, new_value: i32, new_value_len: i32) -> i32 {
unsafe {
ext_local_storage_compare_and_set(kind, key, key_len, old_value, old_value_len, new_value, new_value_len)
}
}
#[no_mangle]
pub extern fn test_ext_network_state(written_out: i32) -> i32 {
unsafe {
ext_network_state(written_out)
}
}
#[no_mangle]
pub extern fn test_ext_submit_transaction(data: i32, len: i32) -> i32 {
unsafe {
ext_submit_transaction(data, len)
}
}
|
// Compiled only for test builds with the `futures` feature enabled.
#[cfg(all(test, feature = "futures"))]
mod tests {
use futures::future::{empty, err, ok, Future};
// NOTE(review): presumably `FutureInspector` is the extension trait that
// provides `inspect_err` on futures-0.1 futures — confirm in `inspector`.
use inspector::FutureInspector;
// A future that is never ready must not invoke the error inspector.
#[test]
fn future_not_ready() {
let mut inspect_called = false;
let f = empty::<(), ()>();
let _ = f.inspect_err(|_| inspect_called = true).poll();
assert!(!inspect_called);
}
// A successfully resolving future must not invoke the error inspector.
#[test]
fn future_ready() {
let mut inspect_called = false;
let f = ok::<_, ()>(());
let _ = f.inspect_err(|_| inspect_called = true).wait();
assert!(!inspect_called);
}
// A failing future invokes the inspector with the error value — but only in
// builds where inspection is active (debug assertions on, or the
// `debug-only` feature disabled); otherwise it must stay silent.
#[test]
fn future_err() {
let mut inspect_called = false;
let f = err::<(), _>(0);
let _ = f
.inspect_err(|e| {
inspect_called = true;
assert_eq!(*e, 0);
})
.wait();
if cfg!(any(debug_assertions, not(feature = "debug-only"))) {
assert!(inspect_called);
} else {
assert!(!inspect_called);
}
}
}
|
//! Linux-specific things for Heim disk data collection.
use heim::disk::Partition;
/// Resolve a human-readable device name for `partition`.
///
/// On Linux a partition's device node may be a symlink (e.g. with disk
/// encryption); following the link lets I/O be mapped to the real device.
/// Workaround for https://github.com/ClementTsang/bottom/issues/419.
pub fn get_device_name(partition: &Partition) -> String {
    let device = match partition.device() {
        Some(device) => device,
        None => return "Name Unavailable".to_string(),
    };
    let resolved = match std::fs::read_link(device) {
        // Absolute link target: use it directly.
        Ok(target) if target.is_absolute() => target.into_os_string(),
        // Relative link target: resolve it against the device node's directory.
        Ok(target) => {
            let mut candidate = std::path::PathBuf::from(device);
            candidate.pop(); // Drop the link's file name, keeping its directory.
            candidate.push(target);
            match std::fs::canonicalize(candidate) {
                Ok(canonical) => canonical.into_os_string(),
                Err(_) => device.to_os_string(),
            }
        }
        // Not a symlink (or unreadable): keep the original device path.
        Err(_) => device.to_os_string(),
    };
    resolved
        .into_string()
        .unwrap_or_else(|_| "Name Unavailable".to_string())
}
|
#![allow(
clippy::from_iter_instead_of_collect, // I like calling from_iter, damnit.
)]
use std::{convert::TryInto, iter::FromIterator, num::NonZeroU64, ops::Not};
use flac_rs::{
encoder::Block,
frame::Subblock,
headers::{
BitsPerSample, BlockSize, ChannelCount, FrameSize, MetadataBlockStreamInfo, SampleRate,
SamplesInStream,
},
HeaderWriter, BLOCK_SIZE,
};
use md5::Md5;
/// Encode the 16-bit PCM WAV file named by the first CLI argument into
/// /tmp/out.flac.
fn main() {
let wavfile = dbg!(std::env::args()).nth(1).unwrap();
let mut wavfile = std::fs::File::open(wavfile).unwrap();
let (wavheader, body) = wav::read(&mut wavfile).unwrap();
let mut stream_info = streaminfo_from_wav(&wavheader).unwrap();
// Total frames = interleaved sample count / channel count. The header-only
// constructor leaves this Unknown, so fill it in from the body here.
stream_info.samples_in_stream = SamplesInStream::Count(
NonZeroU64::new(
match &body {
wav::BitDepth::Eight(samples) => samples.len() as u64,
wav::BitDepth::Sixteen(samples) => samples.len() as u64,
wav::BitDepth::TwentyFour(samples) => samples.len() as u64,
wav::BitDepth::ThirtyTwoFloat(samples) => samples.len() as u64,
wav::BitDepth::Empty => panic!("empty wav file"),
} / wavheader.channel_count as u64,
)
.unwrap(),
);
// Only 16-bit input is handled below (`as_sixteen` / i16 subblocks).
assert_eq!(stream_info.bits_per_sample.inner(), 16);
// De-interleave the samples into per-channel subblocks, one group of
// BLOCK_SIZE frames at a time.
let block_iter = body
.as_sixteen()
.expect("sixteen bit body")
.chunks(flac_rs::BLOCK_SIZE as usize * stream_info.channels as usize)
.map(|block| {
let mut channels = vec![Vec::new(); stream_info.channels as usize];
let mut i = 0;
// Collate samples from input subblock, round robin style.
for sample in block {
channels[i].push(*sample);
// NOTE(review): `as u8 as usize` — presumably the channel-count type
// converts through u8; confirm it agrees with the plain `as usize`
// casts above.
i = (i + 1) % stream_info.channels as u8 as usize;
}
Vec::from_iter(channels.into_iter().map(|data| Subblock { data }))
});
// Emit the STREAMINFO header (with no extra metadata blocks), then encode
// and append one frame per block; the second argument to `encode` is the
// frame's starting sample offset.
let writer: HeaderWriter<_, i16> = HeaderWriter::new(
std::fs::File::create("/tmp/out.flac").unwrap(),
stream_info.clone(),
);
let mut writer = writer
.write_headers(std::iter::empty())
.expect("writing headers");
for (blocknum, block) in block_iter.enumerate() {
debug_assert!(block.is_empty().not());
let block = Block::from_input(block);
let frame = block
.encode(&stream_info, blocknum as u64 * BLOCK_SIZE as u64)
.expect("cannot create frame");
writer.write_frame(frame).expect("cannot write frame");
}
}
/// Builds a FLAC STREAMINFO metadata block from a WAV header.
///
/// Returns `None` when any header field is out of range for the FLAC
/// types. `samples_in_stream` is left `Unknown` because the header alone
/// does not know the body length; the caller fills it in afterwards.
fn streaminfo_from_wav(wavheader: &wav::Header) -> Option<MetadataBlockStreamInfo> {
    Some(MetadataBlockStreamInfo {
        // Fixed-size blocks: min == max == BLOCK_SIZE.
        min_block_size: BlockSize::new(BLOCK_SIZE as u16)?,
        max_block_size: BlockSize::new(BLOCK_SIZE as u16)?,
        // NOTE(review): 0 presumably encodes "unknown" frame size here --
        // confirm against the flac_rs documentation.
        min_frame_size: FrameSize::new(0)?,
        max_frame_size: FrameSize::new(0)?,
        sample_rate: SampleRate::new(wavheader.sampling_rate)?,
        channels: ChannelCount::new(wavheader.channel_count)?,
        bits_per_sample: BitsPerSample::new(wavheader.bits_per_sample.try_into().ok()?)?,
        samples_in_stream: SamplesInStream::Unknown, // Set with info from body.
        md5_signature: Md5::default(),
    })
}
|
use divisors;
use std::time::{Instant};
/// Times `divisors::get_divisors` on a fixed large input and prints the
/// elapsed duration together with the resulting divisor list.
fn main() {
    let target: u128 = 934832147123321;
    println!("finding divisors of {}", target);
    let started = Instant::now();
    let found = divisors::get_divisors(target);
    println!("time = {:?}, divisors = {:?}", started.elapsed(), found);
}
|
pub mod parser;
pub mod runtime;
pub mod tokenizer;
#[cfg(test)]
mod tests {
    use expr_eval::{evaluator::Environment, val::Val};
    use crate::{parser::parse, runtime, tokenizer::tokenize};

    // Every test drives the full pipeline -- tokenize -> parse -> execute --
    // and then inspects the final variable bindings in the environment.

    /// A `while` loop runs its body until the condition becomes false.
    #[test]
    fn while_loop() {
        let mut env = Environment::new();
        let code =
"a=0
while a <10 {
a = a + 1
}";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(env.get_ref("a"), Ok(&Val::Number(10.0)));
    }

    /// Vector literals evaluate each element expression (arithmetic,
    /// booleans, nested vectors, strings) before binding.
    #[test]
    fn vector_declaration() {
        let mut env = Environment::new();
        let code = r#"a = [1+2,3*4,true,[1,2,3],"hello"]"#;
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(
            env.get_ref("a"),
            Ok(&Val::Vec(vec![
                Val::Number(3.0),
                Val::Number(12.0),
                Val::Bool(true),
                Val::Vec(vec![Val::Number(1.0), Val::Number(2.0), Val::Number(3.0)]),
                Val::Str("hello".to_owned())
            ]))
        );
    }

    /// Indexing reads a single element (0-based).
    #[test]
    fn vector_read() {
        let mut env = Environment::new();
        let code =
"a = [1,2,3]
b = a[1]";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(env.get_ref("b"), Ok(&Val::Number(2.0)));
    }

    /// Assigning through an index mutates the vector in place; the index
    /// may itself be a variable.
    #[test]
    fn vector_write() {
        let mut env = Environment::new();
        let code =
"a=0
b=[0,0,0]
while a < 3 {
b[a]=a
a = a + 1
}";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(
            env.get_ref("b"),
            Ok(&Val::Vec(vec![
                Val::Number(0.0),
                Val::Number(1.0),
                Val::Number(2.0)
            ]))
        );
    }

    /// Chained indexing reads from nested vectors.
    #[test]
    fn vector_2d_read() {
        let mut env = Environment::new();
        let code =
"a=[[1,2,3],[4,5,6],[7,8,9]]
b=a[1][1]";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(env.get_ref("b"), Ok(&Val::Number(5.0)));
    }

    /// Indexing chains to arbitrary depth.
    #[test]
    fn vector_3d_read() {
        let mut env = Environment::new();
        let code =
"a=[[[5]]]
b=a[0][0][0]";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(env.get_ref("b"), Ok(&Val::Number(5.0)));
    }

    /// Assignment through a chained index writes into the inner vector
    /// and leaves the siblings untouched.
    #[test]
    fn vector_2d_write() {
        let mut env = Environment::new();
        let code =
"a=[[1,2,3],[4,5,6],[7,8,9]]
a[1][1]=0";
        let instructions = tokenize(&code);
        // dbg!(&instructions);
        let ast = parse(&instructions).unwrap();
        // dbg!(&ast);
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(
            env.get_ref("a"),
            Ok(&Val::Vec(vec![
                Val::Vec(vec![Val::Number(1.0), Val::Number(2.0), Val::Number(3.0),]),
                Val::Vec(vec![Val::Number(4.0), Val::Number(0.0), Val::Number(6.0),]),
                Val::Vec(vec![Val::Number(7.0), Val::Number(8.0), Val::Number(9.0),])
            ]))
        );
    }

    /// Index reads and writes compose: copy one vector into another
    /// element by element.
    #[test]
    fn vector_copy() {
        let mut env = Environment::new();
        let code =
"a=0
b=[1,2,3]
c=[0,0,0]
while a < 3 {
c[a]=b[a]
a = a + 1
}";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(
            env.get_ref("c"),
            Ok(&Val::Vec(vec![
                Val::Number(1.0),
                Val::Number(2.0),
                Val::Number(3.0)
            ]))
        );
    }

    /// `[]` parses and evaluates to an empty vector.
    #[test]
    fn empty_vector() {
        let mut env = Environment::new();
        let code = "a=[]";
        let instructions = tokenize(&code);
        let ast = parse(&instructions).unwrap();
        runtime::execute(&ast, &mut env).unwrap();
        assert_eq!(env.get_ref("a"), Ok(&Val::Vec(vec![])));
    }
}
|
use std::collections::HashMap;
use bson::{doc, from_document, Bson};
use futures::TryStreamExt;
use semver::VersionReq;
use serde::{Deserialize, Deserializer};
use crate::{
bson::Document,
options::{FindOptions, ReadPreference, SelectionCriteria, SessionOptions},
test::{
log_uncaptured,
spec::merge_uri_options,
util::is_expected_type,
FailPoint,
Serverless,
TestClient,
DEFAULT_URI,
},
Client,
};
use super::{operation::Operation, test_event::CommandStartedEvent};
/// Deserialized representation of one spec-test file.
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub(crate) struct TestFile {
    // Requirements (server version / topology / serverless) that must be
    // satisfied for the tests in this file to run.
    #[serde(rename = "runOn")]
    pub(crate) run_on: Option<Vec<RunOn>>,
    pub(crate) database_name: Option<String>,
    pub(crate) collection_name: Option<String>,
    #[allow(unused)]
    pub(crate) bucket_name: Option<String>,
    // Documents to seed the collection(s) with before running.
    pub(crate) data: Option<TestData>,
    #[cfg(feature = "in-use-encryption-unstable")]
    pub(crate) json_schema: Option<Document>,
    #[cfg(feature = "in-use-encryption-unstable")]
    pub(crate) encrypted_fields: Option<Document>,
    #[cfg(feature = "in-use-encryption-unstable")]
    pub(crate) key_vault_data: Option<Vec<Document>>,
    pub(crate) tests: Vec<Test>,
}
/// One set of environment requirements; a file runs if any of its `runOn`
/// entries is satisfied (see `can_run_on`).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub(crate) struct RunOn {
    pub(crate) min_server_version: Option<String>,
    pub(crate) max_server_version: Option<String>,
    // Topology names the test may run against (e.g. from topology_string).
    pub(crate) topology: Option<Vec<String>>,
    pub(crate) serverless: Option<Serverless>,
}
impl RunOn {
pub(crate) fn can_run_on(&self, client: &TestClient) -> bool {
if let Some(ref min_version) = self.min_server_version {
let req = VersionReq::parse(&format!(">= {}", &min_version)).unwrap();
if !req.matches(&client.server_version) {
log_uncaptured(format!(
"runOn mismatch: required server version >= {}, got {}",
min_version, client.server_version
));
return false;
}
}
if let Some(ref max_version) = self.max_server_version {
let req = VersionReq::parse(&format!("<= {}", &max_version)).unwrap();
if !req.matches(&client.server_version) {
log_uncaptured(format!(
"runOn mismatch: required server version <= {}, got {}",
max_version, client.server_version
));
return false;
}
}
if let Some(ref topology) = self.topology {
if !topology.contains(&client.topology_string()) {
log_uncaptured(format!(
"runOn mismatch: required topology in {:?}, got {:?}",
topology,
client.topology_string()
));
return false;
}
}
if let Some(ref serverless) = self.serverless {
if !serverless.can_run() {
log_uncaptured(format!(
"runOn mismatch: required serverless {:?}",
serverless
));
return false;
}
}
true
}
}
/// Seed data: either one list of documents for a single collection, or a
/// map from collection name to its documents.
#[derive(Debug, Deserialize)]
#[serde(untagged)]
pub(crate) enum TestData {
    Single(Vec<Document>),
    Many(HashMap<String, Vec<Document>>),
}
/// A single test case within a spec-test file.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct Test {
    pub(crate) description: String,
    // When present, the test is skipped and this explains why.
    pub(crate) skip_reason: Option<String>,
    pub(crate) use_multiple_mongoses: Option<bool>,
    #[serde(default, rename = "clientOptions")]
    pub(crate) client_options: Option<ClientOptions>,
    // Server fail point to enable for the duration of the test.
    pub(crate) fail_point: Option<FailPoint>,
    pub(crate) session_options: Option<HashMap<String, SessionOptions>>,
    pub(crate) operations: Vec<Operation>,
    // Expected command-started events, unwrapped by the custom deserializer.
    #[serde(default, deserialize_with = "deserialize_command_started_events")]
    pub(crate) expectations: Option<Vec<CommandStartedEvent>>,
    pub(crate) outcome: Option<Outcome>,
}
/// Client configuration for a test, reduced to a ready-to-use URI (built
/// by merging the file's options into the default URI -- see the custom
/// `Deserialize` impl below).
#[derive(Debug)]
pub(crate) struct ClientOptions {
    pub(crate) uri: String,
    #[cfg(feature = "in-use-encryption-unstable")]
    pub(crate) auto_encrypt_opts: Option<crate::client::csfle::options::AutoEncryptionOptions>,
}
impl<'de> Deserialize<'de> for ClientOptions {
    // Custom impl: test files specify client options as a document of URI
    // options (plus, optionally, `autoEncryptOpts`), which is merged into
    // the default URI rather than deserialized field by field.
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        #[cfg(feature = "in-use-encryption-unstable")]
        use serde::de::Error;
        #[allow(unused_mut)]
        let mut uri_options = Document::deserialize(deserializer)?;
        // Remove the encryption options first so they are not treated as
        // URI options during the merge below.
        #[cfg(feature = "in-use-encryption-unstable")]
        let auto_encrypt_opts = uri_options
            .remove("autoEncryptOpts")
            .map(bson::from_bson)
            .transpose()
            .map_err(D::Error::custom)?;
        let uri = merge_uri_options(&DEFAULT_URI, Some(&uri_options), true);
        Ok(Self {
            uri,
            #[cfg(feature = "in-use-encryption-unstable")]
            auto_encrypt_opts,
        })
    }
}
/// Expected end state of the database after a test runs.
#[derive(Debug, Deserialize)]
pub(crate) struct Outcome {
    pub(crate) collection: CollectionOutcome,
}
impl Outcome {
    /// Asserts that the collection's actual contents match the expected
    /// documents recorded in this outcome.
    ///
    /// Reads from the primary, sorted by `_id`, so the comparison is
    /// deterministic; panics on any mismatch.
    pub(crate) async fn assert_matches_actual(
        &self,
        db_name: &str,
        coll_name: &str,
        client: &Client,
    ) {
        use crate::coll::options::CollectionOptions;
        // The outcome may name a different collection than the test used.
        let coll_name = match self.collection.name.as_deref() {
            Some(name) => name,
            None => coll_name,
        };
        #[cfg(not(feature = "in-use-encryption-unstable"))]
        let coll_opts = CollectionOptions::default();
        // NOTE(review): presumably LOCAL read concern is needed when
        // encryption is enabled -- confirm the rationale.
        #[cfg(feature = "in-use-encryption-unstable")]
        let coll_opts = CollectionOptions::builder()
            .read_concern(crate::options::ReadConcern::LOCAL)
            .build();
        let coll = client
            .database(db_name)
            .collection_with_options(coll_name, coll_opts);
        let selection_criteria = SelectionCriteria::ReadPreference(ReadPreference::Primary);
        let options = FindOptions::builder()
            .sort(doc! { "_id": 1 })
            .selection_criteria(selection_criteria)
            .build();
        let actual_data: Vec<Document> = coll
            .find(None, options)
            .await
            .unwrap()
            .try_collect()
            .await
            .unwrap();
        assert_data_matches(&actual_data, &self.collection.data);
    }
}
/// Panics unless `actual` and `expected` have the same length and every
/// corresponding pair of documents matches per `assert_doc_matches`.
fn assert_data_matches(actual: &[Document], expected: &[Document]) {
    assert_eq!(
        actual.len(),
        expected.len(),
        "data length mismatch, expected {:?}, got {:?}",
        expected,
        actual
    );
    for (act, exp) in actual.iter().zip(expected.iter()) {
        assert_doc_matches(act, exp);
    }
}
/// Panics unless `actual` matches `expected` key by key.
///
/// An expectation recognized by `is_expected_type` matches any value
/// whose element type is in the returned list; nested documents are
/// compared recursively; all other values must be equal.
fn assert_doc_matches(actual: &Document, expected: &Document) {
    assert_eq!(
        actual.len(),
        expected.len(),
        "doc length mismatch, expected {:?}, got {:?}",
        expected,
        actual
    );
    for (k, expected_val) in expected {
        let actual_val = actual
            .get(k)
            .unwrap_or_else(|| panic!("no value for {:?}, expected {:?}", k, expected_val));
        // Type-only expectation: accept any value of a listed type.
        if let Some(types) = is_expected_type(expected_val) {
            assert!(
                types.contains(&actual_val.element_type()),
                "expected type {:?}, actual value {:?}",
                types,
                actual_val
            );
            continue;
        }
        match (expected_val, actual_val) {
            (Bson::Document(exp_d), Bson::Document(act_d)) => assert_doc_matches(act_d, exp_d),
            (e, a) => assert_eq!(e, a, "mismatch for {:?}, expected {:?} got {:?}", k, e, a),
        }
    }
}
/// Expected contents of one collection after the test.
#[derive(Debug, Deserialize)]
pub(crate) struct CollectionOutcome {
    // When `None`, the collection the test ran against is used.
    pub(crate) name: Option<String>,
    pub(crate) data: Vec<Document>,
}
fn deserialize_command_started_events<'de, D>(
deserializer: D,
) -> std::result::Result<Option<Vec<CommandStartedEvent>>, D::Error>
where
D: Deserializer<'de>,
{
let docs = Vec::<Document>::deserialize(deserializer)?;
Ok(Some(
docs.iter()
.map(|doc| {
let event = doc.get_document("command_started_event").unwrap();
from_document(event.clone()).unwrap()
})
.collect(),
))
}
|
//!
//! Support to register a [`GbmDevice`](GbmDevice)
//! to an open [`Session`](::backend::session::Session).
//!
use drm::control::{crtc, Device as ControlDevice, ResourceInfo};
use gbm::BufferObject;
use std::cell::RefCell;
use std::collections::HashMap;
use std::os::unix::io::RawFd;
use std::rc::{Rc, Weak};
use super::{GbmDevice, GbmSurfaceInternal};
use crate::backend::drm::{RawDevice, RawSurface};
use crate::backend::session::{AsSessionObserver, SessionObserver};
/// [`SessionObserver`](SessionObserver)
/// linked to the [`GbmDevice`](GbmDevice) it was
/// created from.
pub struct GbmDeviceObserver<
    S: SessionObserver + 'static,
    D: RawDevice + ControlDevice + AsSessionObserver<S> + 'static,
> {
    // Observer of the underlying raw drm device; pause/activate are
    // forwarded to it.
    observer: S,
    // Weak handle to the device's surfaces, keyed by crtc; weak so the
    // observer does not keep the device alive.
    backends: Weak<RefCell<HashMap<crtc::Handle, Weak<GbmSurfaceInternal<D>>>>>,
    logger: ::slog::Logger,
}
impl<S: SessionObserver + 'static, D: RawDevice + ControlDevice + AsSessionObserver<S> + 'static>
    AsSessionObserver<GbmDeviceObserver<S, D>> for GbmDevice<D>
{
    /// Creates an observer that wraps the underlying raw device's
    /// observer and tracks this device's surfaces weakly.
    fn observer(&mut self) -> GbmDeviceObserver<S, D> {
        GbmDeviceObserver {
            // Double deref: borrow the RefCell, then go through the smart
            // pointer to reach the raw device's observer().
            observer: (**self.dev.borrow_mut()).observer(),
            backends: Rc::downgrade(&self.backends),
            logger: self.logger.clone(),
        }
    }
}
impl<S: SessionObserver + 'static, D: RawDevice + ControlDevice + AsSessionObserver<S> + 'static>
    SessionObserver for GbmDeviceObserver<S, D>
{
    /// Forwards a session pause to the wrapped drm observer.
    fn pause(&mut self, devnum: Option<(u32, u32)>) {
        self.observer.pause(devnum);
    }

    /// Re-activates the device after a session switch: resumes the
    /// wrapped observer, restarts page-flipping on every still-alive
    /// backend, restores the hardware cursor, and prunes dead backends.
    fn activate(&mut self, devnum: Option<(u32, u32, Option<RawFd>)>) {
        self.observer.activate(devnum);
        let mut crtcs = Vec::new();
        if let Some(backends) = self.backends.upgrade() {
            for (crtc, backend) in backends.borrow().iter() {
                if let Some(backend) = backend.upgrade() {
                    // restart rendering loop, if it was previously running
                    if let Some(Err(err)) = backend
                        .current_frame_buffer
                        .get()
                        .map(|fb| backend.crtc.page_flip(fb.handle()))
                    {
                        warn!(self.logger, "Failed to restart rendering loop. Error: {}", err);
                    }
                    // reset cursor
                    {
                        // NOTE(review): reads the cursor cell through a raw
                        // pointer; presumably no aliasing mutable borrow can
                        // exist here -- confirm against GbmSurfaceInternal.
                        let &(ref cursor, ref hotspot): &(BufferObject<()>, (u32, u32)) =
                            unsafe { &*backend.cursor.as_ptr() };
                        if crtc::set_cursor2(
                            &*backend.dev.borrow(),
                            *crtc,
                            cursor,
                            ((*hotspot).0 as i32, (*hotspot).1 as i32),
                        )
                        .is_err()
                        {
                            // Fall back to the legacy cursor API (no hotspot).
                            if let Err(err) = crtc::set_cursor(&*backend.dev.borrow(), *crtc, cursor) {
                                error!(self.logger, "Failed to reset cursor. Error: {}", err);
                            }
                        }
                    }
                } else {
                    // Surface is gone; remember its crtc for cleanup below.
                    crtcs.push(*crtc);
                }
            }
            // Remove dead entries outside the immutable-borrow iteration.
            for crtc in crtcs {
                backends.borrow_mut().remove(&crtc);
            }
        }
    }
}
|
use serenity::prelude::*;
use serenity::model::channel::Message;
use serenity::framework::standard::{
CommandResult,
macros::{
command,
group
}
};
// Declares the "general" command group for the standard framework; every
// name listed in `commands` must have a matching #[command] fn below.
group!({
    name: "general",
    // options: {
    //     default_command: hello
    // },
    commands: [restart, test, ping, hello, help, franken_toaster, youmustconstructadditionalpylons],
});
/// Restarts the bot by writing the invoking channel's id into a trigger
/// file that an external watcher reacts to.
#[command]
#[aliases("reboot")]
fn restart(context: &mut Context, message: &Message) -> CommandResult
{
    {
        // Persist the channel id so the restarted bot can announce itself
        // in the same channel.
        let channel_json = serde_json::to_string(&message.channel_id)?;
        use std::fs;
        use std::io::Write;
        let mut trigger_file = fs::File::create("/home/toaster/fracking-toaster/.trigger")?;
        trigger_file.write_all(channel_json.as_bytes())?;
    }
    message.channel_id.say(&context.http, "Touching trigger to restart bot...")?;
    // NOTE(review): this `touch` looks redundant -- File::create above
    // already created/updated the trigger file. Confirm the watcher needs
    // a second mtime bump before removing it.
    use std::process::Command as RustCommand;
    RustCommand::new("touch")
        .arg("/home/toaster/fracking-toaster/.trigger")
        .output()
        .expect("Failed to touch trigger!");
    Ok(())
}
/// Simple liveness check command.
#[command]
fn test(context: &mut Context, message: &Message) -> CommandResult {
    message.channel_id.say(&context.http, "I work right!")?;
    Ok(())
}
/// Replies directly to the invoking message.
#[command]
fn ping(context: &mut Context, message: &Message) -> CommandResult {
    message.reply(context, "Pong! Was I dead?")?;
    Ok(())
}
/// Greets the channel.
#[command]
fn hello(context: &mut Context, message: &Message) -> CommandResult {
    message.channel_id.say(&context.http, "Hi!")?;
    Ok(())
}
/// Placeholder help text.
#[command]
fn help(context: &mut Context, message: &Message) -> CommandResult {
    message.channel_id.say(&context.http, "I'm just a toaster! What would I be able to do to help?\n (Psst: I'm under construction, check back in later!)")?;
    Ok(())
}
/// Joke command.
#[command]
fn franken_toaster(context: &mut Context, message: &Message) -> CommandResult {
    message.channel_id.say(&context.http, "Not sure if I'm alive")?;
    Ok(())
}
#[command]
fn youmustconstructadditionalpylons(ctx: &mut Context, msg: &Message) -> CommandResult
{
msg.channel_id.say(&ctx.http, "StolenLight asked for this...")?;
Ok(())
} |
use super::error::Result;
use duktape::prelude::*;
use duktape::types::{FromDuktape, ToDuktape};
use duktape::{
error::Error as DukError, error::ErrorKind as DukErrorKind, error::Result as DukResult,
};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use url::Url;
use uuid::Uuid;
/// A unit of work attached to a task, identified by a path string.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub enum Work {
    Path(String),
}
impl Work {
    /// Builds a `Work::Path` from any string-like value.
    pub fn path<T: AsRef<str>>(path: T) -> Work {
        let owned = path.as_ref().to_string();
        Work::Path(owned)
    }
}
/// A crawl/work item: a URL paired with the work to perform on it.
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
pub struct Task {
    // Unique task identity (random v4 UUID by default).
    id: Uuid,
    #[serde(with = "url_serde")]
    url: Url,
    work: Work,
    root: String,
}
impl Task {
    /// Creates a task with a fresh random id.
    ///
    /// Fails when `url` cannot be parsed as an absolute URL.
    pub fn new<T: AsRef<str>>(root: T, url: T, work: Work) -> Result<Task> {
        let url = Url::parse(url.as_ref())?;
        let id = Uuid::new_v4();
        Ok(Task {
            work,
            url,
            id,
            root: root.as_ref().to_string(),
        })
    }

    /// The URL this task targets.
    pub fn url(&self) -> &Url {
        &self.url
    }

    /// Replaces the target URL.
    pub fn set_url<T: AsRef<Url>>(&mut self, url: T) -> &mut Self {
        self.url = url.as_ref().clone();
        self
    }

    /// The work attached to this task.
    pub fn work(&self) -> &Work {
        &self.work
    }

    /// Replaces the attached work.
    pub fn set_work(&mut self, work: Work) -> &mut Self {
        self.work = work;
        self
    }

    /// The task's unique id.
    pub fn id(&self) -> &Uuid {
        &self.id
    }

    /// Replaces the root string.
    pub fn set_root(&mut self, root: String) -> &mut Self {
        self.root = root;
        self
    }

    /// The root string.
    pub fn root(&self) -> &str {
        self.root.as_str()
    }

    /// Consumes the task, attaching fetched page `html` to produce a
    /// `ParseTask` (id, url, work and root carry over unchanged).
    pub fn into_parse_task(self, html: &str) -> ParseTask {
        ParseTask {
            id: self.id,
            url: self.url,
            html: html.to_owned(),
            work: self.work,
            root: self.root,
        }
    }
}
impl<'de> FromDuktape<'de> for Task {
    /// Reconstructs a `Task` from a duktape object on the stack.
    ///
    /// `url` must parse as a URL and `work` must be a string path; a
    /// missing or invalid `id` gets a fresh UUID, and a missing `root`
    /// defaults to the empty string.
    fn from_context(ctx: &'de Context, index: Idx) -> DukResult<Self> {
        let obj = ctx.get::<duktape::types::Object>(index)?;

        let url = match Url::parse(obj.get::<_, &str>("url")?) {
            Ok(parsed) => parsed,
            Err(_) => {
                return Err(DukErrorKind::TypeError("should be a url".to_owned()).into());
            }
        };

        let work_field = obj.get::<_, duktape::types::Ref>("work")?;
        let work = match work_field.get_type() {
            Type::String => Work::Path(work_field.get::<String>()?),
            _ => return Err(DukErrorKind::TypeError(format!("invalid error")).into()),
        };

        // Reuse a valid string id when one is present; otherwise mint a
        // brand new UUID (also when the string fails to parse).
        let mut id = Uuid::new_v4();
        if obj.has("id") && obj.get::<_, Ref>("id").unwrap().is(Type::String) {
            if let Ok(parsed) = obj.get::<_, String>("id").unwrap().parse::<Uuid>() {
                id = parsed;
            }
        }

        let root = if obj.has("root") && obj.get::<_, Ref>("root").unwrap().is(Type::String) {
            obj.get::<_, String>("root")?
        } else {
            "".to_string()
        };

        Ok(Task {
            id,
            url,
            work,
            root,
        })
    }
}
impl ToDuktape for Task {
    /// Pushes this task onto the duktape stack as a plain object of the
    /// shape `{url, work: {type, value}, id, root}`, consuming it.
    fn to_context(self, ctx: &Context) -> DukResult<()> {
        let o = ctx.create::<duktape::types::Object>()?;
        let w = ctx.create::<duktape::types::Object>()?;
        match self.work {
            Work::Path(p) => {
                w.set("type", "path");
                w.set("value", p);
            }
        };
        o.set("url", self.url.as_str()).set("work", w);
        // Hyphenated form round-trips through from_context's UUID parse.
        o.set("id", self.id.to_hyphenated().to_string());
        o.set("root", self.root);
        ctx.push(o)?;
        Ok(())
    }
}
impl ToDuktape for &Task {
    /// Same as the owned impl, but borrows the task and clones nothing
    /// beyond what duktape needs.
    fn to_context(self, ctx: &Context) -> DukResult<()> {
        let o = ctx.create::<duktape::types::Object>()?;
        let w = ctx.create::<duktape::types::Object>()?;
        match &self.work {
            Work::Path(p) => {
                w.set("type", "path");
                w.set("value", p);
            }
        };
        o.set("url", self.url.as_str()).set("work", w);
        o.set("id", self.id.to_hyphenated().to_string());
        o.set("root", &self.root);
        ctx.push(o)?;
        Ok(())
    }
}
/// A `Task` plus the fetched page body, ready for parsing.
#[derive(Debug, PartialEq)]
pub struct ParseTask {
    pub(crate) id: Uuid,
    pub(crate) url: Url,
    // Raw HTML of the fetched page.
    pub(crate) html: String,
    pub(crate) work: Work,
    pub(crate) root: String,
}
impl<'de> FromDuktape<'de> for ParseTask {
    /// Reads a `ParseTask` back from a duktape object.
    ///
    /// `url` must parse and `work` must be a string path; a missing or
    /// invalid `id` gets a fresh UUID; a missing `html` defaults to the
    /// empty string; `root` is required.
    fn from_context(ctx: &'de Context, index: Idx) -> DukResult<Self> {
        let t = ctx.get::<duktape::types::Object>(index)?;
        let url = Url::parse(t.get::<_, &str>("url")?).or_else::<DukError, _>(|e| {
            Err(DukErrorKind::TypeError("should be a url".to_owned()).into())
        })?;
        let work_p = t.get::<_, duktape::types::Ref>("work")?;
        let work = match work_p.get_type() {
            Type::String => Work::Path(work_p.get::<String>()?),
            _ => return Err(DukErrorKind::TypeError(format!("invalid error")).into()),
        };
        // Fall back to a fresh UUID when the id is absent or unparsable.
        let id = if t.has("id") && t.get::<_, Ref>("id").unwrap().is(Type::String) {
            match Uuid::parse_str(t.get::<_, &str>("id").unwrap()) {
                Ok(id) => id,
                Err(_) => Uuid::new_v4(),
            }
        } else {
            Uuid::new_v4()
        };
        let html = t.get::<_, String>("html").unwrap_or("".to_string());
        let root = t.get::<_, String>("root")?;
        Ok(ParseTask {
            id,
            url,
            work,
            html,
            root,
        })
    }
}
impl ToDuktape for ParseTask {
    /// Pushes this parse task onto the duktape stack as a plain object
    /// (`{url, work: {type, value}, id, html, root}`), consuming it.
    fn to_context(self, ctx: &Context) -> DukResult<()> {
        let o = ctx.create::<duktape::types::Object>()?;
        let w = ctx.create::<duktape::types::Object>()?;
        match self.work {
            Work::Path(p) => {
                w.set("type", "path");
                w.set("value", p);
            }
        };
        o.set("url", self.url.as_str()).set("work", w);
        o.set("id", self.id.to_hyphenated().to_string());
        o.set("html", self.html);
        o.set("root", self.root);
        ctx.push(o)?;
        Ok(())
    }
}
impl ToDuktape for &ParseTask {
    /// Same as the owned impl, but borrows the parse task.
    fn to_context(self, ctx: &Context) -> DukResult<()> {
        let o = ctx.create::<duktape::types::Object>()?;
        let w = ctx.create::<duktape::types::Object>()?;
        match &self.work {
            Work::Path(p) => {
                w.set("type", "path");
                w.set("value", p);
            }
        };
        o.set("url", self.url.as_str()).set("work", w);
        o.set("id", self.id.to_hyphenated().to_string());
        o.set("html", &self.html);
        o.set("root", &self.root);
        ctx.push(o)?;
        Ok(())
    }
}
|
// SPDX-License-Identifier: Apache-2.0
#![deny(clippy::all)]
/// Zero-sized token passed on the very first entry into a keep.
pub struct Start;
/// A trait containing the function used to enter a keep
pub trait Keep<T> {
    /// Enter the keep with the provided input until the next event
    // Consumes the boxed keep; the next continuation (if any) is carried
    // inside the returned Event.
    fn enter(self: Box<Self>, input: T) -> std::io::Result<Event>;
}
/// All possible keep events
#[allow(non_camel_case_types)]
pub enum Event {
    /// Exit the process with the supplied exit value
    exit(i32),
    /// Get the user identifier of the process
    // Carries the continuation to re-enter with the uid as input.
    getuid(Box<dyn Keep<libc::uid_t>>),
}
/// Drives a keep to completion: repeatedly re-enters it with the data it
/// requested until it asks to exit, which terminates the whole process.
pub fn main(keep: Box<dyn Keep<Start>>) -> ! {
    let mut event = keep.enter(Start).unwrap();
    loop {
        event = match event {
            // Terminate with the keep's status code; never returns.
            Event::exit(status) => std::process::exit(status),
            // Answer the getuid request and resume the continuation.
            Event::getuid(keep) => keep.enter(unsafe { libc::getuid() }).unwrap(),
        }
    }
}
|
use crate::errors::ApiError;
use crate::models::duels::*;
use crate::schema::duels::dsl::{
duels as table_duels, id as col_id, player_1 as col_player_1,
player_2 as col_player_2,
};
use crate::DbConn;
use diesel::prelude::*;
use rocket_contrib::json::{Json, JsonValue};
/// Looks up the id of the most recently created duel between the two
/// players in `new_duel`.
///
/// Returns a 404 `ApiError` when the query fails *or* when it succeeds
/// with zero rows (the previous version panicked via `unwrap()` on an
/// empty result set).
fn get_duel_id(
    new_duel: &NewDuel,
    conn: &SqliteConnection,
) -> Result<i32, ApiError> {
    table_duels
        .filter(col_player_1.eq(new_duel.player_1))
        .filter(col_player_2.eq(new_duel.player_2))
        .select(col_id)
        .order(col_id.desc())
        .limit(1)
        .load::<i32>(conn)
        .ok()
        // An Ok result can still be empty; treat that as "not found" too.
        .and_then(|ids| ids.first().copied())
        .ok_or_else(|| ApiError::new("Could not find duel", 404))
}
/// POST /: creates a duel between the two player ids in the JSON body and
/// responds with `{"id": <new duel id>}`.
#[post("/", format = "json", data = "<player_ids>")]
pub fn create(
    conn: DbConn,
    player_ids: Json<[i32; 2]>,
) -> Result<JsonValue, ApiError> {
    let [player_1, player_2] = player_ids.into_inner();
    let new_duel = NewDuel { player_1, player_2 };
    diesel::insert_into(table_duels)
        .values(&new_duel)
        .execute(&*conn)
        .map_err(|_| ApiError::new("Could not create duel", 500))
        // SQLite insert does not return the new id, so re-query for the
        // latest duel between these two players.
        .and_then(|_| get_duel_id(&new_duel, &*conn))
        .map(|duel_id| json!({ "id": duel_id }))
}
/// GET /<id_duel>: fetches one duel by primary key, 404 on any failure.
#[get("/<id_duel>")]
pub fn get_by_id(conn: DbConn, id_duel: i32) -> Result<JsonValue, ApiError> {
    match table_duels.find(id_duel).first::<Duel>(&*conn) {
        Ok(duel) => Ok(json!(duel)),
        Err(_) => Err(ApiError::new("Could not find duel", 404)),
    }
}
#[get("/")]
pub fn get_all(conn: DbConn) -> Result<JsonValue, ApiError> {
table_duels
.load::<Duel>(&*conn)
.map(|result| json!(result))
.map_err(|_| ApiError::new("Error fetching duels", 500))
}
|
use crate::{FilePos, Span};
use std::collections::HashMap;
// pub fn _init_line_info(global_context: &mut OldContext) -> LineInfoContext {
// let file_pos = FilePos::new();
// let _line_info_list: Vec<usize> = Vec::with_capacity(200);
// let mut context = LineInfoContext::new();
// let li = LineInfo::start_line(file_pos, 0, 0, &mut context, global_context);
// context
// }
/// Discriminates the origin kind of a source line.
pub enum LineInfoType {
    Asm,
    Ext,
    Macro,
    MacroParameter,
}

impl LineInfoType {
    /// Numeric tag for this variant; occupies the low two bits of a
    /// packed key value.
    fn value_of(&self) -> usize {
        match self {
            LineInfoType::Asm => 0,
            LineInfoType::Ext => 1,
            LineInfoType::Macro => 2,
            LineInfoType::MacroParameter => 3,
        }
    }

    /// Packs a two-bit type tag together with a count into one value.
    fn make_type(info_type: usize, count: usize) -> usize {
        (count << 2) | info_type
    }

    /// Recovers the type tag (low two bits) from a packed value.
    fn get_type(info_type: usize) -> usize {
        info_type & 0x03
    }

    /// Recovers the count (all bits above the low two) from a packed value.
    fn get_count(count: usize) -> usize {
        count >> 2
    }
}
/// Per-line bookkeeping: identity, reference count, and the spans of code
/// the line generated.
#[derive(Clone)]
pub struct LineInfo {
    node: String, // hash table node - so an entry in the hash map
    // Sequential id; usize::MAX until assigned (see `new`).
    id: usize,
    ref_count: usize,
    // Packed (type, count) tag plus file position; hash-map key.
    key: LineInfoKey,
    spans: Vec<Span>,
    open_spans: Vec<usize>,
}
impl LineInfo {
    // pub fn start_line(
    //     pos: FilePos,
    //     key_type: usize,
    //     count: usize,
    //     li_context: &mut LineInfoContext,
    //     global_context: &mut OldContext,
    // ) {
    //     let key = LineInfoKey::new(LineInfoType::make_type(key_type, count), pos);
    //     let mut li = match li_context.find_line_info(&key) {
    //         Some(li) => li.clone(),
    //         None => LineInfo::new(key),
    //     };
    //     li.open_span_list(global_context);
    //     li_context.add_line_info(li);
    // }

    /// Creates a fresh, unregistered line info for `key` with no spans
    /// and a sentinel id.
    fn new(key: LineInfoKey) -> LineInfo {
        LineInfo {
            node: String::new(),
            // Sentinel: a real id has not been assigned yet.
            id: usize::MAX,
            ref_count: 0,
            key,
            spans: vec![],
            open_spans: vec![],
        }
    }

    // fn open_span_list(&mut self, global_context: &mut OldContext) {
    //     let active_segment = global_context.get_active_segment();
    //     let span = Span::new(active_segment);
    //     self.spans.push(span);
    //     let segment_list = global_context.get_segment_list();
    //     for segment in segment_list {
    //         if segment != active_segment {
    //             let span = Span::new(segment);
    //             self.spans.push(span);
    //         }
    //     }
    // }
}
/// Hash key identifying a line: a packed (type, count) value plus the
/// file position it came from.
#[derive(Eq, PartialEq, Hash, Clone)]
struct LineInfoKey {
    // Packed via LineInfoType::make_type (low two bits = type tag).
    key_type: usize,
    pos: FilePos,
}

impl LineInfoKey {
    /// Bundles a packed type value and a position into a key.
    fn new(key_type: usize, pos: FilePos) -> LineInfoKey {
        LineInfoKey { key_type, pos }
    }
}
/// Owns all line infos: a lookup table keyed by `LineInfoKey` plus the
/// ordered list of lines as they were added.
pub struct LineInfoContext {
    table: HashMap<LineInfoKey, LineInfo>,
    active_line: usize,
    line_info_list: Vec<LineInfo>,
}
impl LineInfoContext {
    /// Creates an empty context.
    pub fn new() -> LineInfoContext {
        LineInfoContext {
            table: HashMap::new(),
            active_line: 0,
            line_info_list: vec![],
        }
    }

    /// Looks up a line info by its key.
    fn find_line_info(&self, key: &LineInfoKey) -> Option<&LineInfo> {
        self.table.get(key)
    }

    /// Appends `line_info` to the ordered list.
    // NOTE(review): this never inserts into `table`, so `find_line_info`
    // cannot see entries added here -- confirm whether the table is meant
    // to be kept in sync (the commented-out start_line flow suggests so).
    fn add_line_info(&mut self, line_info: LineInfo) {
        self.line_info_list.push(line_info);
    }
}
|
//! Defines data structures describing chess moves.
use std::fmt;
use board::*;
/// `MOVE_ENPASSANT`, `MOVE_PROMOTION`, `MOVE_CASTLING`, or `MOVE_NORMAL`.
pub type MoveType = usize;
pub const MOVE_ENPASSANT: MoveType = 0;
pub const MOVE_PROMOTION: MoveType = 1;
pub const MOVE_CASTLING: MoveType = 2;
pub const MOVE_NORMAL: MoveType = 3;
/// Encodes the minimum needed information that unambiguously
/// describes a move.
///
/// `MoveDigest` is a `u16` number. It is laid out the following way:
///
/// ```text
/// 15 0
/// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
/// | | | | | | | | | | | | | | | | |
/// | Move | Origin square | Destination square | Aux |
/// | type | 6 bits | 6 bits | data |
/// | 2 bits| | | | | | | | | | | | | 2 bits|
/// | | | | | | | | | | | | | | | | |
/// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
/// ```
///
/// There are 4 "move type"s: `0`) en-passant capture; `1`) pawn
/// promotion; `2`) castling; `3`) normal move. "Aux data" encodes the
/// type of the promoted piece if the move type is pawn promotion,
/// otherwise it is zero.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct MoveDigest(u16);
impl MoveDigest {
    /// Creates an invalid move digest instance.
    ///
    /// The returned instance contains `0`. This is sometimes useful
    /// in places where any move is required but no is available.
    #[inline]
    pub fn invalid() -> MoveDigest {
        MoveDigest(0)
    }

    /// Returns the move type.
    #[inline]
    pub fn move_type(&self) -> MoveType {
        // 2-bit field.
        (self.0 >> SHIFT_MOVE_TYPE & 3) as MoveType
    }

    /// Returns the origin square of the played piece.
    #[inline]
    pub fn orig_square(&self) -> Square {
        // 6-bit field.
        (self.0 >> SHIFT_ORIG_SQUARE & 63) as Square
    }

    /// Returns the destination square for the played piece.
    #[inline]
    pub fn dest_square(&self) -> Square {
        // 6-bit field.
        (self.0 >> SHIFT_DEST_SQUARE & 63) as Square
    }

    /// Returns a value between 0 and 3 representing the auxiliary
    /// data.
    ///
    /// When the move type is pawn promotion, "aux data" encodes the
    /// promoted piece type. For all other move types "aux data" is
    /// zero.
    #[inline]
    pub fn aux_data(&self) -> usize {
        // 2-bit field.
        (self.0 >> SHIFT_AUX_DATA & 3) as usize
    }

    /// Returns the algebraic notation of the encoded move.
    ///
    /// Examples: `e2e4`, `e7e5`, `e1g1` (white short castling),
    /// `e7e8q` (for promotion).
    pub fn notation(&self) -> String {
        format!("{}{}{}",
                notation(self.orig_square()),
                notation(self.dest_square()),
                // Promotion suffix is derived from aux data; empty otherwise.
                match self.move_type() {
                    MOVE_PROMOTION => ["q", "r", "b", "n"][self.aux_data()],
                    _ => "",
                })
    }

    /// Returns if the encoded move is a null move.
    ///
    /// "Null move" is a pseudo-move that changes nothing on the board
    /// except the side to move. It is sometimes useful to include a
    /// speculative null move in the search tree to achieve more
    /// aggressive pruning. Null moves are represented as king's moves
    /// for which the origin and destination squares are the same.
    #[inline]
    pub fn is_null(&self) -> bool {
        self.orig_square() == self.dest_square() && self.move_type() == MOVE_NORMAL
    }
}
impl fmt::Display for MoveDigest {
    /// Formats the digest as its algebraic notation (e.g. `e2e4`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.notation())
    }
}
/// Represents a move on the chessboard.
///
/// `Move` is a `u64` number. It contains 3 types of information:
///
/// 1. Information about the played move itself.
///
/// 2. Information needed so as to be able to undo the move and
/// restore the board into the exact same state as before.
///
/// 3. Move ordering info -- moves with higher move score are tried
/// first.
///
/// Bits 0-15 contain the whole information about the move
/// itself. This is called **"move digest"** and is laid out the
/// following way:
///
/// ```text
/// 15 0
/// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
/// | | | | | | | | | | | | | | | | |
/// | Move | Origin square | Destination square | Aux |
/// | type | 6 bits | 6 bits | data |
/// | 2 bits| | | | | | | | | | | | | 2 bits|
/// | | | | | | | | | | | | | | | | |
/// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
/// ```
///
/// There are 4 "move type"s: `0`) en-passant capture; `1`) pawn
/// promotion; `2`) castling; `3`) normal move. "Aux data" encodes the
/// type of the promoted piece if the move type is pawn promotion,
/// otherwise it is zero.
///
/// Bits 16-31 contain the information needed to undo the move:
///
/// ```text
/// 31 16
/// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
/// | | | | | | | | | | | | | | | | |
/// | | | Captured | Played | Castling | En-passant |
/// | 0 | 0 | piece | piece | rights | file |
/// | | | 3 bits | 3 bits | 4 bits | 4 bits |
/// | | | | | | | | | | | | | | | | |
/// +---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+---+
/// ```
///
/// If the *previous move* was a double pawn push, "en-passant file"
/// contains pushed pawn's file (a value between 0 and 7). Otherwise
/// it contains `8`. "Castling rights" holds the castling rights
/// before the move was played. When "Captured piece" is stored, its
/// bits are inverted, so that MVV-LVA (Most valuable victim -- least
/// valuable aggressor) move ordering is followed for moves that have
/// the same "score".
///
/// Bits 32-63 contain the "score" field, which is used to influence
/// move ordering.
#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)]
pub struct Move(u64);
impl Move {
/// Creates a new instance.
///
/// Packs the move digest (type, squares, aux data), the undo
/// information (captured/played piece, castling rights, en-passant
/// file) and the ordering score into a single `u64`, as described in
/// the type-level documentation.
#[inline(always)]
pub fn new(move_type: MoveType,
           orig_square: Square,
           dest_square: Square,
           aux_data: usize,
           captured_piece: PieceType,
           played_piece: PieceType,
           castling_rights: CastlingRights,
           enpassant_file: usize,
           score: u32)
           -> Move {
    // Move types occupy 2 bits (0..=3). The previous bound `0x11` (17)
    // was a hex/binary mix-up that let invalid values through.
    debug_assert!(move_type <= 0b11);
    debug_assert!(played_piece < PIECE_NONE);
    debug_assert!(orig_square <= 63);
    debug_assert!(dest_square <= 63);
    debug_assert!(captured_piece != KING && captured_piece <= PIECE_NONE);
    debug_assert!(enpassant_file <= 8);
    debug_assert!(aux_data <= 3);
    debug_assert!(move_type == MOVE_PROMOTION || aux_data == 0);
    // Only a null move (or a genuine move) may have orig == dest.
    debug_assert!(orig_square != dest_square ||
                  move_type == MOVE_NORMAL && captured_piece == PIECE_NONE);
    Move(// Most probably constants:
         (score as u64) << SHIFT_SCORE |
         (move_type << SHIFT_MOVE_TYPE | aux_data << SHIFT_AUX_DATA) as u64 |
         (
          // Sorted by increasing likelihood of being changed in a cycle:
          castling_rights.value() << SHIFT_CASTLING_RIGHTS |
          enpassant_file << SHIFT_ENPASSANT_FILE |
          played_piece << SHIFT_PIECE |
          orig_square << SHIFT_ORIG_SQUARE |
          // Captured piece is stored inverted for MVV-LVA ordering.
          (!captured_piece & 7) << SHIFT_CAPTURED_PIECE |
          dest_square << SHIFT_DEST_SQUARE) as u64)
}
/// Creates an invalid move instance.
///
/// The returned instance tries to mimic a legal move, but its
/// move digest equals `MoveDigest::invalid()`. This is sometimes
/// useful in places where any move is required but no is
/// available.
#[inline]
pub fn invalid() -> Move {
Move(((!PIECE_NONE & 7) << SHIFT_CAPTURED_PIECE | KING << SHIFT_PIECE) as u64)
}
/// Decodes the promoted piece type from the raw value returned by
/// `aux_data`.
///
/// The interpretation of the raw value is: `0` -- queen, `1` --
/// rook, `2` -- bishop, `3` -- knight.
#[inline]
pub fn piece_from_aux_data(pp_code: usize) -> PieceType {
debug_assert!(pp_code <= 3);
QUEEN + pp_code
}
/// Assigns a new score for the move.
#[inline]
pub fn set_score(&mut self, score: u32) {
const MASK_SCORE: u64 = (::std::u32::MAX as u64) << SHIFT_SCORE;
self.0 &= !MASK_SCORE;
self.0 |= (score as u64) << SHIFT_SCORE;
}
/// Returns the assigned move score.
#[inline]
pub fn score(&self) -> u32 {
(self.0 >> SHIFT_SCORE) as u32
}
/// Returns the move type.
#[inline]
pub fn move_type(&self) -> MoveType {
self.0 as usize >> SHIFT_MOVE_TYPE & 3
}
/// Returns the played piece type.
///
/// Castling is considered as king's move.
#[inline]
pub fn played_piece(&self) -> PieceType {
self.0 as usize >> SHIFT_PIECE & 7
}
/// Returns the origin square of the played piece.
#[inline]
pub fn orig_square(&self) -> Square {
self.0 as usize >> SHIFT_ORIG_SQUARE & 63
}
/// Returns the destination square for the played piece.
#[inline]
pub fn dest_square(&self) -> Square {
self.0 as usize >> SHIFT_DEST_SQUARE & 63
}
/// Returns the captured piece type.
#[inline]
pub fn captured_piece(&self) -> PieceType {
!(self.0 as usize) >> SHIFT_CAPTURED_PIECE & 7
}
/// If the *previous move* was a double pawn push, returns pushed
/// pawn's file (a value between 0 and 7). Otherwise returns `8`.
#[inline]
pub fn enpassant_file(&self) -> usize {
self.0 as usize >> SHIFT_ENPASSANT_FILE & 15
}
/// Returns the castling rights as they were before the move was
/// played.
#[inline]
pub fn castling_rights(&self) -> CastlingRights {
CastlingRights::new(self.0 as usize >> SHIFT_CASTLING_RIGHTS)
}
/// Returns a value between 0 and 3 representing the auxiliary
/// data.
///
/// When the move type is pawn promotion, "aux data" encodes the
/// promoted piece type. For all other move types "aux data" is
/// zero.
#[inline]
pub fn aux_data(&self) -> usize {
self.0 as usize >> SHIFT_AUX_DATA & 3
}
/// Returns the least significant 16 bits of the raw move value.
#[inline]
pub fn digest(&self) -> MoveDigest {
MoveDigest(self.0 as u16)
}
/// Returns the algebraic notation of the move.
///
/// Examples: `e2e4`, `e7e5`, `e1g1` (white short castling),
/// `e7e8q` (for promotion).
pub fn notation(&self) -> String {
format!("{}{}{}",
notation(self.orig_square()),
notation(self.dest_square()),
match self.move_type() {
MOVE_PROMOTION => ["q", "r", "b", "n"][self.aux_data()],
_ => "",
})
}
/// Returns `true` if the move is a pawn advance or a capture,
/// `false` otherwise.
#[inline]
pub fn is_pawn_advance_or_capure(&self) -> bool {
const MASK_PIECE: usize = 7 << SHIFT_PIECE;
const MASK_CAPTURED_PIECE: usize = 7 << SHIFT_CAPTURED_PIECE;
// We use clever bit manipulations to avoid branches.
const P: usize = (!PAWN & 7) << SHIFT_PIECE;
const C: usize = (!PIECE_NONE & 7) << SHIFT_CAPTURED_PIECE;
let v = self.0 as usize;
(v & MASK_PIECE | C) ^ (v & MASK_CAPTURED_PIECE | P) >= MASK_PIECE
}
/// Returns if the move is a null move.
///
/// "Null move" is a pseudo-move that changes nothing on the board
/// except the side to move. It is sometimes useful to include a
/// speculative null move in the search tree to achieve more
/// aggressive pruning. Null moves are represented as king's moves
/// for which the origin and destination squares are the same.
#[inline]
pub fn is_null(&self) -> bool {
debug_assert!(self.orig_square() != self.dest_square() || self.played_piece() == KING);
self.orig_square() == self.dest_square() && self.move_type() == MOVE_NORMAL
}
}
impl fmt::Display for Move {
    /// Writes the move in coordinate algebraic notation (see
    /// `Move::notation`).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.notation())
    }
}
/// A trait for adding moves to move containers.
pub trait AddMove {
    /// Adds a move to the move container.
    fn add_move(&mut self, m: Move);
}

impl AddMove for Vec<Move> {
    #[inline]
    fn add_move(&mut self, m: Move) {
        // For a plain vector, adding a move is simply appending it.
        self.push(m);
    }
}
// Field shifts
//
// Bit offsets of the fields packed into the `u64` wrapped by `Move`
// (see the layout diagrams in `Move`'s documentation).
const SHIFT_SCORE: usize = 32;
const SHIFT_CAPTURED_PIECE: usize = 27;
const SHIFT_PIECE: usize = 24;
const SHIFT_CASTLING_RIGHTS: usize = 20;
const SHIFT_ENPASSANT_FILE: usize = 16;
const SHIFT_MOVE_TYPE: usize = 14;
const SHIFT_ORIG_SQUARE: usize = 8;
const SHIFT_DEST_SQUARE: usize = 2;
const SHIFT_AUX_DATA: usize = 0;
/// Returns the algebraic notation for a given square.
fn notation(square: Square) -> &'static str {
    lazy_static! {
        // Lazily precomputed "a1".."h8" strings, indexed by square
        // number.
        static ref NOTATION: Vec<String> = (0..64).map(|i| format!("{}{}",
            ["a", "b", "c", "d", "e", "f", "g", "h"][Board::file(i)],
            ["1", "2", "3", "4", "5", "6", "7", "8"][Board::rank(i)])
        ).collect();
    }
    NOTATION[square].as_str()
}
#[cfg(test)]
mod tests {
    use super::*;
    use squares::*;

    /// Exercises construction, field accessors, ordering, scoring and
    /// digests of `Move`.
    #[test]
    fn moves() {
        let cr = CastlingRights::new(0b1011);
        let mut m = Move::new(MOVE_NORMAL, E2, E4, 0, PIECE_NONE, PAWN, cr, 8, 0);
        let m1 = Move::new(MOVE_NORMAL, F3, E4, 0, KNIGHT, PAWN, cr, 8, ::std::u32::MAX);
        let m2 = Move::new(MOVE_NORMAL, F3, E4, 0, PIECE_NONE, KING, cr, 8, 0);
        let m3 = Move::new(MOVE_PROMOTION, F2, F1, 1, PIECE_NONE, PAWN, cr, 8, 0);
        let m4 = Move::new(MOVE_NORMAL, F2, E3, 0, KNIGHT, BISHOP, cr, 8, 0);
        let m5 = Move::new(MOVE_NORMAL, F2, F2, 0, PIECE_NONE, KING, cr, 8, 0);
        // Ordering is dominated by the score, then by the inverted
        // "captured piece" bits (MVV-LVA).
        assert!(m1 > m);
        assert!(m2 < m);
        assert_eq!(m.move_type(), MOVE_NORMAL);
        assert_eq!(m.played_piece(), PAWN);
        assert_eq!(m.captured_piece(), PIECE_NONE);
        assert_eq!(m.orig_square(), E2);
        assert_eq!(m.dest_square(), E4);
        assert_eq!(m.enpassant_file(), 8);
        assert_eq!(m.aux_data(), 0);
        assert_eq!(m.castling_rights().value(), 0b1011);
        assert_eq!(m.notation(), "e2e4");
        assert!(!m.is_null());
        assert_eq!(m3.aux_data(), 1);
        assert_eq!(Move::piece_from_aux_data(0), QUEEN);
        assert_eq!(Move::piece_from_aux_data(1), ROOK);
        assert_eq!(Move::piece_from_aux_data(2), BISHOP);
        assert_eq!(Move::piece_from_aux_data(3), KNIGHT);
        let m_copy = m;
        assert_eq!(m, m_copy);
        // Changing the score must round-trip and affect ordering only.
        assert_eq!(m.score(), 0);
        m.set_score(::std::u32::MAX);
        assert_eq!(m.score(), ::std::u32::MAX);
        assert!(m > m_copy);
        m.set_score(0);
        assert_eq!(m.score(), 0);
        assert!(m.is_pawn_advance_or_capure());
        assert!(m1.is_pawn_advance_or_capure());
        assert!(!m2.is_pawn_advance_or_capure());
        assert!(m3.is_pawn_advance_or_capure());
        assert!(m4.is_pawn_advance_or_capure());
        assert!(!m5.is_pawn_advance_or_capure());
        assert!(m5.is_null());
        assert!(MOVE_NORMAL != 0);
        assert!(!Move::invalid().is_null());
        assert_eq!(Move::invalid().digest(), MoveDigest::invalid());
        // The digest must expose the same low-16-bit fields.
        assert_eq!(m.digest().move_type(), m.move_type());
        assert_eq!(m.digest().orig_square(), m.orig_square());
        assert_eq!(m.digest().dest_square(), m.dest_square());
        assert_eq!(m.digest().aux_data(), m.aux_data());
    }
}
|
use bs58;
use clear_on_drop::ClearOnDrop;
use crc8;
use ed25519_dalek;
use failure::Error;
use rand::{RngCore};
use sha2;
use std::fmt;
use std::str::FromStr;
/// Errors produced while parsing or validating encoded identities,
/// addresses, secrets and signatures.
#[derive(Debug, Fail)]
pub enum IdentityError {
    // The decoded byte string has the wrong length.
    #[fail(display = "invalid address: length")]
    InvalidLen,
    // A checksum, version or type byte did not match.
    #[fail(display = "invalid address")]
    InvalidAddress,
}

/// An ed25519 public key identifying a peer.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Identity([u8; 32]);

/// A 32-byte secret key (used for both ed25519 signing and x25519 key
/// exchange); the buffer is zeroed on drop via `ClearOnDrop`.
#[derive(Clone)]
pub struct Secret(ClearOnDrop<Box<[u8; 32]>>);

/// A detached 64-byte ed25519 signature.
#[derive(Clone)]
pub struct Signature([u8; 64]);

/// An x25519 public key used as an exchange address.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Address([u8; 32]);

/// An `Address` together with a `Signature` proving ownership.
#[derive(Clone)]
pub struct SignedAddress(Address, Signature);
// --- Secret
impl Secret {
    /// Derives the ed25519 public identity corresponding to this
    /// secret key.
    pub fn identity(&self) -> Identity {
        use ed25519_dalek::{PublicKey, SecretKey};
        // unwrap: the inner buffer is always exactly 32 bytes, which
        // is what `SecretKey::from_bytes` requires.
        let secret_key: SecretKey = SecretKey::from_bytes(&*self.0).unwrap();
        let pk: PublicKey = PublicKey::from_secret::<sha2::Sha512>(&secret_key);
        Identity::from_bytes(pk.as_bytes()).unwrap()
    }

    /// Derives the x25519 public key ("address") from this secret key.
    pub fn address(&self) -> Address {
        use x25519_dalek::generate_public;
        let mut secret = [0; 32];
        secret.copy_from_slice(&*self.0);
        let xpublic = generate_public(&secret);
        Address(xpublic.to_bytes())
    }

    /// Signs `purpose || text` with ed25519.
    ///
    /// Prefixing the purpose domain-separates signatures, so a
    /// signature made for one context cannot be replayed in another.
    pub fn sign(&self, purpose: &[u8], text: &[u8]) -> Signature {
        use ed25519_dalek::{PublicKey, SecretKey};
        let sk: SecretKey = SecretKey::from_bytes(&*self.0).unwrap();
        let pk: PublicKey = PublicKey::from_secret::<sha2::Sha512>(&sk);
        let kp = ed25519_dalek::Keypair { secret: sk, public: pk };
        let mut stext = purpose.to_vec();
        stext.extend_from_slice(&text);
        Signature(kp.sign::<sha2::Sha512>(&stext).to_bytes())
    }

    /// Generates a fresh random secret key from the OS RNG.
    pub fn gen() -> Self {
        use rand::rngs::OsRng;
        let mut a = [0u8; 32];
        let mut rng = OsRng::new().unwrap();
        rng.try_fill_bytes(&mut a).unwrap();
        Secret(ClearOnDrop::new(Box::new(a)))
    }

    /// Wraps a raw 32-byte array as a secret key.
    pub fn from_array(a: [u8; 32]) -> Self {
        Secret(ClearOnDrop::new(Box::new(a)))
    }

    /// Borrows the raw key bytes.
    pub fn as_bytes(&self) -> &[u8] {
        &*self.0
    }

    /// Builds a secret from a 32-byte slice, rejecting other lengths.
    pub fn from_bytes<B: AsRef<[u8]>>(b: B) -> Result<Self, Error> {
        let b = b.as_ref();
        if b.len() != 32 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut a = [0u8; 32];
        a.copy_from_slice(b);
        Ok(Self::from_array(a))
    }

    /// Encodes as base58 (Bitcoin alphabet): version byte `8`, type
    /// byte `3`, the 32 key bytes, then a trailing CRC8 checksum.
    pub fn to_string(&self) -> String {
        let mut v = Vec::new();
        v.push(8 as u8);
        v.push(3 as u8);
        v.extend_from_slice(&*self.0);
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&v.as_ref(), v.len() as i32, 0);
        v.push(crc);
        bs58::encode(v).with_alphabet(bs58::alphabet::BITCOIN).into_string()
    }
}
impl FromStr for Secret {
    type Err = Error;

    /// Parses a base58 (Bitcoin alphabet) secret: version byte `8`,
    /// type byte `3`, 32 key bytes, trailing CRC8.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = bs58::decode(s).with_alphabet(bs58::alphabet::BITCOIN).into_vec()?;
        // Require the exact encoded length (2 header bytes + 32 key
        // bytes + 1 CRC byte). The previous `< 35` lower bound let
        // longer inputs through, making `copy_from_slice` below panic
        // on a length mismatch.
        if s.len() != 35 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&s, s.len() as i32 - 1, 0);
        if crc != s[s.len() - 1] {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[0] != 8 {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[1] != 3 {
            return Err(IdentityError::InvalidAddress.into());
        }
        let mut a = [0u8; 32];
        a.copy_from_slice(&s[2..s.len() - 1]);
        Ok(Secret(ClearOnDrop::new(Box::new(a))))
    }
}
impl fmt::Debug for Secret {
    /// Never reveals key material; always renders the fixed
    /// placeholder `<secret>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("<secret>")
    }
}
// --- Address
impl Address {
    /// Borrows the raw 32 public-key bytes.
    pub fn as_bytes(&self) -> &[u8] {
        &self.0
    }

    /// Wraps a raw 32-byte array as an address.
    pub fn from_array(a: [u8; 32]) -> Self {
        Address(a)
    }

    /// Builds an address from a 32-byte slice, rejecting other
    /// lengths.
    pub fn from_bytes<B: AsRef<[u8]>>(b: B) -> Result<Self, Error> {
        let b = b.as_ref();
        if b.len() != 32 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut a = [0u8; 32];
        a.copy_from_slice(b);
        Ok(Address(a))
    }

    /// Encodes as base58 (Bitcoin alphabet): version byte `8`, type
    /// byte `6`, the 32 bytes, then a trailing CRC8 checksum.
    pub fn to_string(&self) -> String {
        let mut v = Vec::new();
        v.push(8 as u8);
        v.push(6 as u8);
        v.extend_from_slice(&self.0);
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&v.as_ref(), v.len() as i32, 0);
        v.push(crc);
        bs58::encode(v).with_alphabet(bs58::alphabet::BITCOIN).into_string()
    }
}
impl FromStr for Address {
    type Err = Error;

    /// Parses a base58 (Bitcoin alphabet) address: version byte `8`,
    /// type byte `6`, 32 key bytes, trailing CRC8.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = bs58::decode(s).with_alphabet(bs58::alphabet::BITCOIN).into_vec()?;
        // Require the exact encoded length (2 header bytes + 32
        // payload bytes + 1 CRC byte). The previous `< 35` lower bound
        // let longer inputs reach `copy_from_slice`, which panics on a
        // length mismatch. This also subsumes the old (dead)
        // `s.len() - 3 < 32` check.
        if s.len() != 35 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&s, s.len() as i32 - 1, 0);
        if crc != s[s.len() - 1] {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[0] != 8 {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[1] != 6 {
            return Err(IdentityError::InvalidAddress.into());
        }
        let mut b = [0; 32];
        b.copy_from_slice(&s[2..s.len() - 1]);
        Ok(Address(b))
    }
}
impl fmt::Display for Address {
    /// Renders the base58 form, forwarding the formatter (so width and
    /// alignment flags still apply).
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let encoded = self.to_string();
        fmt::Display::fmt(&encoded, fmt)
    }
}
impl fmt::Debug for Address {
    /// Debug output delegates to the base58 string's own `Debug`.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let encoded = self.to_string();
        fmt::Debug::fmt(&encoded, fmt)
    }
}
// --- Signature
impl Signature {
    /// Encodes as base58 (Bitcoin alphabet): version byte `8`, type
    /// byte `2`, the 64 signature bytes, then a trailing CRC8.
    pub fn to_string(&self) -> String {
        let mut v = Vec::new();
        v.push(8 as u8);
        v.push(2 as u8);
        v.extend_from_slice(&self.0);
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&v.as_ref(), v.len() as i32, 0);
        v.push(crc);
        bs58::encode(v).with_alphabet(bs58::alphabet::BITCOIN).into_string()
    }

    /// Borrows the raw 64 signature bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_ref()
    }

    /// Wraps a raw 64-byte array as a signature.
    pub fn from_array(a: [u8; 64]) -> Self {
        Signature(a)
    }

    /// Builds a signature from a 64-byte slice, rejecting other
    /// lengths.
    pub fn from_bytes<B: AsRef<[u8]>>(b: B) -> Result<Self, Error> {
        let b = b.as_ref();
        if b.len() != 64 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut a = [0u8; 64];
        a.copy_from_slice(b);
        Ok(Signature(a))
    }
}
impl FromStr for Signature {
    type Err = Error;

    /// Parses a base58 (Bitcoin alphabet) signature: version byte `8`,
    /// type byte `2`, 64 signature bytes, trailing CRC8.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = bs58::decode(s).with_alphabet(bs58::alphabet::BITCOIN).into_vec()?;
        // Require the exact encoded length (2 header bytes + 64
        // signature bytes + 1 CRC byte). The previous `< 67` lower
        // bound let longer inputs through, making `copy_from_slice`
        // below panic on a length mismatch.
        if s.len() != 67 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&s, s.len() as i32 - 1, 0);
        if crc != s[s.len() - 1] {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[0] != 8 {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[1] != 2 {
            return Err(IdentityError::InvalidAddress.into());
        }
        let mut b = [0; 64];
        b.copy_from_slice(&s[2..s.len() - 1]);
        Ok(Signature(b))
    }
}
impl fmt::Display for Signature {
    /// Renders the base58 form, forwarding the formatter flags.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let encoded = self.to_string();
        fmt::Display::fmt(&encoded, fmt)
    }
}
// -- Identity
impl Identity {
    /// Verifies an ed25519 signature over `purpose || text` made by
    /// the holder of this identity's secret key.
    pub fn verify(&self, purpose: &[u8], text: &[u8], signature: &Signature) -> Result<(), Error> {
        let sig = ed25519_dalek::Signature::from_bytes(&signature.0)?;
        let pk = ed25519_dalek::PublicKey::from_bytes(&self.0)?;
        // Reconstruct the exact message `Secret::sign` produced.
        let mut stext = purpose.to_vec();
        stext.extend_from_slice(&text);
        pk.verify::<sha2::Sha512>(&stext, &sig)?;
        Ok(())
    }

    /// Borrows the raw 32 public-key bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_ref()
    }

    /// Wraps a raw 32-byte array as an identity.
    pub fn from_array(a: [u8; 32]) -> Self {
        Identity(a)
    }

    /// Builds an identity from a 32-byte slice, rejecting other
    /// lengths.
    pub fn from_bytes<B: AsRef<[u8]>>(b: B) -> Result<Self, Error> {
        let b = b.as_ref();
        if b.len() != 32 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut a = [0u8; 32];
        a.copy_from_slice(b);
        Ok(Identity(a))
    }

    /// Encodes as base58 (Bitcoin alphabet): version byte `8`, type
    /// byte `9`, the 32 bytes, then a trailing CRC8 checksum.
    pub fn to_string(&self) -> String {
        let mut v = Vec::new();
        v.push(8 as u8);
        v.push(9 as u8);
        v.extend_from_slice(&self.0);
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&v.as_ref(), v.len() as i32, 0);
        v.push(crc);
        bs58::encode(v).with_alphabet(bs58::alphabet::BITCOIN).into_string()
    }
}
impl FromStr for Identity {
    type Err = Error;

    /// Parses a base58 (Bitcoin alphabet) identity: version byte `8`,
    /// type byte `9`, 32 key bytes, trailing CRC8.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let s = bs58::decode(s).with_alphabet(bs58::alphabet::BITCOIN).into_vec()?;
        // Require the exact encoded length (2 header bytes + 32 key
        // bytes + 1 CRC byte). The previous `< 35` lower bound let
        // longer inputs through, making `copy_from_slice` below panic
        // on a length mismatch.
        if s.len() != 35 {
            return Err(IdentityError::InvalidLen.into());
        }
        let mut crc8 = crc8::Crc8::create_lsb(130);
        let crc = crc8.calc(&s, s.len() as i32 - 1, 0);
        if crc != s[s.len() - 1] {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[0] != 8 {
            return Err(IdentityError::InvalidAddress.into());
        }
        if s[1] != 9 {
            return Err(IdentityError::InvalidAddress.into());
        }
        let mut b = [0; 32];
        b.copy_from_slice(&s[2..s.len() - 1]);
        Ok(Identity(b))
    }
}
impl fmt::Display for Identity {
    /// Renders the base58 form, forwarding the formatter flags.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let encoded = self.to_string();
        fmt::Display::fmt(&encoded, fmt)
    }
}
impl fmt::Debug for Identity {
    /// Debug output delegates to the base58 string's own `Debug`.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let encoded = self.to_string();
        fmt::Debug::fmt(&encoded, fmt)
    }
}
// -- Signed Address
impl SignedAddress {
    /// Signs `address` with `secret` under the fixed purpose string,
    /// binding the address to the signer's identity.
    pub fn sign(secret: &Secret, address: Address) -> SignedAddress {
        let signature = secret.sign(b"carrier signed exchange address", &address.0);
        SignedAddress(address, signature)
    }

    /// Serializes as 32 address bytes followed by 64 signature bytes.
    pub fn to_vec(&self) -> Vec<u8> {
        let mut b = (self.0).0.to_vec();
        b.extend_from_slice(&(self.1).0);
        b
    }

    /// Checks that `identity` produced the signature over this
    /// address.
    pub fn verify(&self, identity: &Identity) -> Result<(), Error> {
        identity.verify(b"carrier signed exchange address", &(self.0).0, &self.1)
    }

    /// Parses the 96-byte wire form produced by `to_vec`.
    pub fn from_bytes<B: AsRef<[u8]>>(b: B) -> Result<Self, Error> {
        let b = b.as_ref();
        if b.len() != 96 {
            return Err(IdentityError::InvalidLen.into());
        }
        let (b1, b2) = b.split_at(32);
        let mut a1 = [0; 32];
        a1.copy_from_slice(b1.as_ref());
        let mut a2 = [0; 64];
        a2.copy_from_slice(b2.as_ref());
        Ok(SignedAddress(Address(a1), Signature(a2)))
    }

    /// Borrows the wrapped address.
    pub fn address(&self) -> &Address {
        &self.0
    }
}
pub fn generate_x25519() -> (Secret, [u8; 32]) {
let secret = Secret::gen();
let public = secret.address();
(secret, public.0)
}
/// Round-trips a known identity string through `FromStr` and
/// `to_string`.
#[test]
fn parse() {
    let s = "oXBUPpxoaRixVSgEdtPxhUNRfUY5KDztGqjEmEmc6Pp3vX1";
    let id : Identity = s.parse().unwrap();
    assert_eq!(id.to_string(), s);
}
/// Deriving the identity from a fixed secret key must yield a stable,
/// pinned encoding.
#[test]
fn public_id() {
    let client_secret = Secret::from_array([
        0x9d, 0x61, 0xb1, 0x9d, 0xef, 0xfd, 0x5a, 0x60, 0xba, 0x84, 0x4a, 0xf4, 0x92, 0xec, 0x2c, 0xc4, 0x44, 0x49,
        0xc5, 0x69, 0x7b, 0x32, 0x69, 0x19, 0x70, 0x3b, 0xac, 0x03, 0x1c, 0xae, 0x7f, 0x60,
    ]);
    let client_identity = client_secret.identity();
    assert_eq!(
        client_identity.to_string(),
        "oXBUPpxoaRixVSgEdtPxhUNRfUY5KDztGqjEmEmc6Pp3vX1"
    );
}
/// Signatures verify under the original purpose/text and fail under a
/// different purpose, a corrupted signature, or corrupted text.
#[test]
fn sign() {
    let client_secret = Secret::from_array([
        0x9d, 0x61, 0xb1, 0x9d, 0xef, 0xfd, 0x5a, 0x60, 0xba, 0x84, 0x4a, 0xf4, 0x92, 0xec, 0x2c, 0xc4, 0x44, 0x49,
        0xc5, 0x69, 0x7b, 0x32, 0x69, 0x19, 0x70, 0x3b, 0xac, 0x03, 0x1c, 0xae, 0x7f, 0x60,
    ]);
    let client_identity = client_secret.identity();
    let text = b"beeb bob";
    let signature = client_secret.sign(b"goes on postcards", text);
    assert!(client_identity.verify(b"goes on postcards", text, &signature).is_ok());
    // A different purpose string must not verify (domain separation).
    assert!(
        client_identity
            .verify(b"does not go on postcards", text, &signature)
            .is_err()
    );
    // Flip one random signature byte to a guaranteed-different value.
    let mut signature_invalid = signature.0.clone();
    let flip = rand::random::<usize>() % signature_invalid.len();
    loop {
        let rb = rand::random::<u8>();
        if signature_invalid[flip] != rb {
            signature_invalid[flip] = rb;
            break;
        }
    }
    assert!(
        client_identity
            .verify(b"goes on postcards", text, &Signature(signature_invalid))
            .is_err()
    );
    // Corrupting the signed text must also fail verification.
    // NOTE(review): zeroing a random byte is a no-op if that byte is
    // already 0x00 -- fine for this fixed text, which has none.
    let mut text_invalid = text.to_vec();
    text_invalid[rand::random::<usize>() % text.len()] = 0x00;
    assert!(
        client_identity
            .verify(b"goes on postcards", &text_invalid, &signature)
            .is_err()
    );
}
|
use bc::mem::Mem;
use bc::BlockChain;
use bc::block::*;
use proto::*;
/// Constructing a block computes a deterministic hash over its fields.
#[test]
fn block_new() {
    let b = Block::new(
        0,
        String::from("id"),
        String::from("key"),
        String::from("value"),
        String::from("prev_hash"),
    );
    assert_eq!(b.idx, 0);
    assert_eq!(b.id, String::from("id"));
    assert_eq!(b.key, String::from("key"));
    assert_eq!(b.value, String::from("value"));
    // Hash pinned so accidental changes to the hashing scheme fail
    // loudly.
    assert_eq!(
        b.get_hash(),
        String::from("7dc5bd97731fb94efbb8bbe434a9f29d64b3c38084d7818690c9b720c86f3c20")
    );
}

/// Converting into the `byzan::Block` wire type preserves all fields
/// and the computed hash.
#[test]
fn into() {
    let b = Block::new(
        1,
        String::from("id"),
        String::from("key"),
        String::from("value"),
        String::from("prev_hash"),
    );
    let b: byzan::Block = b.into();
    assert_eq!(b.get_idx(), 1);
    assert_eq!(b.id, String::from("id"));
    assert_eq!(b.get_key(), String::from("key"));
    assert_eq!(b.get_value(), String::from("value"));
    assert_eq!(b.get_prev_hash(), String::from("prev_hash"));
    assert_eq!(
        b.get_self_hash(),
        String::from("a5e83640dfb8513f0e3b066d73c34a0ee057a9420ac83de49751f11dd8c1699c")
    );
}

/// Converting from a `byzan::Block` copies every field verbatim
/// (including the carried hash -- it is not recomputed).
#[test]
fn from() {
    let mut b = byzan::Block::new();
    b.set_idx(1);
    b.set_id(String::from("id"));
    b.set_key(String::from("key"));
    b.set_value(String::from("value"));
    b.set_prev_hash(String::from("prev_hash"));
    b.set_self_hash(String::from("self_hash"));
    let b = Block::from(b);
    assert_eq!(b.idx, 1);
    assert_eq!(b.id, String::from("id"));
    assert_eq!(b.key, String::from("key"));
    assert_eq!(b.value, String::from("value"));
    assert_eq!(b.prev_hash, String::from("prev_hash"));
    assert_eq!(b.self_hash, String::from("self_hash"));
}
/// The genesis block has index 0, all-"genesis" fields and a pinned
/// hash.
#[test]
fn block_genesis() {
    let b = Block::genesis();
    assert_eq!(b.idx, 0);
    assert_eq!(b.id, String::from("genesis"));
    assert_eq!(b.key, String::from("genesis"));
    assert_eq!(b.value, String::from("genesis"));
    assert_eq!(
        b.self_hash,
        String::from("495298f3856aca18c2e7f8420829df42b24dd43047b49e474bbebe875e41aa00"),
    );
}

/// `NewBlock::new` stores its three fields untouched.
#[test]
fn new_block_new() {
    let new_block = NewBlock::new(String::from("id"), String::from("key"), String::from("value"));
    assert_eq!(new_block.id, String::from("id"));
    assert_eq!(new_block.key, String::from("key"));
    assert_eq!(new_block.value, String::from("value"));
}

/// Converting from the `byzan::NewBlock` wire type copies all fields.
#[test]
fn new_block_from() {
    let mut new_block = byzan::NewBlock::new();
    new_block.set_id(String::from("id"));
    new_block.set_key(String::from("key"));
    new_block.set_value(String::from("value"));
    let new_block = NewBlock::from(new_block);
    assert_eq!(new_block.id, String::from("id"));
    assert_eq!(new_block.key, String::from("key"));
    assert_eq!(new_block.value, String::from("value"));
}
/// A block whose index is previous+1 and whose prev_hash matches the
/// previous block's hash is valid.
#[test]
fn valid_prev_hash() {
    let b = Block::new(0, String::from(""), String::from(""), String::from(""), String::from(""));
    let i = Block::new(1, String::from(""), String::from(""), String::from(""), b.self_hash.clone());
    let r = i.valid_prev_hash(&b);
    assert_eq!(r, Ok(()));
}

/// Skipping an index (0 -> 2) is rejected before the hash is checked.
#[test]
fn valid_prev_hash_invalid_index() {
    let b = Block::new(0, String::from(""), String::from(""), String::from(""), String::from(""));
    let i = Block::new(2, String::from(""), String::from(""), String::from(""), String::from(""));
    let r = i.valid_prev_hash(&b);
    assert_eq!(r, Err(String::from("invalid index")));
}

/// A correct index with a mismatched prev_hash is rejected.
#[test]
fn valid_prev_hash_invalid_prev_hash() {
    let b = Block::new(0, String::from(""), String::from(""), String::from(""), String::from(""));
    let i = Block::new(1, String::from(""), String::from(""), String::from(""), String::from(""));
    let r = i.valid_prev_hash(&b);
    assert_eq!(r, Err(String::from("invalid prev hash")));
}

/// An id not yet present in the chain passes `valid_id`.
#[test]
fn valid_id() {
    let m = Mem::new();
    let b = Block::new(1, String::from(""), String::from(""), String::from(""), String::from(""));
    let r = b.valid_id(&m);
    assert_eq!(r, Ok(()));
}

/// An id already stored in the chain is rejected by `valid_id`.
#[test]
fn valid_id_already_included() {
    let mut m = Mem::new();
    let h = m.last().unwrap().self_hash.clone();
    let b0 = Block::new(1, String::from("hoge"), String::from(""), String::from(""), h);
    let b1 = Block::new(2, String::from("hoge"), String::from(""), String::from(""), b0.self_hash.clone());
    let _ = m.push_block(b0);
    let r = b1.valid_id(&m);
    assert_eq!(r, Err(String::from("the block is already included")));
}
/// A freshly constructed block's self_hash passes `valid_hash`.
#[test]
fn valid_hash() {
    let b = Block::new(1, String::from(""), String::from(""), String::from(""), String::from(""));
    let r = b.valid_hash();
    assert_eq!(r, Ok(()));
}

/// `LightBlock::new` stores index and hash untouched.
#[test]
fn light_block_new() {
    let light_block = LightBlock::new(1, String::from("hash"));
    assert_eq!(light_block.idx, 1);
    assert_eq!(light_block.self_hash, String::from("hash"));
}

/// Converting a full block into a `LightBlock` keeps its index and
/// computed hash.
#[test]
fn light_block_from() {
    let b = Block::new(
        1,
        String::from("id"),
        String::from("key"),
        String::from("value"),
        String::from("prev_hash"),
    );
    let light_block: LightBlock = b.clone().into();
    assert_eq!(light_block.idx, 1);
    assert_eq!(light_block.self_hash, b.self_hash);
}
|
//! Defines the metric length standards as newtypes
use core::{self, fmt};
use length::imperial::*;
use length::astronomical::*;
use composite::UnitName;
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Meter(pub f64);

// British-spelling and plural aliases.
pub type Meters = Meter;
pub type Metre = Meter;
pub type Metres = Meter;

impl_full_unit!(Meter);
impl_unit_debug!(Meter => "m");

// Conversion factors to and from the other length units.
impl_from_cf!(AU <===> 149597870700.0 Meter );
impl_from_cf!(Kilometer <===> 1_000.0 Meter );
impl_from_cf!(Mile <===> 1_609.344 Meter );
impl_from_cf!(Meter <===> 100.0 Centimeter);
impl_from_cf!(Meter <===> 1_000.0 Millimeter);
impl_from_cf!(Meter <===> 1_000_000.0 Micrometer);
impl_from_cf!(Meter <===> 1_000_000_000.0 Nanometer );
impl_from_cf!(Meter <===> 1_000_000_000_000.0 Picometer );
impl_from_cf!(Meter <===> 1_000_000_000_000_000.0 Femtometer);
// NOTE(review): these imperial factors are rounded independently
// (39.37007 in/m is not exactly 12 * 3.280841666667 ft/m) -- confirm
// the intended precision before relying on exact round-trips.
impl_from_cf!(Meter <===> 39.37007 Inch );
impl_from_cf!(Meter <===> 3.280841666667 Foot );
impl_from_cf!(Meter <===> 1.093613888889 Yard );
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Kilometer(pub f64);

// British-spelling and plural aliases.
pub type Kilometers = Kilometer;
pub type Kilometre = Kilometer;
pub type Kilometres = Kilometer;

impl_full_unit!(Kilometer);
impl_unit_debug!(Kilometer => "km");

// Conversion factors to and from the other length units.
impl_from_cf!(AU <===> 149597870.70 Kilometer );
impl_from_cf!(Mile <===> 1.609344 Kilometer );
impl_from_cf!(Kilometer <===> 100_000.00 Centimeter);
impl_from_cf!(Kilometer <===> 1_000_000.00 Millimeter);
impl_from_cf!(Kilometer <===> 1_000_000_000.00 Micrometer);
impl_from_cf!(Kilometer <===> 1_000_000_000_000.00 Nanometer );
impl_from_cf!(Kilometer <===> 1_000_000_000_000_000.00 Picometer );
impl_from_cf!(Kilometer <===> 1_000_000_000_000_000_000.00 Femtometer);
impl_from_cf!(Kilometer <===> 39_370.07 Inch );
impl_from_cf!(Kilometer <===> 3_280.84 Foot );
impl_from_cf!(Kilometer <===> 1_093.61 Yard );

/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Centimeter(pub f64);

// British-spelling and plural aliases.
pub type Centimeters = Centimeter;
pub type Centimetre = Centimeter;
pub type Centimetres = Centimeter;

impl_full_unit!(Centimeter);
impl_unit_debug!(Centimeter => "cm");

// Conversion factors to and from the other length units.
impl_from_cf!(Centimeter <===> 10.00 Millimeter);
impl_from_cf!(Centimeter <===> 10_000.00 Micrometer);
impl_from_cf!(Centimeter <===> 10_000_000.00 Nanometer );
impl_from_cf!(Centimeter <===> 10_000_000_000.00 Picometer );
impl_from_cf!(Centimeter <===> 10_000_000_000_000.00 Femtometer);
impl_from_cf!(Inch <===> 2.54 Centimeter);
impl_from_cf!(Foot <===> 30.48 Centimeter);
impl_from_cf!(Yard <===> 91.44 Centimeter);
impl_from_cf!(Mile <===> 160934.40 Centimeter);
impl_from_cf!(AU <===> 14959787070000.00 Centimeter);
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Millimeter(pub f64);

// British-spelling and plural aliases.
pub type Millimeters = Millimeter;
pub type Millimetre = Millimeter;
pub type Millimetres = Millimeter;

impl_full_unit!(Millimeter);
impl_unit_debug!(Millimeter => "mm");

// Conversion factors to and from the other length units.
impl_from_cf!(Millimeter <===> 1_000.0 Micrometer);
impl_from_cf!(Millimeter <===> 1_000_000.0 Nanometer );
impl_from_cf!(Millimeter <===> 1_000_000_000.0 Picometer );
impl_from_cf!(Millimeter <===> 1_000_000_000_000.0 Femtometer);
impl_from_cf!(Inch <===> 25.4 Millimeter);
impl_from_cf!(Foot <===> 304.8 Millimeter);
impl_from_cf!(Yard <===> 914.4 Millimeter);
impl_from_cf!(Mile <===> 1609344.0 Millimeter);
impl_from_cf!(AU <===> 149597870700000.0 Millimeter);

/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Micrometer(pub f64);

// British-spelling and plural aliases.
pub type Micrometers = Micrometer;
pub type Micrometre = Micrometer;
pub type Micrometres = Micrometer;

impl_full_unit!(Micrometer);
impl_unit_debug!(Micrometer => "um");

// Conversion factors to and from the other length units.
impl_from_cf!(Micrometer <===> 1_000.0 Nanometer );
impl_from_cf!(Micrometer <===> 1_000_000.0 Picometer );
impl_from_cf!(Micrometer <===> 1_000_000_000.0 Femtometer);
impl_from_cf!(Inch <===> 25400.0 Micrometer);
impl_from_cf!(Foot <===> 304800.0 Micrometer);
impl_from_cf!(Yard <===> 914400.0 Micrometer);
impl_from_cf!(Mile <===> 1609344000.0 Micrometer);
impl_from_cf!(AU <===> 149597870700000000.0 Micrometer);
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Nanometer(pub f64);

// British-spelling and plural aliases.
pub type Nanometers = Nanometer;
pub type Nanometre = Nanometer;
pub type Nanometres = Nanometer;

impl_full_unit!(Nanometer);
// Fixed: the debug suffix previously read "um" (the micrometer
// symbol); the SI symbol for nanometers is "nm".
impl_unit_debug!(Nanometer => "nm");

// Conversion factors to and from the other length units.
impl_from_cf!(Nanometer <===> 1000.0 Picometer );
impl_from_cf!(Nanometer <===> 1000000.0 Femtometer);
impl_from_cf!(Inch <===> 25400000.0 Nanometer );
impl_from_cf!(Foot <===> 304800000.0 Nanometer );
impl_from_cf!(Yard <===> 914400000.0 Nanometer );
impl_from_cf!(Mile <===> 1609344000000.0 Nanometer );
impl_from_cf!(AU <===> 149597870700000000000.0 Nanometer );
/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Picometer(pub f64);

// British-spelling and plural aliases.
pub type Picometers = Picometer;
pub type Picometre = Picometer;
pub type Picometres = Picometer;

impl_full_unit!(Picometer);
impl_unit_debug!(Picometer => "pm");

// Conversion factors to and from the other length units.
impl_from_cf!(Picometer <===> 1000.0 Femtometer);
impl_from_cf!(Inch <===> 25400000000.0 Picometer );
impl_from_cf!(Foot <===> 304800000000.0 Picometer );
impl_from_cf!(Yard <===> 914400000000.0 Picometer );
impl_from_cf!(Mile <===> 1609344000000000.0 Picometer );
impl_from_cf!(AU <===> 149597870700000000000000.0 Picometer );

/// A newtype that wraps around `f64` and provides convenience functions for unit-aware and type-safe manipulation.
#[derive(Clone, Copy)]
pub struct Femtometer(pub f64);

// British-spelling and plural aliases.
pub type Femtometers = Femtometer;
pub type Femtometre = Femtometer;
pub type Femtometres = Femtometer;

impl_full_unit!(Femtometer);
impl_unit_debug!(Femtometer => "fm");

// Conversion factors to and from the other length units.
impl_from_cf!(Inch <===> 25400000000000.0 Femtometer);
impl_from_cf!(Foot <===> 304800000000000.0 Femtometer);
impl_from_cf!(Yard <===> 914400000000000.0 Femtometer);
impl_from_cf!(Mile <===> 1609344000000000000.0 Femtometer);
impl_from_cf!(AU <===> 149597870700000000000000000.0 Femtometer);
|
use std::collections::{HashMap, HashSet};
use std::str::FromStr;
use rand::{Rng, StdRng};
use error::{Error, Result};
use maze::{Coord, Direction, Maze, Wall};
/// The maze-generation algorithms supported by this crate.
#[derive(Debug, Clone, Copy)]
pub enum GeneratorType {
    DFS,
    Kruskal,
    Prim,
    Eller,
    HuntKill,
}

impl GeneratorType {
    /// A list of possible variants in `&'static str` form
    pub fn variants() -> [&'static str; 5] {
        ["dfs", "kruskal", "prim", "eller", "hunt-kill"]
    }

    /// Instantiates the generator behind this variant for `maze`.
    pub fn init(&self, maze: &Maze, random: &mut StdRng) -> Box<dyn Generator> {
        match *self {
            GeneratorType::DFS => Box::new(DFS::new(maze)),
            GeneratorType::Kruskal => Box::new(Kruskal::new(maze, random)),
            GeneratorType::Prim => Box::new(Prim::new(maze)),
            GeneratorType::Eller => Box::new(Eller::new(maze)),
            GeneratorType::HuntKill => Box::new(HuntKill::new(maze)),
        }
    }
}

impl FromStr for GeneratorType {
    type Err = Error;

    /// Parses a case-insensitive generator name as listed by
    /// `variants`.
    fn from_str(s: &str) -> Result<Self> {
        match s.to_lowercase().as_ref() {
            "dfs" => Ok(GeneratorType::DFS),
            "kruskal" => Ok(GeneratorType::Kruskal),
            "prim" => Ok(GeneratorType::Prim),
            "eller" => Ok(GeneratorType::Eller),
            "hunt-kill" => Ok(GeneratorType::HuntKill),
            _ => Err(Error::UnsupportedGenerator(s.to_string())),
        }
    }
}

/// A maze-generation algorithm that is advanced one step at a time.
pub trait Generator {
    /// Returns `true` once generation has finished.
    fn is_done(&self) -> bool;
    /// Advances generation by one step, mutating `maze` in place.
    fn tick(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()>;
}
/// Randomized depth-first-search ("recursive backtracker") generator
/// state.
pub struct DFS {
    // Cell currently being carved from; `None` once generation ends.
    pub current: Option<Coord>,
    // Trail of visited cells used for backtracking.
    pub stack: Vec<Coord>,
}

impl DFS {
    fn new(maze: &Maze) -> DFS {
        DFS {
            current: Some(maze.start),
            stack: vec![],
        }
    }

    /// Picks a random unexplored neighbour of the current cell.
    ///
    /// Returns `None` when there is no current cell, the current cell
    /// is the maze end, or every neighbour is already explored.
    fn available_neighbour(&self, maze: &Maze, random: &mut StdRng) -> Option<(Coord, Direction)> {
        let current = match self.current {
            Some(ref current) => current,
            None => return None,
        };
        // Never carve onward from the end cell.
        if maze.end == *current {
            return None;
        }
        let mut neighbours = maze.neighbours(current);
        random.shuffle(&mut neighbours);
        neighbours
            .into_iter()
            .find(|(c, _)| !maze.explored.contains(&c))
    }
}
impl Generator for DFS {
    fn is_done(&self) -> bool {
        // Generation ends once the backtracking stack is exhausted and
        // no current cell remains.
        self.current.is_none()
    }

    /// Performs one step of randomized depth-first generation: carve
    /// towards a random unexplored neighbour, or backtrack.
    fn tick(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()> {
        maze.highlight_bright.clear();
        let current = match self.current {
            Some(ref current) => *current,
            None => return Ok(()),
        };
        maze.highlight_bright.insert(current);
        maze.highlight_medium.insert(current);
        maze.explored.insert(current);
        match self.available_neighbour(&maze, random) {
            Some((neighbour, _)) => {
                // Fixed mojibake: `&current` had been corrupted to
                // `¤t` (HTML-entity damage) here and below, which
                // did not compile.
                maze.link(&current, &neighbour)?;
                self.stack.push(current);
                self.current = Some(neighbour);
            }
            None => {
                // Dead end: dim the trail and backtrack.
                maze.highlight_medium.remove(&current);
                self.current = self.stack.pop();
            }
        }
        Ok(())
    }
}
/// Outcome of `Kruskal::join`.
enum JoinResult {
    // The two cells' sets were distinct and have been merged.
    Joined,
    // The cells were already in the same set; nothing changed.
    Nop,
}

/// Randomized-Kruskal generator state.
pub struct Kruskal {
    // Removable walls still to be considered, pre-shuffled.
    walls: Vec<Wall>,
    // Disjoint sets of already-connected cells.
    sets: Vec<HashSet<Coord>>,
}
impl Kruskal {
    /// Collects every removable wall in random order; each cell starts
    /// in its own singleton set.
    pub fn new(maze: &Maze, random: &mut StdRng) -> Kruskal {
        let mut walls = maze
            .walls
            .iter()
            .filter(|w| w.removable())
            .cloned()
            .collect::<Vec<_>>();
        random.shuffle(&mut walls);
        Kruskal {
            walls,
            sets: maze
                .cells
                .keys()
                .map(|c| {
                    let mut set = HashSet::new();
                    set.insert(*c);
                    set
                })
                .collect(),
        }
    }

    /// Merges the sets containing `c1` and `c2`.
    ///
    /// Returns `Nop` when both cells already share a set; otherwise
    /// folds `c2`'s set into `c1`'s, dropping candidate walls between
    /// the two merged regions so they are never reconsidered.
    ///
    /// NOTE(review): `drain_filter` and `remove_item` are
    /// nightly-only APIs.
    fn join(&mut self, c1: Coord, c2: Coord, maze: &Maze) -> Result<JoinResult> {
        let mut c1_set = self
            .sets
            .drain_filter(|s| s.contains(&c1))
            .next()
            .ok_or_else(|| Error::MissingSet(c1))?;
        if c1_set.contains(&c2) {
            // Already connected -- put the set back untouched.
            self.sets.push(c1_set);
            return Ok(JoinResult::Nop);
        }
        let c2_set = self
            .sets
            .drain_filter(|s| s.contains(&c2))
            .next()
            .ok_or_else(|| Error::MissingSet(c2))?;
        for c in c2_set {
            for (neighbour, direction) in maze.neighbours(&c) {
                if c1_set.contains(&neighbour) {
                    self.walls.remove_item(&maze.wall(&c, &direction));
                }
            }
            c1_set.insert(c);
        }
        self.sets.push(c1_set);
        Ok(JoinResult::Joined)
    }
}
impl Generator for Kruskal {
    /// Done when no candidate walls remain or all cells share one set.
    fn is_done(&self) -> bool {
        self.walls.is_empty() || self.sets.len() <= 1
    }
    /// Pop one random wall and knock it down if it separates two sets.
    fn tick(&mut self, maze: &mut Maze, _random: &mut StdRng) -> Result<()> {
        maze.highlight_bright.clear();
        if self.is_done() {
            return Ok(());
        }
        let wall = match self.walls.pop() {
            Some(wall) => wall,
            None => return Ok(()),
        };
        let (c1, c2) = maze.divided_coords(&wall);
        for c in [c1, c2].iter() {
            maze.explored.insert(*c);
            maze.highlight_bright.insert(*c);
        }
        // Only remove the wall when it actually merged two distinct sets;
        // errors from `join` propagate unchanged.
        if let JoinResult::Joined = self.join(c1, c2, maze)? {
            maze.walls.remove(&wall);
        }
        Ok(())
    }
}
/// Randomized Prim's algorithm maze generator.
pub struct Prim {
    // Frontier: cells adjacent to the explored region (seeded with the
    // start cell).
    cells: HashSet<Coord>,
}
impl Prim {
    /// Seed the frontier with the maze's start cell.
    pub fn new(maze: &Maze) -> Prim {
        let mut cells = HashSet::new();
        cells.insert(maze.start);
        Prim { cells }
    }
    /// Pick a random frontier cell, or `None` when the frontier is empty.
    fn random_cell(&mut self, random: &mut StdRng) -> Option<Coord> {
        if self.cells.is_empty() {
            return None;
        }
        let cell_list: Vec<_> = self.cells.iter().collect();
        // Unwrap is safe here because of the is_empty check above
        let cell = random.choose(&cell_list).unwrap();
        Some(**cell)
    }
}
impl Generator for Prim {
    /// Done once the frontier is exhausted.
    fn is_done(&self) -> bool {
        self.cells.is_empty()
    }
    /// Grow the maze by one randomly chosen frontier cell.
    fn tick(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()> {
        maze.highlight_bright.clear();
        if let Some(cell) = self.random_cell(random) {
            // NOTE(review): an already-explored start/end cell is skipped for
            // this tick without being removed from the frontier — presumably
            // intentional; confirm against the animation behaviour.
            if (cell == maze.start || cell == maze.end) && maze.explored.contains(&cell) {
                return Ok(());
            }
            maze.explored.insert(cell);
            maze.highlight_medium.remove(&cell);
            maze.highlight_bright.insert(cell);
            self.cells.remove(&cell);
            // Neighbours already part of the maze...
            let explored_neighbours: Vec<_> = maze
                .neighbours(&cell)
                .into_iter()
                .filter(|(n, _)| maze.explored.contains(n))
                .collect();
            // ...and neighbours still outside of it.
            let unknown_neighbours: Vec<_> = maze
                .neighbours(&cell)
                .into_iter()
                .filter(|(n, _)| !maze.explored.contains(n))
                .collect();
            // Connect the new cell to one random already-explored neighbour.
            if let Some((_, direction)) = random.choose(&explored_neighbours) {
                let wall = match direction {
                    Direction::North => maze.north_wall(&cell),
                    Direction::East => maze.east_wall(&cell),
                    Direction::South => maze.south_wall(&cell),
                    Direction::West => maze.west_wall(&cell),
                };
                maze.walls.remove(&wall);
            }
            // Extend the frontier with the still-unknown neighbours.
            for (unknown_neighbour, _) in unknown_neighbours {
                maze.highlight_medium.insert(unknown_neighbour);
                self.cells.insert(unknown_neighbour);
            }
        }
        Ok(())
    }
}
#[derive(PartialEq, Eq)]
enum EllerMode {
    // Sweep east along the current row, randomly merging adjacent sets.
    Horizontal,
    // Sweep back west, randomly carving passages to the row below.
    Vertical,
}
/// Eller's algorithm maze generator: builds the maze one row at a time.
pub struct Eller {
    // Cell currently being processed.
    current: Coord,
    // Index of the maze's bottom row.
    last_row: i32,
    mode: EllerMode,
    // Forward and reverse mappings between cells and their set ids.
    coord_to_set: HashMap<Coord, usize>,
    set_to_coords: HashMap<usize, Vec<Coord>>,
    // Highest set id handed out so far.
    last_set: usize,
}
impl Eller {
    /// Start at the top-left cell, which begins in its own set (set 0).
    pub fn new(maze: &Maze) -> Eller {
        let current = [0, 0].into();
        let mut coord_to_set = HashMap::new();
        let mut set_to_coords = HashMap::new();
        set_to_coords.insert(0, vec![current]);
        coord_to_set.insert(current, 0);
        Eller {
            current,
            last_row: maze.maze_height() as i32 - 1,
            mode: EllerMode::Horizontal,
            coord_to_set,
            set_to_coords,
            last_set: 0,
        }
    }
    /// Whether both cells are currently members of the same set.
    fn same_set(&mut self, c1: &Coord, c2: &Coord) -> bool {
        if let Some(c1_set_idx) = self.coord_to_set.get(c1) {
            if let Some(c2_set_idx) = self.coord_to_set.get(c2) {
                return c1_set_idx == c2_set_idx;
            }
        }
        false
    }
    /// Put `c` into a brand-new set.
    fn new_set(&mut self, c: Coord) {
        let next_set = self.last_set + 1;
        self.add(c, next_set);
        self.last_set = next_set;
    }
    /// Add `c` to `set`, moving it out of its previous set if necessary and
    /// dropping that set once it becomes empty.
    #[allow(clippy::map_entry)]
    fn add(&mut self, c: Coord, set: usize) {
        if self.coord_to_set.contains_key(&c) {
            let current_set = self.coord_to_set[&c];
            if current_set == set {
                return;
            }
            self.coord_to_set.remove(&c);
            // Drop `c` from its old set. Stable replacement for the unstable
            // `Vec::remove_item`; also fixes the mojibake "¤t_set" that
            // was a mangled `&current_set`.
            let coords = self.set_to_coords.get_mut(&current_set).unwrap();
            if let Some(idx) = coords.iter().position(|x| *x == c) {
                coords.remove(idx);
            }
            self.add(c, set);
            if self.set_to_coords[&current_set].is_empty() {
                self.set_to_coords.remove(&current_set);
            }
        } else {
            self.coord_to_set.insert(c, set);
            if self.set_to_coords.contains_key(&set) {
                self.set_to_coords.get_mut(&set).unwrap().push(c);
            } else {
                self.set_to_coords.insert(set, vec![c]);
            }
        }
    }
    /// Merge `c2` (and, if it has one, its whole set) into `c1`'s set.
    ///
    /// Must only be called for cells in different sets.
    fn join(&mut self, c1: &Coord, c2: Coord) {
        if self.same_set(c1, &c2) {
            unreachable!()
        }
        let c1_set_idx = self.coord_to_set[c1];
        if !self.coord_to_set.contains_key(&c2) {
            self.add(c2, c1_set_idx);
        } else {
            let c2_set_idx = self.coord_to_set[&c2];
            let c2_set: Vec<Coord> = self.set_to_coords[&c2_set_idx].to_vec();
            for &cell in &c2_set {
                self.add(cell, c1_set_idx);
            }
        }
    }
    /// Whether `set` already has at least one passage from the current row
    /// down to the next row.
    fn connected_vertically(&self, set: usize, maze: &Maze) -> bool {
        self.set_to_coords[&set]
            .iter()
            .filter(|c| c.y == self.current.y)
            .any(|c| {
                let wall = maze.south_wall(c);
                !maze.walls.contains(&wall)
            })
    }
}
impl Generator for Eller {
    /// Done once the vertical pass reaches the last row.
    fn is_done(&self) -> bool {
        self.mode == EllerMode::Vertical && self.current.y == self.last_row
    }
    /// Advance one cell of the current row-wise pass.
    ///
    /// Fixes the mojibake "¤t" occurrences, which were mangled
    /// `&current` references.
    fn tick(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()> {
        maze.highlight_bright.clear();
        maze.highlight_medium.clear();
        maze.highlight_dark.clear();
        // Highlight the row currently being worked on.
        for x in 0..maze.maze_width() as i32 {
            maze.highlight_dark.insert([x, self.current.y].into());
        }
        maze.highlight_bright.insert(self.current);
        maze.explored.insert(self.current);
        match self.mode {
            EllerMode::Horizontal => {
                let current = self.current;
                let last_row = current.y == self.last_row;
                if let Some(neighbour) = maze.neighbour(&current, &Direction::East) {
                    // Randomly merge with the eastern neighbour — always on
                    // the last row, so every set ends up connected.
                    if !self.same_set(&current, &neighbour) && (last_row || random.gen()) {
                        self.join(&current, neighbour);
                        let wall = maze.east_wall(&current);
                        maze.walls.remove(&wall);
                        maze.highlight_medium.insert(neighbour);
                    } else if !self.coord_to_set.contains_key(&neighbour) {
                        self.new_set(neighbour);
                    }
                    self.current = neighbour;
                } else {
                    // End of the row: switch to the vertical pass.
                    self.mode = EllerMode::Vertical;
                }
            }
            EllerMode::Vertical => {
                let current_set = self.coord_to_set[&self.current];
                let last_in_set = maze
                    .neighbour(&self.current, &Direction::West)
                    .filter(|c| self.coord_to_set[&c] == current_set)
                    .is_none();
                let connected = self.connected_vertically(current_set, maze);
                // Every set must reach the next row at least once.
                let force_join = last_in_set && !connected;
                let current = self.current;
                if force_join || random.gen() {
                    if let Some(neighbour) = maze.neighbour(&current, &Direction::South) {
                        self.join(&current, neighbour);
                        let wall = maze.south_wall(&current);
                        maze.walls.remove(&wall);
                        maze.explored.insert(neighbour);
                        maze.highlight_medium.insert(neighbour);
                    }
                }
                if let Some(neighbour) = maze.neighbour(&self.current, &Direction::West) {
                    self.current = neighbour;
                } else {
                    // Row finished: start a horizontal pass on the next row.
                    self.mode = EllerMode::Horizontal;
                    if let Some(neighbour) = maze.neighbour(&self.current, &Direction::South) {
                        if self.coord_to_set.get(&neighbour).is_none() {
                            self.new_set(neighbour);
                        }
                        self.current = neighbour;
                    }
                }
            }
        }
        Ok(())
    }
}
#[derive(PartialEq, Eq)]
enum HuntKillMode {
    // Scan rows for an unexplored cell bordering an explored one.
    Hunt,
    // Random-walk through unexplored cells until boxed in.
    Kill,
}
/// Hunt-and-kill maze generator.
pub struct HuntKill {
    // Cell being processed; `None` once generation is finished.
    current: Option<Coord>,
    // Where to restart the hunt: position known to be fully processed.
    last_completed_column: i32,
    last_completed_row: i32,
    mode: HuntKillMode,
}
impl HuntKill {
pub fn new(_maze: &Maze) -> HuntKill {
HuntKill {
current: Some(Coord { x: 0, y: 0 }),
last_completed_column: 0,
last_completed_row: 0,
mode: HuntKillMode::Kill,
}
}
fn available_neighbour(&self, maze: &Maze, random: &mut StdRng) -> Option<(Coord, Direction)> {
let current = match self.current {
Some(ref current) => current,
None => return None,
};
let mut neighbours = maze.neighbours(current);
random.shuffle(&mut neighbours);
neighbours
.into_iter()
.find(|(c, _)| !maze.explored.contains(&c))
}
fn current_row(&self, maze: &Maze) -> Vec<Coord> {
let current = match self.current {
Some(ref current) => current,
None => return vec![],
};
(0..maze.maze_width() as i32)
.into_iter()
.map(|x| [x, current.y].into())
.collect()
}
fn visited_neighbour(&self, maze: &Maze, random: &mut StdRng) -> Option<(Coord, Direction)> {
let current = match self.current {
Some(ref current) => current,
None => return None,
};
if maze.explored.contains(¤t) {
return None;
}
let mut neighbours = maze.neighbours(current);
random.shuffle(&mut neighbours);
neighbours
.into_iter()
.find(|(c, _)| maze.explored.contains(&c))
}
fn tick_kill(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()> {
let current = match self.current {
Some(ref current) => *current,
None => return Ok(()),
};
maze.highlight_bright.insert(current);
maze.highlight_medium.insert(current);
maze.explored.insert(current);
match self.available_neighbour(&maze, random) {
Some((neighbour, _)) => {
maze.link(¤t, &neighbour)?;
self.current = Some(neighbour);
}
None => {
self.mode = HuntKillMode::Hunt;
self.current = Some([self.last_completed_column, self.last_completed_row].into());
}
};
Ok(())
}
fn tick_hunt(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()> {
maze.highlight_medium.clear();
let current = match self.current {
Some(ref current) => *current,
None => return Ok(()),
};
for c in self.current_row(maze) {
maze.highlight_medium.insert(c);
}
maze.highlight_bright.insert(current);
match self.visited_neighbour(&maze, random) {
Some((neighbour, _)) => {
maze.highlight_medium.clear();
maze.highlight_bright.insert(neighbour);
maze.link(¤t, &neighbour)?;
self.last_completed_column = current.x;
self.mode = HuntKillMode::Kill;
}
None => {
if let Some(neighbour) = maze.neighbour(¤t, &Direction::East) {
self.current = Some(neighbour);
} else if current.y < maze.maze_height() as i32 - 1 {
if self
.current_row(maze)
.iter()
.all(|c| maze.explored.contains(c))
{
self.last_completed_row = current.y + 1;
}
self.current = Some([0, current.y + 1].into());
} else {
self.current = None;
}
}
};
Ok(())
}
}
impl Generator for HuntKill {
    /// Done once the hunt has scanned past the last row.
    fn is_done(&self) -> bool {
        self.current.is_none()
    }
    /// Dispatch one generation step to the active phase.
    fn tick(&mut self, maze: &mut Maze, random: &mut StdRng) -> Result<()> {
        maze.highlight_bright.clear();
        if self.mode == HuntKillMode::Hunt {
            self.tick_hunt(maze, random)
        } else {
            self.tick_kill(maze, random)
        }
    }
}
|
mod hextable;
use hextable::hextable;
use std::str::from_utf8;
/// Percent-encode `inp`.
///
/// Space, ASCII digits and ASCII letters pass through unchanged; '%' is
/// escaped by doubling; every other byte becomes "%xy" via `hextable`.
pub fn encode(inp: String) -> String {
    let byt: Vec<u8> = inp.into_bytes();
    let mut outbytes: Vec<u8> = vec![];
    for b in byt {
        match b {
            // 0x20 = space, 48..=57 = '0'-'9', 65..=90 = 'A'-'Z',
            // 97..=122 = 'a'-'z'. Bug fix: the upper-case range previously
            // stopped at 89 and so needlessly encoded 'Z'.
            0x20 | 48..=57 | 65..=90 | 97..=122 => outbytes.push(b),
            // '%' escapes itself by doubling.
            b'%' => {
                outbytes.push(b);
                outbytes.push(b);
            }
            _ => {
                // Everything else becomes a two-digit hex escape.
                let hext = hextable(b);
                outbytes.push(b'%');
                outbytes.push(hext[0]);
                outbytes.push(hext[1]);
            }
        }
    }
    // The output is pure ASCII, so the UTF-8 conversion cannot fail.
    from_utf8(&outbytes).unwrap().to_string()
}
/// Decode a percent-encoded string produced by `encode`.
///
/// "%%" decodes to '%', "%xy" (two hex digits) decodes to byte 0xXY, and
/// space/digits/ASCII letters pass through. Any other byte, a malformed
/// escape, or decoded bytes that are not valid UTF-8 yield an error.
pub fn decode(inp: String) -> Result<String, &'static str> {
    let byt: Vec<u8> = inp.into_bytes();
    // State machine: 0 = plain text, 1 = just saw '%', 2 = saw one hex digit.
    let mut state: i32 = 0;
    let mut outstr_u8: Vec<u8> = vec![];
    let mut convstr: String = "".to_string();
    for b in byt {
        match state {
            0 => {
                match b {
                    // 0x20 = space, '0'-'9', 'A'-'Z', 'a'-'z'. Bug fix: the
                    // upper-case range previously stopped at 89 and so
                    // wrongly rejected 'Z'.
                    0x20 | 48..=57 | 65..=90 | 97..=122 => outstr_u8.push(b),
                    b'%' => state = 1, // '%' is escape character here
                    _ => {
                        println!("Illegal character \'{}\' in decode stream", b);
                        return Err("Illegal character in decode stream")
                    }
                }
            }
            1 => {
                if u8::is_ascii_hexdigit(&b) {
                    convstr.push(char::from(b));
                    state = 2;
                } else if b == b'%' {
                    // "%%" is a literal '%'.
                    outstr_u8.push(b);
                    state = 0;
                } else {
                    println!("Illegal character \'{}\' in decode stream 2", b);
                    return Err("Illegal character in hex decode 2");
                }
            }
            2 => {
                if u8::is_ascii_hexdigit(&b) {
                    convstr.push(char::from(b));
                } else {
                    println!("Illegal character \'{}\' in decode stream 3", b);
                    return Err("Illegal character in hex decode 3");
                }
                // Both hex digits collected: convert them to one byte.
                match u8::from_str_radix(&convstr, 16) {
                    Ok(c) => outstr_u8.push(c),
                    Err(_) => return Err("Failed to convert hex in decode"),
                }
                state = 0;
                convstr = "".to_string();
            }
            _ => return Err("Illegal state in percent_decode"),
        }
    }
    // Robustness fix: report invalid UTF-8 instead of panicking on unwrap.
    from_utf8(&outstr_u8)
        .map(|s| s.to_string())
        .map_err(|_| "Decoded bytes are not valid UTF-8")
}
#[cfg(test)]
mod tests {
    use crate::{encode, decode};
    #[test]
    fn it_works() {
        //test encode, decode
        // test1 is plain text, test2 its encoded form, test3 is test2
        // encoded again (each '%' doubled), test4 assorted special chars.
        let test1 = r#"dsrich@hot über mail.com"#.to_string();
        let test2 = r#"dsrich%40hot %c3%bcber mail%2ecom"#.to_string();
        let test3 = r#"dsrich%%40hot %%c3%%bcber mail%%2ecom"#.to_string();
        let test4 = r#"\"fu*t[]{}~ ü!@#$%^&*()_+`-=|"#.to_string();
        assert_eq!(encode(test1.clone()), test2.clone());
        assert_eq!(decode(encode(test4.clone())).unwrap(), test4.clone());
        assert_eq!(encode(test2.clone()), test3.clone());
        // Double-encode / double-decode round trips.
        assert_eq!(encode(encode(test1.clone())), test3.clone());
        assert_eq!(decode(encode(test1.clone())).unwrap(), test1.clone());
        assert_eq!(decode(decode(encode(encode(test1.clone()))).unwrap()).unwrap(), test1.clone());
        assert_eq!(decode(encode(test2.clone())).unwrap(), test2.clone());
        assert_eq!(decode(encode(test4.clone())).unwrap(), test4.clone());
        assert_eq!(decode(test2.clone()).unwrap(), test1.clone());
        assert_eq!(decode(test3.clone()).unwrap(), test2);
        assert_eq!(decode(decode(test3).unwrap()).unwrap(), test1);
    }
}
// Memory-map boundaries. NOTE(review): the split (8 KiB ROM, 1 KiB work RAM,
// 7 KiB video RAM) looks like an Intel 8080 / Space Invaders style layout —
// confirm against the emulator's bus implementation.
pub const ROM_START: u16 = 0x0000;
pub const ROM_END: u16 = 0x1fff;
pub const WRAM_START: u16 = 0x2000;
pub const WRAM_END: u16 = 0x23ff;
pub const VRAM_START: u16 = 0x2400;
pub const VRAM_END: u16 = 0x3fff;
//pub const RAM_MIRROR_START: u16 = 0x4000;
|
use std::iter::from_fn;
/// True when the digits of `num` never decrease reading from the most
/// significant to the least significant digit.
///
/// Panics when `num == 0` (the digit iterator is then empty).
fn digits_inc_or_same(num: u32) -> bool {
    let mut it = rev_digit_iter(num);
    let mut prev = it.next().unwrap();
    // Walking least-significant first, each digit must be <= its successor.
    it.all(|d| {
        let ok = prev >= d;
        prev = d;
        ok
    })
}
/// True when at least two equal digits appear next to each other in `num`.
///
/// Panics when `num == 0` (the digit iterator is then empty).
fn adjacent_digits_same(num: u32) -> bool {
    let mut it = rev_digit_iter(num);
    let mut prev = it.next().unwrap();
    it.any(|d| {
        let same = prev == d;
        prev = d;
        same
    })
}
/// True when some run of equal adjacent digits in `num` has length exactly
/// two (longer runs do not qualify).
///
/// Panics when `num == 0` (the digit iterator is then empty).
fn adjacent_digits_same_advanced(num: u32) -> bool {
    let mut run = 1;
    let mut it = rev_digit_iter(num);
    let mut prev = it.next().unwrap();
    for d in it {
        if prev == d {
            run += 1;
        } else if run == 2 {
            // A run just ended and it was exactly a pair.
            return true;
        } else {
            run = 1;
        }
        prev = d;
    }
    // The final run may also be the qualifying pair.
    run == 2
}
/// Iterate over the decimal digits of `num` from least- to most-significant.
/// Yields nothing for `num == 0`.
fn rev_digit_iter(mut num: u32) -> impl Iterator<Item = u32> {
    std::iter::from_fn(move || {
        if num == 0 {
            return None;
        }
        let digit = num % 10;
        num /= 10;
        Some(digit)
    })
}
fn main() {
    let range = 138_241..=674_034;
    // Part 1: non-decreasing digits plus at least one adjacent pair.
    let p1 = range
        .clone()
        .filter(|&x| digits_inc_or_same(x) && adjacent_digits_same(x))
        .count();
    // Part 2: the adjacent run must be exactly two digits long.
    let p2 = range
        .filter(|&x| digits_inc_or_same(x) && adjacent_digits_same_advanced(x))
        .count();
    println!("p1: {}", p1);
    println!("p2: {}", p2);
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_digits_inc_or_same() {
        assert_eq!(digits_inc_or_same(111_111), true);
        assert_eq!(digits_inc_or_same(223_450), false);
    }
    #[test]
    fn test_two_adjacent_digits_same() {
        assert_eq!(adjacent_digits_same(122_345), true);
        assert_eq!(adjacent_digits_same(111_111), true);
    }
    #[test]
    fn test_adjacent_digits_same_advanced() {
        assert_eq!(adjacent_digits_same_advanced(122_345), true);
        assert_eq!(adjacent_digits_same_advanced(111_122), true);
        // Runs longer than two digits do not qualify on their own.
        assert_eq!(adjacent_digits_same_advanced(123_444), false);
        assert_eq!(adjacent_digits_same_advanced(112_233), true);
        assert_eq!(adjacent_digits_same_advanced(111_111), false);
        assert_eq!(adjacent_digits_same_advanced(112_222), true);
    }
}
|
/// Return the `n`th Fibonacci number, memoising every computed value in
/// `cache`.
///
/// `cache` must already hold the first two seed values (see `fib_cache`).
/// Returns an error string when the value would overflow `u64`.
pub fn fib(n: usize, cache: &mut Vec<u64>) -> Result<u64, String> {
    // Fill the cache up to index `n`; each new entry is the sum of the two
    // preceding entries, checked for overflow.
    while cache.len() <= n {
        let len = cache.len();
        let (next, overflowed) = cache[len - 2].overflowing_add(cache[len - 1]);
        if overflowed {
            return Err(format!("Overflow at fibonacci index {}", len));
        }
        cache.push(next);
    }
    Ok(cache[n])
}
/// Generate a correctly-seeded fibonacci cache vec.
///
/// `fib` relies on the cache always holding at least these two seed values.
pub fn fib_cache() -> Vec<u64> {
    // cache is always instantiated with first two fib seeds
    vec![0, 1]
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    /// ensure sane value and cache at n: 0
    fn fib0() {
        let mut cache = fib_cache();
        assert_eq!(fib(0, &mut cache), Ok(0));
        // fib_cache is always primed with two values
        assert_eq!(cache.len(), 2);
    }
    #[test]
    /// ensure sane value and cache at n: 1
    fn fib1() {
        let mut cache = fib_cache();
        assert_eq!(fib(1, &mut cache), Ok(1));
        assert_eq!(cache.len(), 2);
    }
    #[test]
    /// ensure sane value and cache at n: 10, cache should be len n + 1
    fn fib10() {
        let mut cache = fib_cache();
        assert_eq!(fib(10, &mut cache), Ok(55));
        assert_eq!(cache.len(), 11);
    }
    #[test]
    #[allow(clippy::unreadable_literal)]
    /// ensure sane value and cache at n: 93, max n value before overflow
    fn fib93() {
        let mut cache = fib_cache();
        assert_eq!(fib(93, &mut cache), Ok(12200160415121876738));
        assert_eq!(cache.len(), 94);
    }
    #[test]
    /// value should overflow, cache should not grow beyond 94
    fn fib94() {
        let mut cache = fib_cache();
        assert!(fib(94, &mut cache).is_err());
        assert_eq!(cache.len(), 94);
    }
    #[test]
    /// error should still be returned, cache should still be overflow size
    fn fib200() {
        let mut cache = fib_cache();
        assert!(fib(200, &mut cache).is_err());
        assert_eq!(cache.len(), 94);
    }
}
|
use std::fs;
/// Split the puzzle input into a grid of characters, one row per line.
fn parse(input: String) -> Vec<Vec<char>> {
    let mut grid = Vec::new();
    for line in input.lines() {
        grid.push(line.chars().collect());
    }
    grid
}
/// Look up the character at grid position (`y`, `x`).
///
/// The map repeats infinitely to the right, so `x` is taken modulo the
/// width of the first row. Returns `None` once `y` runs off the bottom of
/// the grid — or, as a robustness fix, when the grid is empty (the original
/// asserted and panicked). Takes `&[Vec<char>]` instead of `&Vec<Vec<char>>`
/// (idiomatic; existing `&Vec` callers still coerce).
fn pos(y: usize, x: usize, input: &[Vec<char>]) -> Option<char> {
    let row = input.get(y)?;
    // `get(y)` succeeding implies the grid is non-empty, so `input[0]` is safe.
    let x_size = input[0].len();
    Some(row[x % x_size])
}
/// Ride the toboggan from (`y`, `x`), stepping (`step_y`, `step_x`) each
/// move, and return `acc` plus the number of trees ('#') hit before leaving
/// the bottom of the map.
fn slide(y: usize, x: usize, step_y: usize, step_x: usize, input: &Vec<Vec<char>>, acc: i32) -> i32 {
    let (mut y, mut x, mut trees) = (y, x, acc);
    loop {
        match pos(y, x, input) {
            Some('#') => trees += 1,
            Some('.') => {}
            Some(_) => panic!("this should not happen"),
            None => return trees,
        }
        y += step_y;
        x += step_x;
    }
}
fn main() {
    // Sanity-check the parser and position lookup against the example input.
    let contents_example = fs::read_to_string("input_example.txt")
        .expect("error loading test file");
    let test_vec = parse(contents_example);
    assert_eq!(pos(0, 0, &test_vec), Some('.'));
    assert_eq!(pos(1, 3, &test_vec), Some('.'));
    assert_eq!(pos(0, 13, &test_vec), Some('#'));
    assert_eq!(pos(11, 0, &test_vec), None);
    let test_result = slide(0, 0, 1, 3, &test_vec, 0);
    println!("test result = {}", test_result);
    // Part 1: trees hit on the right-3/down-1 slope.
    let contents = fs::read_to_string("input.txt")
        .expect("error loading file");
    let vec = parse(contents);
    let result1 = slide(0, 0, 1, 3, &vec, 0);
    println!("ex 1 result = {}", result1);
    // Part 2: product of trees hit across all five (down, right) slopes.
    let slopes = [(1,1), (1,3), (1,5), (1,7), (2,1)];
    let result2 = slopes.iter().fold(1, |acc, &x| acc * slide(0, 0, x.0, x.1, &vec, 0));
    println!("ex 2 result = {}", result2);
}
use std::thread;
// A philosopher
/// A dining philosopher, identified by name.
struct Philosopher {
    name: String,
}
impl Philosopher {
    /// Create a new philosopher with the given name.
    fn new(name: &str) -> Philosopher {
        Philosopher {
            name: name.to_string(),
        }
    }
    /// Simulate eating: announce the start, "eat" for one second, announce
    /// the end.
    fn eat(&self) {
        println!("{},is eating!", self.name);
        // `thread::sleep_ms` is deprecated; use `thread::sleep` with a
        // `Duration` instead.
        std::thread::sleep(std::time::Duration::from_millis(1000));
        println!("{},is done eating!", self.name);
    }
}
fn main() {
    // Create the philosophers (the original comment said five; the vec below
    // actually holds six).
    // let p1 = Philosopher { name: "Judith Butler"};
    let mut name=String::new();
    name.push_str("a");
    // Question 1: `new` produces one value in this scope and another inside
    // the `new` method — i.e. the same string ends up twice on the heap.
    let a =Philosopher::new(name.trim());// name.trim() yields a &str
    // Lesson: once passed into the method, the argument is a &str.
    // let a =Philosopher::new(name);
    // let a =Philosopher::new("a"); // passing "a" directly is also a &str argument
    let philosophers = vec![
        a,Philosopher::new("b"),Philosopher::new("c"),Philosopher::new("d"),Philosopher::new("e"),Philosopher::new("f")
    ];
    // Each philosopher eats on its own thread; join waits for all of them.
    let handles:Vec<_> = philosophers.into_iter().map(|p| {
        thread::spawn(move || {
            p.eat();
        })
    }).collect();
    for h in handles {
        h.join().unwrap();
    }
    //
    // for p in &philosophers {
    //     p.eat();
    // }
}
//! Initialize variables at runtime which then behave like static variables.
//!
//! ```rust
//! extern crate late_static;
//! use late_static::LateStatic;
//!
//! struct Foo {
//! pub value: u32,
//! }
//!
//! static FOO: LateStatic<Foo> = LateStatic::new();
//!
//! fn main() {
//! unsafe {
//! LateStatic::assign(&FOO, Foo { value: 42 });
//! }
//! println!("{}", FOO.value);
//! }
//! ```
#![cfg_attr(not(test), no_std)]
use core::cell::UnsafeCell;
/// Static value that is manually initialized at runtime.
pub struct LateStatic<T> {
    // `UnsafeCell` provides the interior mutability that `assign`/`clear`
    // need to mutate through a shared `&LateStatic`; the `Option` tracks
    // whether a value has been assigned yet.
    val: UnsafeCell<Option<T>>,
}
// SAFETY (review note): these forward `Send`/`Sync` from `T`. Because the
// API mutates through a shared reference (see `assign`/`clear`), soundness
// relies on callers upholding the documented "no concurrent dereference"
// contract — TODO confirm this matches the crate's intent.
unsafe impl<T: Send> core::marker::Send for LateStatic<T> {}
unsafe impl<T: Sync> core::marker::Sync for LateStatic<T> {}
impl<T> LateStatic<T> {
    /// Construct a LateStatic.
    ///
    /// `const` so it can initialize a `static` item.
    pub const fn new() -> Self {
        LateStatic {
            val: UnsafeCell::new(None),
        }
    }
    /// Assign a value to the late static.
    ///
    /// This only works once. A second call to assign for a given variable will panic.
    ///
    /// # Safety
    ///
    /// This is completely unsafe if there is even the slightest chance of another
    /// thread trying to dereference the variable.
    pub unsafe fn assign(instance: &LateStatic<T>, val: T) {
        // Mutating through a shared reference; sound only under the
        // single-threaded contract documented above.
        let option: &mut Option<T> = &mut *instance.val.get();
        if option.is_some() {
            panic!("Second assignment to late static");
        } else {
            *option = Some(val);
        }
    }
    /// Invalidate the late static by removing its inner value.
    ///
    /// Panics when no value has been assigned yet.
    ///
    /// # Safety
    ///
    /// This is completely unsafe if there is even the slightest chance of another
    /// thread trying to dereference the variable.
    pub unsafe fn clear(instance: &LateStatic<T>) {
        if !Self::has_value(instance) {
            panic!("Tried to clear a late static without a value");
        }
        let option: &mut Option<T> = &mut *instance.val.get();
        *option = None;
    }
    /// Whether a value is assigned to this LateStatic.
    ///
    /// # Safety
    ///
    /// This is completely unsafe if there is even the slightest chance of another
    /// thread trying to dereference the variable.
    pub unsafe fn has_value(instance: &LateStatic<T>) -> bool {
        let option: &Option<T> = &*instance.val.get();
        option.is_some()
    }
}
impl<T> core::ops::Deref for LateStatic<T> {
    type Target = T;
    /// Borrow the inner value; panics when no value has been assigned yet.
    fn deref(&self) -> &T {
        // SAFETY: relies on the type's contract that no other thread is
        // concurrently mutating the cell.
        let option: &Option<T> = unsafe { &*self.val.get() };
        option
            .as_ref()
            .expect("Dereference of late static before a value was assigned")
    }
}
impl<T> core::ops::DerefMut for LateStatic<T> {
    /// Mutably borrow the inner value; panics when no value has been
    /// assigned yet.
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: `&mut self` guarantees exclusive access to the cell.
        let option: &mut Option<T> = unsafe { &mut *self.val.get() };
        option
            .as_mut()
            .expect("Dereference of late static before a value was assigned")
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Each test uses its own static because tests share process state and
    // may run in any order.
    static ASSIGN_ONCE_TEST: LateStatic<u32> = LateStatic::new();
    #[test]
    fn assign_once() {
        unsafe {
            assert!(!LateStatic::has_value(&ASSIGN_ONCE_TEST));
            LateStatic::assign(&ASSIGN_ONCE_TEST, 42);
            assert!(LateStatic::has_value(&ASSIGN_ONCE_TEST));
        }
    }
    static ASSIGN_TWICE_TEST: LateStatic<u32> = LateStatic::new();
    #[test]
    #[should_panic]
    fn assign_twice() {
        unsafe {
            LateStatic::assign(&ASSIGN_TWICE_TEST, 42);
            LateStatic::assign(&ASSIGN_TWICE_TEST, 37);
        }
    }
    struct Foo {
        pub value: u32,
    }
    static DEREF_CONST_TEST: LateStatic<Foo> = LateStatic::new();
    #[test]
    fn deref_const() {
        unsafe {
            LateStatic::assign(&DEREF_CONST_TEST, Foo { value: 42 });
        }
        assert_eq!(DEREF_CONST_TEST.value, 42);
    }
    // `static mut` is needed for DerefMut below.
    static mut DEREF_MUT_TEST: LateStatic<Foo> = LateStatic::new();
    #[test]
    fn deref_mut() {
        unsafe {
            LateStatic::assign(&DEREF_MUT_TEST, Foo { value: 42 });
            assert_eq!(DEREF_MUT_TEST.value, 42);
            DEREF_MUT_TEST.value = 37;
            assert_eq!(DEREF_MUT_TEST.value, 37);
        }
    }
    static mut DEREF_WITHOUT_VALUE: LateStatic<Foo> = LateStatic::new();
    #[test]
    #[should_panic]
    fn deref_without_value() {
        unsafe {
            #[allow(clippy::no_effect)]
            DEREF_WITHOUT_VALUE.value;
        }
    }
    static mut CLEAR_TEST: LateStatic<Foo> = LateStatic::new();
    #[test]
    fn clear() {
        unsafe {
            LateStatic::assign(&CLEAR_TEST, Foo { value: 42 });
            assert_eq!(CLEAR_TEST.value, 42);
            LateStatic::clear(&CLEAR_TEST);
            assert!(!LateStatic::has_value(&CLEAR_TEST));
        }
    }
    static mut CLEAR_WITHOUT_VALUE: LateStatic<Foo> = LateStatic::new();
    #[test]
    #[should_panic]
    fn clear_without_value() {
        unsafe {
            LateStatic::clear(&CLEAR_WITHOUT_VALUE);
        }
    }
}
|
//! # PubNub Hyper
//!
//! A PubNub client using [`hyper`](hyper) and [`tokio`](tokio) to provide an
//! ultra-fast, incredibly reliable message transport over the PubNub edge
//! network.
//!
//! Uses [`pubnub-core`](pubnub-core) under the hood.
//!
//! # Example
//!
//! ```no_run
//! use futures_util::stream::StreamExt;
//! use pubnub_hyper::runtime::tokio_global::TokioGlobal;
//! use pubnub_hyper::transport::hyper::Hyper;
//! use pubnub_hyper::{core::data::channel, core::json::object, Builder};
//!
//! # async {
//! let transport = Hyper::new()
//! .publish_key("demo")
//! .subscribe_key("demo")
//! .build()?;
//! let mut pubnub = Builder::new()
//! .transport(transport)
//! .runtime(TokioGlobal)
//! .build();
//!
//! let message = object! {
//! "username" => "JoeBob",
//! "content" => "Hello, world!",
//! };
//!
//! let channel_name: channel::Name = "my-channel".parse().unwrap();
//! let mut stream = pubnub.subscribe(channel_name.clone()).await;
//! let timetoken = pubnub.publish(channel_name, message.clone()).await?;
//!
//! let received = stream.next().await;
//! assert_eq!(received.unwrap().json, message);
//! # Ok::<(), Box<dyn std::error::Error>>(())
//! # };
//! ```
#![deny(
clippy::all,
clippy::pedantic,
missing_docs,
missing_debug_implementations,
missing_copy_implementations,
intra_doc_link_resolution_failure
)]
#![allow(clippy::doc_markdown)]
#![forbid(unsafe_code)]
/// Re-export core for ease of use.
pub mod core {
    pub use pubnub_core::*;
}
/// A sensible default variant of the tokio runtime.
pub use crate::runtime::tokio_global::TokioGlobal as DefaultRuntime;
/// A sensible default variant of the hyper transport.
pub use crate::transport::hyper::Hyper as DefaultTransport;
pub use crate::core::Builder;
use crate::core::PubNub as CorePubNub;
/// PubNub client bound to hyper transport and tokio runtime.
pub type PubNub = CorePubNub<DefaultTransport, DefaultRuntime>;
pub mod runtime;
pub mod transport;
#[macro_use]
mod macros;
|
use std::collections::BTreeMap;
use std::io::BufRead;
fn main() {
    // Count how many times each distinct line appears on stdin, then print
    // "count line" pairs in the line text's sorted (BTreeMap) order.
    let mut counts = BTreeMap::new();
    let stdin = std::io::stdin();
    for line_or_error in stdin.lock().lines() {
        let line = line_or_error.unwrap();
        // Bug fix: the original dereferenced the entry without incrementing
        // it, so every printed count stayed at 0.
        *counts.entry(line).or_insert(0) += 1;
    }
    for (line, count) in counts.iter() {
        println!("{} {}", count, line);
    }
}
|
use nom::{
branch::alt,
bytes::complete::{is_a, tag, take_while1, take_while_m_n},
combinator::{map_res, peek},
sequence::preceded,
IResult,
};
use crate::grammar::ast::NumLit;
use crate::grammar::ast::{eq::AstEq, Expression};
use crate::grammar::model::{HasSourceReference, WrightInput};
use crate::grammar::parsers::with_input;
use crate::grammar::tracing::parsers::map;
use crate::grammar::tracing::trace_result;
use std::fmt::Debug;
use std::num::ParseIntError;
impl<T: Debug + Clone> NumLit<T> {
    /// Name used to refer to this parser in traces.
    pub const TRACE_NAME: &'static str = "NumLit";
}
impl<I: WrightInput> NumLit<I> {
    // Internal constructor pairing the parsed value with its source fragment.
    fn new(source: I, num: u128) -> Self {
        Self { source, inner: num }
    }
    /// Convert a number from a string using base 16.
    fn from_hex(input: &str) -> Result<u128, std::num::ParseIntError> {
        u128::from_str_radix(input, 16)
    }
    /// Convert a number from a string using base 10.
    pub(super) fn from_dec(input: &str) -> Result<u128, std::num::ParseIntError> {
        u128::from_str_radix(input, 10)
    }
    /// Convert a number from a string using base 2.
    fn from_bin(input: &str) -> Result<u128, std::num::ParseIntError> {
        u128::from_str_radix(input, 2)
    }
    /// Remove all underscores from a string.
    fn clear_underscores(input: &str) -> String {
        let res = input.replace("_", "");
        res
    }
    /// Parse a properly formatted hexadecimal number.
    ///
    /// Requires a "0x" prefix followed by hex digits with optional underscore
    /// separators; the `peek` ensures the first character after the prefix is
    /// a real digit rather than an underscore.
    fn hex_primary(input: I) -> IResult<I, u128> {
        map_res(
            preceded(
                tag("0x"),
                preceded(
                    peek(take_while_m_n(1, 1, |c: char| c.is_ascii_hexdigit())),
                    take_while1(|c: char| c.is_ascii_hexdigit() || c == '_'),
                ),
            ),
            |source: I| -> Result<u128, ParseIntError> {
                let mut s = String::from(source.into());
                s = Self::clear_underscores(&s);
                Self::from_hex(&s)
            },
        )(input)
    }
    /// Parse a properly formatted binary number.
    ///
    /// Requires a "0b" prefix; the digit characters may be separated by
    /// underscores.
    fn bin_primary(input: I) -> IResult<I, u128> {
        map_res(
            preceded(
                tag("0b"),
                preceded(
                    peek(take_while_m_n(1, 1, |c: char| c == '1' || c == '0')),
                    is_a("10_"),
                ),
            ),
            |source: I| -> Result<u128, ParseIntError> {
                let mut s: String = source.into();
                s = Self::clear_underscores(&s);
                Self::from_bin(&s)
            },
        )(input)
    }
    /// Parse a properly formatted positive decimal integer.
    pub(super) fn dec_primary(input: I) -> IResult<I, u128> {
        map_res(
            preceded(
                peek(take_while_m_n(1, 1, |c: char| c.is_ascii_digit())),
                take_while1(|c: char| c.is_ascii_digit() || c == '_'),
            ),
            |source: I| -> Result<u128, ParseIntError> {
                let mut s = source.into();
                s = Self::clear_underscores(&s);
                Self::from_dec(&s)
            },
        )(input)
    }
    /// Parse a numerical literal to a value.
    ///
    /// Binary and hex forms are tried before decimal so their "0b"/"0x"
    /// prefixes are not mis-parsed as the decimal literal 0.
    pub fn parse(input: I) -> IResult<I, Self> {
        let constructor = |(source, num)| Self::new(source, num);
        let res = alt((
            map(with_input(Self::bin_primary), constructor),
            map(with_input(Self::hex_primary), constructor),
            map(with_input(Self::dec_primary), constructor),
        ))(input.trace_start_clone(Self::TRACE_NAME));
        trace_result(Self::TRACE_NAME, res)
    }
}
impl<I: Debug + Clone> HasSourceReference<I> for NumLit<I> {
    /// The input fragment this literal was parsed from.
    fn get_source_ref(&self) -> &I {
        &self.source
    }
}
/// Wrap a numeric literal in the `Expression` AST node.
///
/// Implemented as `From` (rather than hand-written `Into`) per Rust API
/// guidelines; the standard blanket impl still gives existing callers
/// `Into<Expression<I>>` for free.
impl<I: Debug + Clone> From<NumLit<I>> for Expression<I> {
    fn from(lit: NumLit<I>) -> Expression<I> {
        Expression::NumLit(lit)
    }
}
impl<I: Debug + Clone> AstEq for NumLit<I> {
    // Two numeric literals are AST-equal when their values match; the source
    // fragments they were parsed from are deliberately ignored.
    fn ast_eq(fst: &Self, snd: &Self) -> bool {
        fst.inner == snd.inner
    }
}
|
//! Labels interface
extern crate serde_json;
use self::super::{Github, Result};
/// Interface to the labels API of a single GitHub repository.
pub struct Labels<'a> {
    github: &'a Github,
    owner: String,
    repo: String,
}
impl<'a> Labels<'a> {
    #[doc(hidden)]
    pub fn new<O, R>(github: &'a Github, owner: O, repo: R) -> Labels<'a>
    where
        O: Into<String>,
        R: Into<String>,
    {
        Labels {
            // Field-init shorthand replaces the redundant `github: github`.
            github,
            owner: owner.into(),
            repo: repo.into(),
        }
    }
    /// Build the API path for this repository's labels endpoint.
    fn path(&self, more: &str) -> String {
        format!("/repos/{}/{}/labels{}", self.owner, self.repo, more)
    }
    /// Create a new label.
    pub fn create(&self, lab: &LabelOptions) -> Result<Label> {
        let data = serde_json::to_string(&lab)?;
        self.github.post::<Label>(&self.path(""), data.as_bytes())
    }
    /// Update the label previously named `prevname`.
    pub fn update(&self, prevname: &str, lab: &LabelOptions) -> Result<Label> {
        let data = serde_json::to_string(&lab)?;
        self.github.patch::<Label>(
            &self.path(&format!("/{}", prevname)),
            data.as_bytes(),
        )
    }
    /// Delete the label named `name`, discarding the response body.
    pub fn delete(&self, name: &str) -> Result<()> {
        self.github.delete(&self.path(&format!("/{}", name))).map(
            |_| (),
        )
    }
    /// List all labels in the repository.
    pub fn list(&self) -> Result<Vec<Label>> {
        self.github.get::<Vec<Label>>(&self.path(""))
    }
}
// representations
// Request payload for creating or updating a label.
#[derive(Debug, Serialize)]
pub struct LabelOptions {
    pub name: String,
    // Hex colour code, without the leading '#'. NOTE(review): inferred from
    // the GitHub API convention — confirm callers pass it that way.
    pub color: String,
}
impl LabelOptions {
    /// Convenience constructor accepting anything convertible to `String`.
    pub fn new<N, C>(name: N, color: C) -> LabelOptions
    where
        N: Into<String>,
        C: Into<String>,
    {
        LabelOptions {
            name: name.into(),
            color: color.into(),
        }
    }
}
// A label as returned by the GitHub API.
#[derive(Debug, Serialize, Deserialize)]
pub struct Label {
    pub url: String,
    pub name: String,
    pub color: String,
}
|
use backoff::backoff::Backoff;
use backoff::ExponentialBackoff;
use libp2p::PeerId;
use lru::LruCache;
use std::num::NonZeroUsize;
use std::ops::Add;
use std::time::Instant;
/// Details about temporary ban, used to track lifecycle of retries
#[derive(Debug)]
struct TemporaryBan {
    // Backoff schedule driving successive ban durations.
    backoff: ExponentialBackoff,
    // When the ban lifts; `None` means the backoff produced no further
    // duration and the ban is permanent.
    next_release: Option<Instant>,
}
impl TemporaryBan {
    /// Create new temporary ban
    fn new(backoff: ExponentialBackoff) -> Self {
        let mut instance = Self {
            backoff,
            next_release: None,
        };
        instance.backoff.reset();
        instance.next_release = instance
            .backoff
            .next_backoff()
            .map(|duration| Instant::now().add(duration));
        instance
    }
    /// Whether ban is currently active and not expired
    fn is_active(&self) -> bool {
        if let Some(next_release) = self.next_release {
            next_release > Instant::now()
        } else {
            // `None` means the backoff is exhausted: the ban is permanent.
            true
        }
    }
    /// Extend the temporary ban if it has already expired, do nothing otherwise
    fn try_extend(&mut self) {
        let now = Instant::now();
        if let Some(next_release) = self.next_release {
            if next_release > now {
                // Old ban is still active, no need to extend it
                return;
            }
        } else {
            // Ban is permanent
            return;
        }
        self.next_release = self
            .backoff
            .next_backoff()
            .map(|duration| now.add(duration));
    }
}
/// Collection of temporary bans that help to prevent reaching out to the same peer ID over and
/// over again.
#[derive(Debug)]
pub(crate) struct TemporaryBans {
    // Template backoff, cloned for each newly banned peer.
    backoff: ExponentialBackoff,
    // Bounded LRU map so the ban list cannot grow without limit.
    list: LruCache<PeerId, TemporaryBan>,
}
impl TemporaryBans {
    pub(super) fn new(capacity: NonZeroUsize, backoff: ExponentialBackoff) -> Self {
        Self {
            backoff,
            list: LruCache::new(capacity),
        }
    }
    /// Checks if peer is currently banned.
    ///
    /// `false` means peer either is not banned at all or previous temporary ban has expired and
    /// new connection attempt is allowed to be made.
    pub(crate) fn is_banned(&self, peer_id: &PeerId) -> bool {
        // `peek` rather than `get` so a read-only check does not refresh the
        // entry's LRU position.
        self.list
            .peek(peer_id)
            .map(TemporaryBan::is_active)
            .unwrap_or_default()
    }
    /// Create temporary ban for peer or extend existing ban
    pub(crate) fn create_or_extend(&mut self, peer_id: &PeerId) {
        if let Some(ban) = self.list.get_mut(peer_id) {
            ban.try_extend();
        } else {
            self.list
                .put(*peer_id, TemporaryBan::new(self.backoff.clone()));
        }
    }
    /// Remove temporary ban for peer.
    ///
    /// Returns `true` if there was an entry for peer during call.
    pub(crate) fn remove(&mut self, peer_id: &PeerId) -> bool {
        self.list.pop(peer_id).is_some()
    }
}
|
/// Demonstrates immutable `let` bindings.
pub fn immutable_vars() {
    let person = "Igor";
    let years = 23;
    println!("My name is {} I am {} years old", person, years);
}
/// Demonstrates rebinding through a `mut` variable.
pub fn mutable_vars() {
    let person = "Igor";
    let mut years = 23;
    println!("My name is {} I am {} years old", person, years);
    // Mutation is allowed because `years` was declared `mut`.
    years = 24;
    println!("Next year i will be {} years old", years);
}
/// Demonstrates explicitly annotated primitive types.
pub fn explicit_vars() {
    // 32-bit integers
    let x: i32 = 69472;
    let x2: i32 = 443;
    // 64-bit integer
    let y: i64 = 123123123;
    // 64-bit floating-point number
    let z: f64 = 43.54;
    // character
    let c: char = 'A';
    // boolean value
    let running: bool = true;
    // boolean produced by a comparison expression
    let is_bigger: bool = x > x2;
    println!("{:?}", (x, y, z, c, running, is_bigger));
}
/*
Copyright (c) 2016-2017, Robert Ou <rqou@robertou.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
use std::env;
use std::os::unix::ffi::OsStrExt;
use std::process;
extern crate yavhdl;
use yavhdl::analyzer::*;
use yavhdl::parser;
/// Entry point: analyze one or more VHDL files into a named work library.
/// Usage: `[-e] work_lib_name file1.vhd file2.vhd ...`; `-e` marks the
/// library name as an extended identifier (presumably — confirm against
/// `Identifier::new_*` semantics).
fn main() {
    let args: Vec<_> = env::args_os().collect();
    if args.len() < 3 {
        println!("Usage: {} [-e] work_lib_name file1.vhd, file2.vhd, ...",
            args[0].to_string_lossy());
        process::exit(-1);
    }
    // Construct the state blob
    let mut s = AnalyzerCoreStateBlob::new();
    // Parse the given identifier
    let lib_was_ext_id = &args[1] == "-e";
    let lib_name = if lib_was_ext_id {
        &args[2]
    } else {
        &args[1]
    };
    // If the name parses as UTF-8, treat it as such. Otherwise treat it as
    // Latin-1
    let lib_id = match lib_name.to_str() {
        Some(name_unicode) =>
            Identifier::new_unicode(&mut s.sp, name_unicode, lib_was_ext_id),
        None => {
            // Non-UTF-8 OS string: intern the raw bytes as Latin-1.
            let sp_idx = s.sp.add_latin1_str(lib_name.as_bytes());
            Identifier::new_latin1(&mut s.sp, sp_idx, lib_was_ext_id)
        }
    }.unwrap();
    // Create design database (ultimate container for everything)
    s.design_db.populate_builtins();
    // Create the library
    let work_lib_idx = s.op_l.alloc();
    {
        let work_lib = s.op_l.get_mut(work_lib_idx);
        *work_lib = Library::new(lib_id);
    }
    s.design_db.add_library(lib_id, work_lib_idx);
    // Parse each file (file arguments start after the lib name / -e flag)
    for i in (if lib_was_ext_id {3} else {2})..args.len() {
        println!("Parsing file \"{}\"...", args[i].to_string_lossy());
        let (parse_output, parse_messages) = parser::parse_file(&args[i]);
        if let Some(pt) = parse_output {
            println!("Analyzing file \"{}\"...", args[i].to_string_lossy());
            // Diagnostics are per-file: clear accumulators before analyzing.
            s.errors.clear();
            s.warnings.clear();
            let ret = vhdl_analyze_file(&mut s, &pt, work_lib_idx, &args[i]);
            print!("{}", s.warnings);
            if !ret {
                // An error occurred
                println!("ERRORS occurred during analysis!");
                print!("{}", s.errors);
            }
        } else {
            // Parse failed entirely; show parser messages instead.
            print!("{}", parse_messages);
        }
    }
    println!("{}", s.design_db.debug_print(&s.sp, &s.op_l, &s.op_n, &s.op_s));
}
|
mod direct;
mod rmux;
mod routine;
//mod ws;
use tokio::io::AsyncRead;
use tokio::io::AsyncWrite;
pub use self::routine::routine_channels;
/// A bidirectional async byte stream obtained from a named channel.
pub trait ChannelStream {
    /// Split into independently usable read and write halves.
    /// Both halves borrow `self` and cannot outlive the stream.
    fn split(
        &mut self,
    ) -> (
        Box<dyn AsyncRead + Send + Unpin + '_>,
        Box<dyn AsyncWrite + Send + Unpin + '_>,
    );
    /// Close the underlying transport.
    fn close(&mut self) -> std::io::Result<()>;
}
/// Open a stream to `addr` over the transport selected by `channel`:
/// `"direct"` connects directly, anything else goes through rmux.
pub async fn get_channel_stream(
    channel: String,
    addr: String,
) -> Result<Box<dyn ChannelStream + Send>, std::io::Error> {
    match channel.as_str() {
        "direct" => direct::get_direct_stream(addr).await,
        other => rmux::get_rmux_stream(other, addr).await,
    }
}
|
use std::env;
/// 9cc-style toy compiler entry point: emits x86-64 assembly (Intel syntax)
/// whose `main` returns the integer given as the single CLI argument.
fn main() {
    let mut args = env::args();
    // Expect exactly one argument besides the program name.
    if args.len() != 2 {
        eprintln!("Usage: 9cc <code>");
        // Fix: exit non-zero on usage error (previously returned 0).
        std::process::exit(1);
    }
    // Count was checked above, so the argument must exist.
    let code = args.nth(1).expect("argument count checked above");
    println!(".intel_syntax noprefix");
    println!(".global main");
    println!("main:");
    println!("  mov rax, {}", code);
    println!("  ret");
}
/// Rotate a set of grid points 90 degrees: `(x, y) -> (y, x_max - x)`,
/// where `x_max` is the largest x in the set (0 for an empty set).
///
/// Takes `&[..]` instead of `&Vec<..>` (clippy `ptr_arg`); callers passing
/// `&Vec` still work via deref coercion. Also removes the previously unused
/// `y_ubound` binding.
fn rot_matrix(v: &[(usize, usize)]) -> Vec<(usize, usize)> {
    let x_ubound = v.iter().map(|&(x, _)| x).max().unwrap_or(0);
    v.iter().map(|&(x, y)| (y, x_ubound - x)).collect()
}
/// Largest x and y coordinates in the point set; `(0, 0)` for an empty set.
///
/// Takes `&[..]` instead of `&Vec<..>` (clippy `ptr_arg`); `&Vec` callers
/// still coerce.
fn get_bounds(v: &[(usize, usize)]) -> (usize, usize) {
    v.iter()
        .fold((0, 0), |(mx, my), &(x, y)| (mx.max(x), my.max(y)))
}
/// Convert a 0/1 matrix into the list of `(x, y)` coordinates of its 1-cells,
/// in row-major order (matching the original nested-loop ordering).
///
/// Takes `&[Vec<u16>]` instead of `&Vec<Vec<u16>>` (clippy `ptr_arg`).
fn tuplize(m: &[Vec<u16>]) -> Vec<(usize, usize)> {
    m.iter()
        .enumerate()
        .flat_map(|(y, row)| {
            row.iter()
                .enumerate()
                .filter(|&(_, &cell)| cell == 1)
                .map(move |(x, _)| (x, y))
        })
        .collect()
}
/// Inverse of `tuplize`: build a 0/1 matrix just large enough to contain all
/// points, with 1 at every listed `(x, y)`. An empty input yields a 1x1 zero
/// matrix (same as the original, whose bounds default to (0, 0)).
///
/// Takes `&[..]` instead of `&Vec<..>` (clippy `ptr_arg`).
fn matricize(v: &[(usize, usize)]) -> Vec<Vec<u16>> {
    // Bounds computed inline (max x / max y, defaulting to 0).
    let (x_ubound, y_ubound) = v
        .iter()
        .fold((0, 0), |(mx, my), &(x, y)| (mx.max(x), my.max(y)));
    let mut m = vec![vec![0u16; x_ubound + 1]; y_ubound + 1];
    for &(x, y) in v {
        m[y][x] = 1;
    }
    m
}
/// Render the point set as a 0/1 grid on stdout, one row per line.
fn print_vec(v: &Vec<(usize, usize)>) {
    let (x_ubound, y_ubound) = get_bounds(&v);
    for row in 0..=y_ubound {
        for col in 0..=x_ubound {
            let cell = if v.contains(&(col, row)) { "1" } else { "0" };
            print!("{}", cell);
        }
        println!();
    }
}
/// Print a matrix on stdout, one row per line, cells unseparated.
fn print_matrix(m: &Vec<Vec<u16>>) {
    for row in m {
        for cell in row {
            print!("{}", cell);
        }
        println!();
    }
}
/// Demo: rotate a cross-shaped bitmap three times by 90 degrees and print
/// every orientation.
fn main() {
    let grid = vec![
        vec![0, 0, 1, 0, 0],
        vec![0, 1, 1, 1, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
    ];
    let points = tuplize(&grid);
    println!("{:?}", points);
    let rot1 = rot_matrix(&points);
    let rot2 = rot_matrix(&rot1);
    let rot3 = rot_matrix(&rot2);
    print_vec(&points);
    println!();
    print_vec(&rot1);
    println!();
    print_vec(&rot2);
    println!();
    print_vec(&rot3);
}
#[test]
fn test_ubounds() {
    // Cross shape: the rightmost 1 sits at x=3, the bottommost at y=5.
    let grid = vec![
        vec![0, 0, 1, 0, 0],
        vec![0, 1, 1, 1, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
    ];
    let points = tuplize(&grid);
    let (x_b, y_b) = get_bounds(&points);
    assert_eq!(3, x_b);
    assert_eq!(5, y_b);
}
#[test]
fn test_matricize() {
    // Round-trip the cross shape through tuplize/matricize and print it.
    let grid = vec![
        vec![0, 0, 1, 0, 0],
        vec![0, 1, 1, 1, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
        vec![0, 0, 1, 0, 0],
    ];
    let points = tuplize(&grid);
    let rebuilt = matricize(&points);
    print_matrix(&rebuilt);
}
extern crate opencv;
extern crate time;
use self::opencv::core;
use self::opencv::highgui;
use self::opencv::imgproc;
/// Wraps an OpenCV capture device plus the calibrated threshold bounds and
/// ids for the four tracked colours (thresholding happens in YUV space — see
/// `COLOR_BGR2YUV` in `what_colour`).
pub struct Camera {
    pub cam: highgui::VideoCapture,
    //pub red2_lower: core::Scalar,
    //pub red2_upper: core::Scalar,
    // Lower/upper threshold bounds per colour; 4th scalar channel is unused.
    pub red_lower: core::Scalar,
    pub red_upper: core::Scalar,
    pub green_lower: core::Scalar,
    pub green_upper: core::Scalar,
    pub blue_lower: core::Scalar,
    pub blue_upper: core::Scalar,
    pub yellow_lower: core::Scalar,
    pub yellow_upper: core::Scalar,
    //pub red2: i32,
    // Integer ids for each colour (assigned in `build_camera`).
    pub red: i32,
    pub green: i32,
    pub blue: i32,
    pub yellow: i32,
    // Order in which `what_colour` tests the colour ids.
    pub colours: [i32; 4],
}
impl Camera {
    /// No-op placeholder; the capture device is opened in `build_camera`.
    pub fn init(&mut self) {}
pub fn get_colour(&mut self, visible: bool) -> i32 {
let mat = self.get_frame();
let colour = self.what_colour(mat, visible).unwrap();
return colour;
}
pub fn set_colour(&mut self, visible: bool, colour: i32) -> i32 {
let mat = self.get_frame();
self.pick_colour(mat, visible);
return colour;
}
    /// Return `true` when the current frame's detected colour equals
    /// `colour_to_find`.
    ///
    /// NOTE(review): the `visible` parameter is ignored — `what_colour` is
    /// always called with `false`. Confirm whether it should be forwarded.
    pub fn search_colour(&mut self, colour_to_find: i32, visible: bool) -> bool {
        let mat = self.get_frame();
        let colour = self.what_colour(mat, false).unwrap();
        return colour == colour_to_find;
    }
pub fn dump_bounds(&mut self) {
println!("rl {:?}", self.red_lower);
println!("ru {:?}", self.red_upper);
println!("gl {:?}", self.green_lower);
println!("gu {:?}", self.green_upper);
println!("bl {:?}", self.blue_lower);
println!("bu {:?}", self.blue_upper);
println!("yl {:?}", self.yellow_lower);
println!("yu {:?}", self.yellow_upper);
}
pub fn discard_video(&mut self) {
let _ret = match self.cam.grab() {
Ok(v) => true,
Err(e) => false,
};
highgui::wait_key(1).unwrap();
}
// Colour bounds setters
pub fn set_red_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.red_lower.data[i] = values[i];
}
}
pub fn set_red_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.red_upper.data[i] = values[i];
}
}
pub fn set_green_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.green_lower.data[i] = values[i];
}
}
pub fn set_green_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.green_upper.data[i] = values[i];
}
}
pub fn set_blue_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.blue_lower.data[i] = values[i];
}
}
pub fn set_blue_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.blue_upper.data[i] = values[i];
}
}
pub fn set_yellow_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.yellow_lower.data[i] = values[i];
}
}
pub fn set_yellow_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
self.yellow_upper.data[i] = values[i];
}
}
// Colour bounds getters
pub fn get_red_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.red_lower.data[i];
}
}
pub fn get_red_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.red_upper.data[i];
}
}
pub fn get_green_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.green_lower.data[i];
}
}
pub fn get_green_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.green_upper.data[i];
}
}
pub fn get_blue_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.blue_lower.data[i];
}
}
pub fn get_blue_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.blue_upper.data[i];
}
}
pub fn get_yellow_lower(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.yellow_lower.data[i];
}
}
pub fn get_yellow_upper(&mut self, values: &mut [f64; 4]) {
for i in 0..3 {
values[i] = self.yellow_upper.data[i];
}
}
    /// Calibration helper: crop the 640x80 top strip of `frame`, convert it
    /// to YUV, and print pixel bytes from row 40 via `convert`.
    ///
    /// Always returns `Ok(-999)` unless an OpenCV call fails.
    fn pick_colour(&mut self, frame: core::Mat, visible: bool) -> Result<i32, String> {
        // `ret` is always overwritten with -999 before returning.
        let mut ret = -1;
        let window = "Video Capture";
        if visible {
            try!(highgui::named_window(window, 1));
        }
        // A zero-width frame means the capture produced nothing usable.
        if try!(frame.size()).width == 0 {
            println!("Failed to create camera frame");
            ret = -999;
            return Ok(ret);
        }
        // Only the top strip of the frame is inspected.
        let mut frame2 = try!(core::Mat::rect(
            &frame,
            core::Rect {
                x: 0,
                y: 0,
                width: 640,
                height: 80
            }
        ));
        let mut img_yuv = try!(core::Mat::new());
        try!(imgproc::cvt_color(
            &mut frame2,
            &mut img_yuv,
            imgproc::COLOR_BGR2YUV,
            0
        ));
        // Raw pointer to the start of row 40; `convert` prints bytes from it.
        let line_ptr = try!(img_yuv.ptr0(40));
        self.convert(line_ptr, 640);
        ret = -999;
        return Ok(ret);
    }
    /// Print three consecutive bytes (indices 320..=322) of the row starting
    /// at `ptr`, labelled Y/U/V. Always returns 0.
    ///
    /// NOTE(review): `length` is passed as 640, but a 3-channel YUV row of a
    /// 640-px-wide image holds 1920 bytes, so indices 320..322 may not align
    /// to a single pixel — confirm the intended addressing.
    fn convert(&mut self, ptr: *mut u8, length: u32) -> u32 {
        unsafe {
            // SAFETY: caller must guarantee `ptr` points to at least `length`
            // valid bytes; here it comes from `Mat::ptr0` in `pick_colour`
            // (assumed valid for 640 bytes — see note above).
            let buf: &[u8] = std::slice::from_raw_parts(ptr, length as usize);
            println!("Pixel Y{:?}, U{:?}, V{:?}", buf[320], buf[321], buf[322]);
            0
        }
    }
fn get_frame(&mut self) -> core::Mat {
let mut frame = core::Mat::new().unwrap();
self.cam.read(&mut frame).unwrap();
return frame;
}
fn what_colour(&mut self, frame: core::Mat, visible: bool) -> Result<i32, String> {
//let now = Instant::now();
//println!("Start {:#?}",Instant::now().duration_since(now));
let mut ret = -1;
let window = "Video Capture";
if visible {
try!(highgui::named_window(window, 1));
}
let window2 = "Overlay";
if visible {
try!(highgui::named_window(window2, 1));
}
//println!("Now {:#?}",Instant::now().duration_since(now));
if try!(frame.size()).width == 0 {
println!("Failed to create camera frame");
let ret = -999;
return Ok(ret);
}
//println!("Now {:#?}",Instant::now().duration_since(now));
let mut frame2 = try!(core::Mat::rect(
&frame,
core::Rect {
x: 0,
y: 0,
width: 640,
height: 80
}
));
let mut img_hsv = try!(core::Mat::new());
//try!(imgproc::cvt_color(&mut frame2, &mut img_hsv, imgproc::COLOR_BGR2HSV, 0));
try!(imgproc::cvt_color(
&mut frame2,
&mut img_hsv,
imgproc::COLOR_BGR2YUV,
0
));
let mut img_thresholded = try!(core::Mat::new());
//println!("Now {:#?}",Instant::now().duration_since(now));
for colour in self.colours.iter() {
let mut _img_final = try!(core::Mat::new());
if *colour == self.red {
let img_lower = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.red_lower
));
let img_upper = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.red_upper
));
try!(core::in_range(
&mut img_hsv,
&img_lower,
&img_upper,
&mut img_thresholded
));
} else if *colour == self.green {
let img_lower = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.green_lower
));
let img_upper = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.green_upper
));
try!(core::in_range(
&mut img_hsv,
&img_lower,
&img_upper,
&mut img_thresholded
));
} else if *colour == self.blue {
let img_lower = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.blue_lower
));
let img_upper = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.blue_upper
));
try!(core::in_range(
&mut img_hsv,
&img_lower,
&img_upper,
&mut img_thresholded
));
} else if *colour == self.yellow {
let img_lower = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.yellow_lower
));
let img_upper = try!(core::Mat::new_size_with_default(
try!(img_hsv.size()),
try!(img_hsv.typ()),
self.yellow_upper
));
try!(core::in_range(
&mut img_hsv,
&img_lower,
&img_upper,
&mut img_thresholded
));
}
let kernel = try!(imgproc::get_structuring_element(
imgproc::MORPH_ELLIPSE,
core::Size {
width: 5,
height: 5
},
core::Point { x: -1, y: -1 }
));
let border_value = try!(imgproc::morphology_default_border_value());
let mut img_dilated = try!(core::Mat::new());
let mut img_eroded = try!(core::Mat::new());
let mut img_final = try!(core::Mat::new());
//morphological opening (removes small objects from the foreground)
try!(imgproc::erode(
&mut img_thresholded,
&mut img_eroded,
&kernel,
core::Point { x: -1, y: -1 },
1,
imgproc::BORDER_CONSTANT,
border_value
));
try!(imgproc::dilate(
&mut img_eroded,
&mut img_dilated,
&kernel,
core::Point { x: -1, y: -1 },
1,
imgproc::BORDER_CONSTANT,
border_value
));
//morphological closing (removes small holes from the foreground)
try!(imgproc::dilate(
&mut img_dilated,
&mut img_eroded,
&kernel,
core::Point { x: -1, y: -1 },
1,
imgproc::BORDER_CONSTANT,
border_value
));
try!(imgproc::erode(
&mut img_eroded,
&mut img_final,
&kernel,
core::Point { x: -1, y: -1 },
1,
imgproc::BORDER_CONSTANT,
border_value
));
let result = imgproc::moments(&mut img_final, false);
assert!(result.is_ok());
if visible {
try!(highgui::imshow(window2, &mut img_final));
}
let moments = result.unwrap();
let area = moments.m00;
//println!("Area {:#?}",area);
if area > 10000f64 {
if *colour == self.red {
try!(core::rectangle(
&mut frame2,
core::Rect {
x: 0,
y: 0,
width: 30,
height: 30
},
core::Scalar {
data: [0f64, 0f64, 255f64, -1f64]
},
-1,
8,
0
));
ret = self.red;
break;
} else if *colour == self.green {
try!(core::rectangle(
&mut frame2,
core::Rect {
x: 0,
y: 0,
width: 30,
height: 30
},
core::Scalar {
data: [0f64, 255f64, 0f64, -1f64]
},
-1,
8,
0
));
ret = self.green;
break;
} else if *colour == self.blue {
try!(core::rectangle(
&mut frame2,
core::Rect {
x: 0,
y: 0,
width: 30,
height: 30
},
core::Scalar {
data: [255f64, 0f64, 0f64, -1f64]
},
-1,
8,
0
));
ret = self.blue;
break;
} else if *colour == self.yellow {
try!(core::rectangle(
&mut frame2,
core::Rect {
x: 0,
y: 0,
width: 30,
height: 30
},
core::Scalar {
data: [0f64, 255f64, 255f64, -1f64]
},
-1,
8,
0
));
ret = self.yellow;
break;
}
}
}
if visible {
try!(highgui::imshow(window, &mut frame2));
try!(highgui::wait_key(300));
}
//println!("Now {:#?}",Instant::now().duration_since(now));
Ok(ret)
}
}
/// Construct a `Camera` on video device 0 with hard-coded YUV colour
/// calibration bounds (the 4th scalar channel is unused, hence -1).
///
/// Panics if the capture device cannot be opened.
///
/// Fixes: `cam` no longer declared `mut` (it was never mutated here, only
/// moved), and a large block of stale commented-out calibration values was
/// removed.
pub fn build_camera() -> Camera {
    let cam = highgui::VideoCapture::device(0).unwrap();
    // Colour ids; `colours` fixes the order in which `what_colour` tests them.
    let red = 0;
    let blue = 1;
    let yellow = 2;
    let green = 3;
    let colours = [red, blue, yellow, green];
    let red_lower = core::Scalar {
        data: [118f64, 173f64, 84f64, -1f64],
    };
    let red_upper = core::Scalar {
        data: [148f64, 203f64, 114f64, -1f64],
    };
    let blue_lower = core::Scalar {
        data: [210f64, 10f64, 139f64, -1f64],
    };
    let blue_upper = core::Scalar {
        data: [240f64, 40f64, 169f64, -1f64],
    };
    let yellow_lower = core::Scalar {
        data: [190f64, 116f64, 114f64, -1f64],
    };
    let yellow_upper = core::Scalar {
        data: [220f64, 146f64, 144f64, -1f64],
    };
    let green_lower = core::Scalar {
        data: [204f64, 107f64, 104f64, -1f64],
    };
    let green_upper = core::Scalar {
        data: [234f64, 137f64, 134f64, -1f64],
    };
    Camera {
        cam,
        red_lower,
        red_upper,
        green_lower,
        green_upper,
        blue_lower,
        blue_upper,
        yellow_lower,
        yellow_upper,
        red,
        green,
        blue,
        yellow,
        colours,
    }
}
|
use std::io;
/// Read one line from stdin (trailing newline included).
/// Panics on a stdin read error.
fn get_line() -> String {
    let mut buf = String::new();
    io::stdin().read_line(&mut buf).unwrap();
    buf
}
/// Read a line of parentheses and print the resulting floor:
/// '(' goes up one, ')' goes down one, everything else is ignored.
fn main() {
    let input = get_line();
    let mut floor: i32 = 0;
    for c in input.chars() {
        match c {
            '(' => floor += 1,
            ')' => floor -= 1,
            _ => {}
        }
    }
    println!("Floor: {}", floor);
}
|
use std::fmt;
use super::{Condition, debug_predicates};
/// An SQL-style SELECT over a single table.
#[derive(PartialEq, Clone)]
pub struct SelectQuery<T: fmt::Debug> {
    // Table the query selects from
    pub table_name: String,
    // Columns to project
    pub columns: Vec<T>,
    // Optional WHERE clause; `None` means no predicate
    pub predicates: Option<Condition>
}
impl<T: fmt::Debug> SelectQuery<T> {
    /// Build a select query over `table_name`, projecting `columns` and
    /// optionally filtered by `predicates`.
    pub fn new<I: Into<String>>(table_name: I, columns: Vec<T>, predicates: Option<Condition>) -> SelectQuery<T> {
        let table_name = table_name.into();
        SelectQuery {
            table_name,
            columns,
            predicates,
        }
    }
}
impl<T: fmt::Debug> fmt::Debug for SelectQuery<T> {
    /// Renders the query in the crate's debug notation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "statement: 'select', tables: [<name: '{}'>], columns: {:?}, where: {}",
            self.table_name,
            self.columns,
            debug_predicates(&self.predicates)
        )
    }
}
|
use crate::errors::ResT;
use crate::hbs::{PrjCreateData, PmvdData};
use reqwest::{Client, RequestBuilder, StatusCode};
use handlebars::Handlebars;
use xml::escape::escape_str_attribute;
use std::time::Duration;
use std::io::{Error, ErrorKind};
/// Build an HTTP client with a five-minute request timeout.
pub fn get_client() -> ResT<Client> {
    let client = Client::builder()
        .timeout(Duration::from_secs(5 * 60))
        .build()?;
    Ok(client)
}
// Markers used to scrape the status out of the SOAP response body.
const RES_STATUS: &str = "<return><status>";
// NOTE: must stay equal to RES_STATUS.len(); kept as a literal (presumably
// for older compilers without const `str::len`).
const RES_STATUS_LEN: usize = 16; // RES_STATUS.len();
const RES_STATUS_SUCCESS: &str = "SUCCESS";
// NOTE: must stay equal to RES_STATUS_SUCCESS.len().
const RES_STATUS_SUCCESS_LEN: usize = 7; // RES_STATUS_SUCCESS.len();
/// Extract the status token that immediately follows `"<return><status>"`
/// in `response_body` (always `RES_STATUS_SUCCESS_LEN` characters).
///
/// Errors if the marker is absent or the body is too short / not on a char
/// boundary at the slice point. Parameter changed from `&String` to `&str`
/// (clippy `ptr_arg`); `&String` callers still coerce.
fn get_return_status(response_body: &str) -> ResT<String> {
    let idx = response_body
        .find(RES_STATUS)
        .ok_or_else(|| Error::new(ErrorKind::Other, "Status start"))?
        + RES_STATUS_LEN;
    let result = response_body
        .get(idx..(idx + RES_STATUS_SUCCESS_LEN))
        .ok_or_else(|| Error::new(ErrorKind::Other, "Status get"))?
        .to_string();
    Ok(result)
}
/// Build an authenticated POST to `http://localhost:18080/cbapi/<name>?wsdl`.
///
/// NOTE(review): no Content-Type header is set; the leftover "text/xml"
/// hints suggest one may be required — confirm against the service.
fn post_wsdl(client: &Client, name: &str) -> RequestBuilder {
    // ? "text/xml"
    let url = format!("http://localhost:18080/cbapi/{}?wsdl", name);
    client.post(&url)
        .basic_auth("admin", Some("admin"))
    // ? "text/xml"
}
/// Create a project named `prj_name` for language `lang_id` via the SOAP API.
///
/// Renders the `prjCreate` template, POSTs it to the `project` endpoint, and
/// panics (via `assert!`) unless the HTTP status is 200 and the body reports
/// SUCCESS. Returns the raw response body.
pub fn prj_create(client: &Client, hbs: &Handlebars, lang_id: &str, prj_name: &str) -> ResT<String> {
    let req_data = PrjCreateData { lang_id: lang_id.to_string(), prj_name: prj_name.to_string(), };
    let req_body = hbs.render("prjCreate", &req_data)?;
    // println!("prjCreate request body:\n {}", req_body);
    let mut resp = post_wsdl(client, "project")
        .body(req_body)
        .send()?;
    assert!(resp.status() == StatusCode::OK);
    let resp_text = resp.text()?;
    let result_status = get_return_status(&resp_text)?;
    assert!(result_status == RES_STATUS_SUCCESS);
    Ok(resp_text)
}
/// Send `verbatim_text` (XML-attribute-escaped) to the `realtime` endpoint
/// for project `prj_name` using the `pmvd` template.
///
/// On failure the request body and response are dumped to stdout before the
/// `assert!` aborts. Returns the raw response body.
pub fn pmvd(client: &Client, hbs: &Handlebars, prj_name: &str, verbatim_text: &str) -> ResT<String> {
    // Escape so the text can be embedded as an XML attribute value.
    let verb_text: String = escape_str_attribute(verbatim_text).to_string();
    let req_data = PmvdData { prj_name: prj_name.to_string(), verbatim_text: verb_text };
    let req_body = hbs.render("pmvd", &req_data)?;
    let mut resp = post_wsdl(client, "realtime")
        .body(req_body.to_owned())
        .send()?;
    let resp_text = resp.text()?;
    if resp.status() != StatusCode::OK {
        // Dump diagnostics before failing hard below.
        println!("status: {}", resp.status());
        println!("pmvd request body:\n {}", &req_body);
        println!("resp_text:\n {}", resp_text);
        assert!(resp.status() == StatusCode::OK);
    }
    let result_status = get_return_status(&resp_text)?;
    if result_status != RES_STATUS_SUCCESS {
        println!("result status: {}", result_status);
        println!("pmvd request body:\n {}", &req_body);
        println!("resp_text:\n {}", resp_text);
        assert!(result_status == RES_STATUS_SUCCESS);
    }
    Ok(resp_text)
}
|
extern crate skim;
use skim::prelude::*;
use std::io::Cursor;
/// `nth` option is supported by SkimItemReader.
/// In the example below, with `nth=2` set, only `123` could be matched.
pub fn main() {
    let input = "foo 123";
    let options = SkimOptionsBuilder::default()
        .query(Some("f"))
        .build()
        .unwrap();
    let reader_opts = SkimItemReaderOption::default().nth("2").build();
    let item_reader = SkimItemReader::new(reader_opts);
    let items = item_reader.of_bufread(Cursor::new(input));
    let selected_items = Skim::run_with(&options, Some(items))
        .map(|out| out.selected_items)
        .unwrap_or_default();
    for item in selected_items.iter() {
        println!("{}", item.output());
    }
}
|
#![cfg_attr(feature = "cargo-clippy", allow(clippy::field_reassign_with_default))]
#![recursion_limit = "128"]
#[macro_use]
extern crate lazy_static;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate more_asserts;
#[macro_use]
mod common;
mod test_code_search;
mod test_phrase;
mod test_query_generator;
mod test_scores;
mod test_why_found;
mod tests;
mod tests_facet;
mod tests_large;
mod tests_minimal;
|
extern crate log;
extern crate bitflags;
extern crate rust_embed;
pub mod analysis;
pub mod arch;
pub mod basicblock;
pub mod flowmeta;
pub mod loader;
pub mod loaders;
pub mod util;
pub mod workspace;
pub mod xref;
pub mod pagemap;
pub use basicblock::BasicBlock;
pub use xref::Xref;
pub use workspace::Workspace;
// helpers that are useful during doctests, tests.
//#[cfg(feature="test")]
pub mod test;
pub mod rsrc;
|
/// Every operator / protocol hook the runtime dispatches on.
/// `R`-prefixed variants are the reflected forms (their display names carry
/// an `r` prefix — see `Operator::name`).
#[derive(Copy, Clone, FromPrimitive, Hash, PartialEq, Eq, Debug)]
pub enum Operator {
    // Arithmetic
    Add,
    RAdd,
    Subtract,
    RSubtract,
    USubtract,
    Multiply,
    RMultiply,
    Divide,
    RDivide,
    FloorDiv,
    RFloorDiv,
    Power,
    RPower,
    // Comparisons
    Equals,
    REquals,
    NotEquals,
    RNotEquals,
    GreaterThan,
    RGreaterThan,
    LessThan,
    RLessThan,
    GreaterEqual,
    RGreaterEqual,
    LessEqual,
    RLessEqual,
    // Bitwise
    LeftBitshift,
    RLeftBitshift,
    RightBitshift,
    RRightBitshift,
    BitwiseAnd,
    RBitwiseAnd,
    BitwiseOr,
    RBitwiseOr,
    BitwiseXor,
    RBitwiseXor,
    BitwiseNot,
    Modulo,
    RModulo,
    // Attribute / item access
    GetAttr,
    GetSlice,
    SetAttr,
    SetSlice,
    Call,
    Compare,
    RCompare,
    Iter,
    IterSlice,
    New,
    In,
    Missing,
    Del,
    DelAttr,
    DelSlice,
    // Conversion / protocol hooks
    Str,
    Repr,
    Bool,
    Int,
    Reversed,
    Hash,
    Enter,
    Exit,
}
impl Operator {
    /// Human-readable name for the operator, e.g. `Add => "operator +"`;
    /// reflected variants get an `r` prefix on the symbol.
    pub fn name(self) -> &'static str {
        match self {
            Operator::Add => "operator +",
            Operator::RAdd => "operator r+",
            Operator::Subtract => "operator -",
            Operator::RSubtract => "operator r-",
            Operator::USubtract => "operator u-",
            Operator::Multiply => "operator *",
            Operator::RMultiply => "operator r*",
            Operator::Divide => "operator /",
            Operator::RDivide => "operator r/",
            Operator::FloorDiv => "operator //",
            Operator::RFloorDiv => "operator r//",
            Operator::Power => "operator **",
            Operator::RPower => "operator r**",
            Operator::Equals => "operator ==",
            Operator::REquals => "operator r==",
            Operator::NotEquals => "operator !=",
            Operator::RNotEquals => "operator r!=",
            Operator::GreaterThan => "operator >",
            Operator::RGreaterThan => "operator r>",
            Operator::LessThan => "operator <",
            Operator::RLessThan => "operator r<",
            Operator::GreaterEqual => "operator >=",
            Operator::RGreaterEqual => "operator r>=",
            Operator::LessEqual => "operator <=",
            Operator::RLessEqual => "operator r<=",
            Operator::LeftBitshift => "operator <<",
            Operator::RLeftBitshift => "operator r<<",
            Operator::RightBitshift => "operator >>",
            Operator::RRightBitshift => "operator r>>",
            Operator::BitwiseAnd => "operator &",
            Operator::RBitwiseAnd => "operator r&",
            Operator::BitwiseOr => "operator |",
            Operator::RBitwiseOr => "operator r|",
            Operator::BitwiseXor => "operator ^",
            Operator::RBitwiseXor => "operator r^",
            Operator::BitwiseNot => "operator ~",
            Operator::Modulo => "operator %",
            Operator::RModulo => "operator r%",
            Operator::GetAttr => "operator []",
            Operator::GetSlice => "operator [:]",
            Operator::SetAttr => "operator []=",
            Operator::SetSlice => "operator [:]=",
            Operator::Call => "operator ()",
            Operator::Compare => "operator <=>",
            Operator::RCompare => "operator r<=>",
            Operator::Iter => "operator iter",
            Operator::IterSlice => "operator iter[:]",
            Operator::New => "operator new",
            Operator::In => "operator in",
            Operator::Missing => "operator missing",
            Operator::Del => "operator del",
            Operator::DelAttr => "operator del[]",
            Operator::DelSlice => "operator del[:]",
            Operator::Str => "operator str",
            Operator::Repr => "operator repr",
            Operator::Bool => "operator bool",
            Operator::Int => "operator int",
            Operator::Reversed => "operator reversed",
            Operator::Hash => "operator hash",
            Operator::Enter => "operator enter",
            Operator::Exit => "operator exit",
        }
    }
}
|
//! Network channels
use crate::jsonrpc;
use std::fmt::{self, Display};
/// Individual network channel (from `/net_info`).
///
/// All `u64` fields are serialized/deserialized as JSON strings via the
/// `jsonrpc::*_u64_string` helpers.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Channel {
    /// Channel ID
    #[serde(rename = "ID")]
    pub id: Id,
    /// Capacity of the send queue
    #[serde(
        rename = "SendQueueCapacity",
        serialize_with = "jsonrpc::serialize_u64_string",
        deserialize_with = "jsonrpc::deserialize_u64_string"
    )]
    pub send_queue_capacity: u64,
    /// Size of the send queue
    #[serde(
        rename = "SendQueueSize",
        serialize_with = "jsonrpc::serialize_u64_string",
        deserialize_with = "jsonrpc::deserialize_u64_string"
    )]
    pub send_queue_size: u64,
    /// Priority value
    #[serde(
        rename = "Priority",
        serialize_with = "jsonrpc::serialize_u64_string",
        deserialize_with = "jsonrpc::deserialize_u64_string"
    )]
    pub priority: u64,
    /// Amount of data recently sent
    #[serde(
        rename = "RecentlySent",
        serialize_with = "jsonrpc::serialize_u64_string",
        deserialize_with = "jsonrpc::deserialize_u64_string"
    )]
    pub recently_sent: u64,
}
/// Channel IDs (newtype over the raw `u64` identifier)
#[derive(Copy, Clone, Debug, Deserialize, Serialize)]
pub struct Id(pub u64);
impl Id {
    /// Get the channel ID's underlying integer value.
    /// (Previous doc said "voting power" — an apparent copy-paste error.)
    pub fn value(self) -> u64 {
        self.0
    }
}
impl From<Id> for u64 {
    /// Unwrap the newtype into its raw integer.
    fn from(id: Id) -> u64 {
        id.0
    }
}
impl From<u64> for Id {
    /// Wrap a raw integer into the `Id` newtype.
    fn from(value: u64) -> Id {
        Id(value)
    }
}
/// Channels (from `/status`) — kept as the raw string reported by the node
#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]
pub struct Channels(String);
impl Display for Channels {
    /// Delegate to the inner string's `Display`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Display::fmt(&self.0, f)
    }
}
|
use num::integer::{binomial, div_ceil, gcd, mod_floor, Integer, Roots};
/// Cheat-sheet of `num` crate integer utilities (comments translated from
/// Japanese).
fn main() {
    // Integer square root: sqrt(target)-1 < ans <= sqrt(target) < ans+1
    println!("{}, {}, {}", 24.sqrt(), 25.sqrt(), 26.sqrt());
    // Greatest common divisor
    println!("{}", gcd(24, 36));
    // Modulo that is always non-negative
    println!("{} {}", (-34i64).rem_euclid(7), mod_floor(-34, 7));
    // Ceiling division (quotient rounded toward the excess)
    println!("{}", div_ceil(17, 6));
    // Deficit (-m < ans <= 0)
    println!(
        "{},{}",
        17 - 6 * div_ceil(17, 6),
        17 - 17.next_multiple_of(&6)
    );
    // Round down / up to the nearest multiple
    // (requires `use num::integer::Integer`)
    println!("{}, {}", 17.prev_multiple_of(&10), 17.next_multiple_of(&10));
    // nCr
    println!("{}", binomial(5, 3));
    // popcount
    println!("{}", 7u64.count_ones());
    // Bezout's identity
    // (requires `use num::integer::Integer`)
    let ans = isize::extended_gcd(&6, &10);
    println!("6*{} + 10*{} = {}", ans.x, ans.y, ans.gcd);
}
|
use crate::inputs::Cursor;
use crate::util::{
AnchorEdge, Bezier, BoundingBoxQuad, ControlPointQuad, EndpointQuad, FollowBezierAnimation,
Globals, GrandParent, Group, GroupMiddleQuad, MiddlePointQuad, MyShader, TurnRoundAnimation,
UiAction, UiBoard,
};
use bevy::prelude::*;
/// Drag the whole UI board while it is in the `MovingUi` state.
pub fn move_ui(
    cursor: ResMut<Cursor>,
    mut ui_query: Query<(&mut Transform, &mut UiBoard), With<GrandParent>>,
) {
    for (mut transform, ui_board) in ui_query.iter_mut() {
        if ui_board.action != UiAction::MovingUi {
            continue;
        }
        // Preserve depth (z) while translating in the plane.
        let z_pos = transform.translation.z;
        transform.translation = ui_board.previous_position.extend(z_pos)
            + cursor.pos_relative_to_click.extend(0.0);
    }
}
/// Animate the "middle point" quads along their Bezier curves, advancing
/// each quad's parameter with time so they travel around the curve.
pub fn move_middle_quads(
    time: Res<Time>,
    bezier_curves: ResMut<Assets<Bezier>>,
    mut my_shader_params: ResMut<Assets<MyShader>>,
    mut query: Query<
        (&mut GlobalTransform, &Handle<Bezier>, &Handle<MyShader>),
        With<MiddlePointQuad>,
    >,
    globals: ResMut<Globals>,
) {
    let number_of_bezier_curves = bezier_curves.len();
    let num_points = globals.num_points_on_curve + 1;
    // Evenly spaced parameter values, one per quad across all curves.
    let vrange: Vec<f32> = (0..num_points * number_of_bezier_curves)
        .map(|x| ((x) as f32) / (num_points as f32 - 1.0))
        .collect();
    for (handle_id, bezier) in bezier_curves.iter() {
        //
        let curve = bezier.to_curve();
        // Pair each quad with its base parameter `t`.
        for ((mut transform, bezier_handle, shader_params_handle), t) in
            query.iter_mut().zip(vrange.clone())
        {
            // Only move quads that belong to this curve.
            if handle_id == bezier_handle.id {
                //
                let mut shader_params = my_shader_params.get_mut(shader_params_handle).unwrap();
                // Advance the parameter with time, wrapping at 1.0.
                let t_time = (t as f64 + time.seconds_since_startup() * 0.1) % 1.0;
                shader_params.t = t_time as f32;
                // let idx_f64 = t_time * (bezier.lut.len() - 1) as f64;
                // let p1 = bezier.lut[(idx_f64 as usize)];
                // let p2 = bezier.lut[idx_f64 as usize + 1];
                // //
                // // TODO: is the minus one useful here?
                // let rem = (idx_f64 - 1.0) % 1.0;
                // let t_distance = interpolate(p1, p2, rem);
                use flo_curves::bezier::BezierCurve;
                // Reparametrize by arc length so motion speed looks uniform.
                let t_distance = bezier.compute_real_distance(t_time);
                let pos = curve.point_at_pos(t_distance);
                transform.translation.x = pos.0 as f32;
                transform.translation.y = pos.1 as f32;
            }
        }
    }
}
/// Animate the middle quads of each curve group, spacing them evenly along
/// the group's combined path and advancing them with time.
pub fn move_group_middle_quads(
    time: Res<Time>,
    bezier_curves: ResMut<Assets<Bezier>>,
    mut my_shader_params: ResMut<Assets<MyShader>>,
    mut query: Query<(
        &mut GlobalTransform,
        &Handle<Group>,
        &Handle<MyShader>,
        &GroupMiddleQuad,
    )>,
    // globals: ResMut<Globals>,
    groups: ResMut<Assets<Group>>,
) {
    let mut t = 0.0;
    // println!("START:");
    if let Some(last_handle_tuple) = groups.iter().next() {
        let mut last_handle_id = last_handle_tuple.0;
        for (mut transform, group_handle, shader_params_handle, GroupMiddleQuad(num_quads)) in
            query.iter_mut()
        {
            // Restart the running parameter whenever we move to a new group.
            if group_handle.id != last_handle_id {
                t = 0.0;
                last_handle_id = group_handle.id;
            }
            // Space this group's quads evenly in (0, 1].
            t = t + 1.0 / (num_quads.clone() as f32);
            let mut shader_params = my_shader_params.get_mut(shader_params_handle).unwrap();
            // println!("groups handle: {:?}", group_handle);
            let group = groups.get(group_handle).unwrap();
            // Advance with time, wrapping at 1.0.
            let t_time = (t as f64 + time.seconds_since_startup() * 0.02) % 1.0;
            shader_params.t = t_time as f32;
            // println!("time: {:?}", t_time);
            let pos = group.compute_position_with_bezier(&bezier_curves, t_time);
            // let pos = group.compute_position_with_lut(t_time as f32);
            transform.translation.x = pos.x;
            transform.translation.y = pos.y;
        }
    }
}
/// Bevy system: keeps each curve's bounding-box quad centered on its curve's
/// bounding box and sized 10% larger than it, updating the quad mesh and the
/// shader's size uniform to match.
pub fn move_bb_quads(
    mut bezier_curves: ResMut<Assets<Bezier>>,
    mut query: Query<(
        &mut GlobalTransform,
        &Handle<Bezier>,
        &Handle<Mesh>,
        &Handle<MyShader>,
        &BoundingBoxQuad,
    )>,
    mut meshes: ResMut<Assets<Mesh>>,
    mut my_shader_params: ResMut<Assets<MyShader>>,
) {
    for (mut transform, bezier_handle, mesh_handle, shader_params_handle, _bbquad) in
        query.iter_mut()
    {
        let bezier = bezier_curves.get_mut(bezier_handle).unwrap();
        let mesh = meshes.get_mut(mesh_handle).unwrap();
        let mut shader_params = my_shader_params.get_mut(shader_params_handle).unwrap();
        // bound0/bound1 are opposite corners of the curve's bounding box.
        let (bound0, bound1) = bezier.bounding_box();
        // makes the quad bigger than the bounding box so that we can have smooth edges made in the shader
        let bigger_size = (bound1 - bound0) * 1.1;
        // Center of the bounding box.
        let bb_pos = (bound1 + bound0) / 2.0;
        // println!("{:?}, {:?}", bb_size,);
        transform.translation = bb_pos.extend(transform.translation.z);
        shader_params.size = bigger_size;
        // TODO: change the transform scale instead of the mesh
        // NOTE(review): rebuilding the mesh every frame is wasteful; kept as-is
        // pending the TODO above.
        let new_mesh = Mesh::from(shape::Quad {
            size: bigger_size,
            flip: false,
        });
        *mesh = new_mesh;
    }
}
/// Bevy system: snaps each endpoint quad onto its curve's start/end anchor
/// (plus a scale-dependent displacement) whenever the quad's transform has
/// drifted away from the position stored in the `Bezier` asset.
pub fn move_end_quads(
    mut bezier_curves: ResMut<Assets<Bezier>>,
    mut query: Query<(&mut GlobalTransform, &Handle<Bezier>, &EndpointQuad)>,
    globals: Res<Globals>,
) {
    for (mut transform, bezier_handle, endpoint_quad_id) in query.iter_mut() {
        // Which end of the curve this quad marks (Start or End).
        let EndpointQuad(point) = endpoint_quad_id;
        //
        // checks whether the transforms are equal to the positions in the Bezier data structure
        if let Some(bezier) = bezier_curves.get_mut(bezier_handle) {
            if (*point == AnchorEdge::Start
                && transform.translation.truncate() != bezier.positions.start)
                || (*point == AnchorEdge::End
                    && transform.translation.truncate() != bezier.positions.end)
            {
                // Displacement/rotation for both ends, scaled by the global zoom.
                let ((start_displacement, end_displacement), (start_rotation, end_rotation)) =
                    bezier.ends_displacement(globals.scale);
                if *point == AnchorEdge::Start {
                    transform.translation = (bezier.positions.start + start_displacement)
                        .extend(transform.translation.z);
                    transform.rotation = start_rotation;
                } else {
                    transform.translation =
                        (bezier.positions.end + end_displacement).extend(transform.translation.z);
                    transform.rotation = end_rotation;
                }
            }
        }
    }
}
/// Bevy system: keeps each control-point quad positioned on its curve's
/// control point and oriented perpendicular to the control arm (the segment
/// from anchor to control point). The transform is only rewritten when it
/// has drifted from the target position or angle.
pub fn move_control_quads(
    mut bezier_curves: ResMut<Assets<Bezier>>,
    mut query: Query<(&mut GlobalTransform, &Handle<Bezier>, &ControlPointQuad)>,
) {
    for (mut transform, bezier_handle, ControlPointQuad(edge)) in query.iter_mut() {
        // Silently skip quads whose curve asset is not (yet) loaded.
        let bezier = match bezier_curves.get_mut(bezier_handle) {
            Some(b) => b,
            None => continue,
        };
        // Select the control/anchor pair this quad is attached to.
        let (control_point, anchor_point): (Vec2, Vec2) = if *edge == AnchorEdge::Start {
            (bezier.positions.control_start, bezier.positions.start)
        } else {
            (bezier.positions.control_end, bezier.positions.end)
        };
        // Both ends use the same quarter-turn so the quad sits perpendicular
        // to the control arm.
        let quarter_turn = std::f32::consts::PI / 2.0;
        let arm = control_point - anchor_point;
        let target_angle = arm.y.atan2(arm.x) + quarter_turn;
        let (_axis, current_angle) = transform.rotation.to_axis_angle();
        // Detect positional drift and angular drift (beyond ~0.01 rad).
        let misplaced = transform.translation.truncate() != control_point;
        let misrotated = !((current_angle.abs() - target_angle.abs()).abs() < 0.01);
        if misplaced || misrotated {
            transform.translation = control_point.extend(transform.translation.z);
            transform.rotation = Quat::from_rotation_z(target_angle);
        }
    }
}
////////// helicopter animation
//
// animates the helicopter blades
/// Bevy system: spins every `TurnRoundAnimation`-tagged entity by a constant
/// 0.2 rad around the z axis each time the system runs (the helicopter
/// blades' rotor effect).
pub fn turn_round_animation(mut query: Query<(&mut Transform, &TurnRoundAnimation)>) {
    // The per-tick increment never changes, so build the quaternion once.
    let step = Quat::from_rotation_z(0.2);
    for (mut transform, _marker) in query.iter_mut() {
        transform.rotate(step);
    }
}
////////// helicopter animation
//
// moves the helicopter along the Group path
/// Bevy system: moves `FollowBezierAnimation` entities along the first Group's
/// path, offset sideways by a fixed "road line" distance, and turns them
/// gradually toward their direction of travel (rotation speed capped at
/// 3°/tick). Also makes all follower/blade entities visible.
pub fn follow_bezier_group(
    mut query: Query<(&mut Transform, &FollowBezierAnimation)>,
    mut visible_query: Query<
        &mut Visible,
        Or<(With<FollowBezierAnimation>, With<TurnRoundAnimation>)>,
    >,
    groups: Res<Assets<Group>>,
    curves: ResMut<Assets<Bezier>>,
    time: Res<Time>,
) {
    // Only the first group found is followed; nothing happens with no groups.
    if let Some(group) = groups.iter().next() {
        for mut visible in visible_query.iter_mut() {
            visible.is_visible = true;
        }
        for (mut transform, bezier_animation) in query.iter_mut() {
            let path_length = group.1.standalone_lut.path_length as f64;
            // Normalizes speed so travel pace is independent of path length.
            let multiplier: f64 = 500.0 / path_length;
            let t_time = (bezier_animation.animation_offset
                + time.seconds_since_startup() * (0.1 * multiplier))
                % 1.0;
            let mut pos = group.1.compute_position_with_lut(t_time as f32);
            // Sideways offset from the path centerline.
            let road_line_offset = 4.0;
            let normal = group
                .1
                .compute_normal_with_bezier(&curves, t_time as f64)
                .normalize();
            pos += normal * road_line_offset;
            transform.translation.x = pos.x;
            transform.translation.y = pos.y;
            // the car looks ahead (5% of the curve length) to orient itself
            let further_pos = group
                .1
                .compute_position_with_lut(((t_time + 0.05 * multiplier) % 1.0) as f32);
            let further_normal = group
                .1
                .compute_normal_with_bezier(&curves, ((t_time + 0.05 * multiplier) % 1.0) as f64)
                .normalize();
            let forward_direction =
                (further_pos + further_normal * road_line_offset - pos).normalize();
            // let initial_rot = Quat::from_rotation_x(std::f32::consts::FRAC_PI_2);
            // let forward_direction = initial_rot.mul_vec3(forward_direction0.extend(0.0));
            // Current facing, projected onto the xy plane.
            let mut current_looking_dir = transform
                .rotation
                .mul_vec3(bezier_animation.initial_direction);
            current_looking_dir.z = 0.0;
            let quat = Quat::from_rotation_arc(current_looking_dir, forward_direction.extend(0.0));
            let (axis, mut angle) = quat.to_axis_angle();
            // println!(
            //     "current_looking_dir: {:?}, forward_direction: {:?}",
            //     current_looking_dir, forward_direction
            // );
            // maximum rotating speed
            angle = angle.clamp(0.0, 3.0 * std::f32::consts::PI / 180.0);
            let clamped_quat = Quat::from_axis_angle(axis, angle);
            transform.rotation = clamped_quat.mul_quat(transform.rotation);
        }
    }
}
|
use super::{BoltStructure, Single, Value};
use crate::{
constants::STRUCTURE_NAME,
error::{SerdeError, SerdeResult},
};
use serde::{
de, forward_to_deserialize_any,
ser::{self, SerializeTupleStruct},
};
use std::{collections::HashMap, fmt};
/// Bolt `SUCCESS` message: a server response whose single field is a
/// free-form metadata map.
#[derive(Debug, PartialEq)]
pub struct Success {
    // Key/value metadata returned by the server.
    pub metadata: HashMap<String, Value>,
}
impl BoltStructure for Success {
    /// Bolt structure signature byte identifying SUCCESS.
    const SIG: u8 = 0x70;
    /// Number of fields carried by the structure (just the metadata map).
    const LEN: u8 = 0x01;
    const SERIALIZE_LEN: usize = serialize_length!(Self::SIG, Self::LEN);
    // A single map field, as consumed by the structure_access! machinery.
    type Fields = Single<HashMap<String, Value>>;

    fn into_value(self) -> Value {
        value_map! {
            "metadata" => Value::Map(self.metadata),
        }
    }
}
impl fmt::Display for Success {
    // Renders as `Success(<metadata debug>)` via the debug-tuple helper.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("Success").field(&self.metadata).finish()
    }
}
impl ser::Serialize for Success {
    /// Serializes as a tuple struct named `STRUCTURE_NAME` with
    /// `SERIALIZE_LEN` entries, so the Bolt serializer emits the structure
    /// header, followed by the metadata map as its only field.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: ser::Serializer,
    {
        let mut ts_serializer =
            serializer.serialize_tuple_struct(STRUCTURE_NAME, Self::SERIALIZE_LEN)?;
        ts_serializer.serialize_field(&self.metadata)?;
        ts_serializer.end()
    }
}
impl<'de> de::Deserialize<'de> for Success {
    /// Drives deserialization through `deserialize_map`; the actual field
    /// extraction happens in `SuccessVisitor::visit_map`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        deserializer.deserialize_map(SuccessVisitor)
    }
}
/// Serde visitor that builds a `Success` out of the structure's map access.
struct SuccessVisitor;

impl<'de> de::Visitor<'de> for SuccessVisitor {
    type Value = Success;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("Success")
    }

    fn visit_map<V>(self, mut map_access: V) -> Result<Self::Value, V::Error>
    where
        V: de::MapAccess<'de>,
    {
        // structure_access! checks the Bolt signature/length and yields the
        // typed Fields (here: a Single metadata map).
        let fields = structure_access!(map_access, Success);
        Ok(Success {
            metadata: fields.value(),
        })
    }
}
impl<'de> de::Deserializer<'de> for Success {
    type Error = SerdeError;

    /// Lets a `Success` itself act as a deserializer: it is converted to its
    /// `Value` map representation, which then drives the visitor.
    fn deserialize_any<V>(self, visitor: V) -> SerdeResult<V::Value>
    where
        V: de::Visitor<'de>,
    {
        self.into_value().deserialize_map(visitor)
    }

    // Every other serde type request goes through deserialize_any above.
    forward_to_deserialize_any! {
        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
        bytes byte_buf option unit unit_struct newtype_struct seq tuple
        tuple_struct map struct identifier enum ignored_any
    }
}
|
use std::{cell::RefCell, rc::Rc};
use crate::sqlite::{
sqlite_resource_pool::{db_pool::SqliteDatabasePool, tx_pool::SqliteTxPool},
transaction::sqlite_tx::SqliteTx,
};
use apllodb_shared_components::SessionId;
use apllodb_storage_engine_interface::WithDbMethods;
use futures::FutureExt;
use super::BoxFutRes;
/// Implements the storage engine's database-scoped methods on top of the
/// single-threaded (Rc/RefCell) SQLite resource pools.
#[derive(Clone, Debug, Default)]
pub struct WithDbMethodsImpl {
    // Open databases, keyed by session.
    db_pool: Rc<RefCell<SqliteDatabasePool>>,
    // Active transactions, keyed by session.
    tx_pool: Rc<RefCell<SqliteTxPool>>,
}
impl WithDbMethodsImpl {
    /// Creates the method implementation over shared database and
    /// transaction pools (shared with the other method-impl structs).
    pub(crate) fn new(
        db_pool: Rc<RefCell<SqliteDatabasePool>>,
        tx_pool: Rc<RefCell<SqliteTxPool>>,
    ) -> Self {
        Self { db_pool, tx_pool }
    }
}
impl WithDbMethods for WithDbMethodsImpl {
    /// Begins a SQLite transaction for the session's database and registers
    /// it in the transaction pool. Errors if the session has no database or
    /// already holds a transaction (propagated via `?`).
    fn begin_transaction_core(self, sid: SessionId) -> BoxFutRes<()> {
        async move {
            // The db_pool borrow must stay alive while `begin` runs, since
            // `db` borrows from the pool.
            let db_pool = self.db_pool.borrow();
            let db = db_pool.get_db(&sid)?;
            let tx = SqliteTx::begin(db).await?;
            self.tx_pool.borrow_mut().insert_tx(&sid, tx)?;
            Ok(())
        }
        .boxed_local()
    }
}
|
/*
Copyright (c) 2016-2017, Robert Ou <rqou@robertou.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
use analyzer::ast::*;
use analyzer::design::*;
use analyzer::identifier::*;
use analyzer::objpools::*;
use analyzer::util::*;
use parser::*;
use std::collections::HashSet;
use std::ffi::OsStr;
/// All mutable state threaded through the VHDL analyzer: the object pools
/// that own every AST/scope object, the design database, accumulated
/// diagnostics, and per-pass bookkeeping.
pub struct AnalyzerCoreStateBlob {
    ///// This stuff is global
    // Object pools
    pub sp: StringPool,
    pub op_n: ObjPool<AstNode>,          // AST nodes
    pub op_s: ObjPool<Scope>,            // declaration scopes
    pub op_sc: ObjPool<ScopeChainNode>,  // scope-chain links
    pub op_l: ObjPool<Library>,          // design libraries
    pub design_db: DesignDatabase,
    // Diagnostics accumulate as plain text; emptiness signals success.
    pub errors: String,
    pub warnings: String,
    ///// This stuff is local to some part of the parsing step
    // Counter used to mint unique "__internal_anon_N" names.
    internal_name_count: u64,
    // The library new design units are added into.
    work_lib: Option<ObjPoolIndex<Library>>,
    // File currently being analyzed, for diagnostics.
    current_file_name: Option<StringPoolIndexOsStr>,
    // Head of the scope chain used by name lookup.
    innermost_scope: Option<ObjPoolIndex<ScopeChainNode>>,
    // Names temporarily hidden from lookup (e.g. a declaration's own name
    // while its subtype indication is being analyzed).
    blacklisted_names: HashSet<ScopeItemName>,
}
impl AnalyzerCoreStateBlob {
    /// Creates a fresh analyzer state: empty pools, empty diagnostics, and no
    /// current file/library/scope selected yet.
    pub fn new() -> AnalyzerCoreStateBlob {
        AnalyzerCoreStateBlob {
            sp: StringPool::new(),
            op_n: ObjPool::new(),
            op_s: ObjPool::new(),
            op_sc: ObjPool::new(),
            op_l: ObjPool::new(),
            design_db: DesignDatabase::new(),
            errors: String::new(),
            warnings: String::new(),
            internal_name_count: 0,
            work_lib: None,
            current_file_name: None,
            innermost_scope: None,
            blacklisted_names: HashSet::new(),
        }
    }
}
/// Appends a "file:line:col:" prefix to the error buffer (`is_err`) or the
/// warning buffer, using the parse node's position when it has one.
///
/// Panics if no file is currently being analyzed (`current_file_name` unset).
fn dump_current_location(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode,
    is_err: bool) {
    let o = if is_err { &mut s.errors } else { &mut s.warnings };
    *o += &format!("{}:",
        s.sp.retrieve_osstr(s.current_file_name.unwrap()).to_string_lossy());
    // first_line of -1 appears to mean "no position info" — only then is the
    // line:column part omitted.
    if pt.first_line != -1 {
        *o += &format!("{}:{}:", pt.first_line, pt.first_column);
    }
}
/// Builds a `SourceLoc` for diagnostics from a parse-tree node's span plus
/// the file currently being analyzed.
fn pt_loc(s: &AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode) -> SourceLoc {
    SourceLoc {
        file_name: s.current_file_name,
        first_line: pt.first_line,
        first_column: pt.first_column,
        last_line: pt.last_line,
        last_column: pt.last_column,
    }
}
// XXX FIXME: We .unwrap() all identifiers because the parser already rejects
// invalid ones. This is a somewhat ugly duplication of effort.
/// Converts a basic or extended identifier parse node into an interned
/// `Identifier`. The `.unwrap()`s are justified by the parser having already
/// rejected malformed identifiers (see the XXX note above).
fn analyze_identifier(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode)
    -> Identifier {
    match pt.node_type {
        ParseTreeNodeType::PT_BASIC_ID => {
            let sp_idx = s.sp.add_latin1_str(&pt.str1);
            // false => basic (case-insensitive) identifier
            Identifier::new_latin1(&mut s.sp, sp_idx, false).unwrap()
        },
        ParseTreeNodeType::PT_EXT_ID => {
            let sp_idx = s.sp.add_latin1_str(&pt.str1);
            // true => extended (\...\-style, case-sensitive) identifier
            Identifier::new_latin1(&mut s.sp, sp_idx, true).unwrap()
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
/// The (parameter types, result type) signature used during overload
/// resolution: two overloadable declarations conflict exactly when their
/// profiles compare equal.
#[derive(Eq)]
struct ParameterResultTypeProfile {
    // Result type, or None when there is no result (e.g. procedures).
    result: Option<ObjPoolIndex<AstNode>>,
    // Parameter types in declaration order.
    params: Vec<ObjPoolIndex<AstNode>>,
}
impl PartialEq for ParameterResultTypeProfile {
    /// Profiles are equal when the parameter lists match element-for-element
    /// (including length) and the result types match. `Vec`/`Option` `==`
    /// already performs exactly the length check, element-wise comparison,
    /// and result comparison the previous hand-rolled loop implemented.
    fn eq(&self, other: &ParameterResultTypeProfile) -> bool {
        self.params == other.params && self.result == other.result
    }
}
/// Computes the overload-resolution profile of a declaration.
///
/// Currently only enum literal declarations are supported; anything else
/// panics. Panicking (rather than erroring) marks an analyzer bug, since
/// callers only pass overloadable declarations here.
fn get_parameter_result_type_profile(s: &AnalyzerCoreStateBlob,
    node_idx: ObjPoolIndex<AstNode>) -> ParameterResultTypeProfile {
    let node = s.op_n.get(node_idx);
    match node {
        &AstNode::EnumerationLitDecl {corresponding_type_decl, ..} => {
            // Treat this as a function that takes no arguments and returns the
            // type of the corresponding enum.
            // TODO: subtypes
            ParameterResultTypeProfile {
                result: Some(corresponding_type_decl),
                params: vec![],
            }
        }
        _ =>
            panic!("Don't know how to get parameter/result type profile here!")
    }
}
/// Attempts to add `node` under `name` in `scope`, enforcing VHDL's
/// homograph rules. Returns true on success, false on a conflicting
/// declaration (the caller is responsible for reporting the error).
///
/// Rules implemented: a fresh name always succeeds; mixing an overloadable
/// declaration with a non-overloadable one (or vice versa) fails; two
/// overloadable declarations fail only when their parameter/result type
/// profiles are identical.
fn try_add_declaration(s: &mut AnalyzerCoreStateBlob, name: ScopeItemName,
    node: ObjPoolIndex<AstNode>, scope: ObjPoolIndex<Scope>) -> bool {
    // This seems to be needed because the borrow checker doesn't seem to
    // understand that the None case shouldn't need the borrow still.
    let has_existing = s.op_s.get(scope).get(name).is_some();
    if !has_existing {
        // We are adding a new thing; this should always work
        s.op_s.get_mut(scope).add(name, node);
        true
    } else {
        // Compute the conflict verdict inside a block so the shared borrow
        // of `existing` ends before we mutate the scope below.
        let found_conflict = {
            let existing = s.op_s.get(scope).get(name).unwrap();
            // Are the existing things overloadable? (Must always be the same)
            let existing_overloadable =
                s.op_n.get(existing[0]).is_an_overloadable_decl();
            let this_overloadable =
                s.op_n.get(node).is_an_overloadable_decl();
            // TODO: Implicit declarations?
            if !(existing_overloadable && this_overloadable) {
                // Definitely fail
                true
            } else {
                // Both are overloadable
                // Check the type profiles
                let typeprof_of_new =
                    get_parameter_result_type_profile(s, node);
                let mut found_conflict = false;
                for existing_i in existing {
                    let typeprof_of_existing =
                        get_parameter_result_type_profile(s, *existing_i);
                    if typeprof_of_new == typeprof_of_existing {
                        found_conflict = true;
                        break;
                    }
                }
                found_conflict
            }
        };
        if !found_conflict {
            s.op_s.get_mut(scope).add(name, node);
            true
        } else {
            false
        }
    }
}
/// Analyzes one enumeration literal (identifier or character literal),
/// allocates its declaration node, registers it in `scope`, and appends it
/// to the owning `EnumerationTypeDecl` (`e_`). Returns false and reports an
/// error on a duplicate literal.
fn analyze_enum_lit(s: &mut AnalyzerCoreStateBlob,
    lit_pt: &VhdlParseTreeNode, scope: ObjPoolIndex<Scope>, idx: &mut i64,
    e_: ObjPoolIndex<AstNode>, pt_for_loc: &VhdlParseTreeNode) -> bool {
    let lit = match lit_pt.node_type {
        ParseTreeNodeType::PT_LIT_CHAR =>
            EnumerationLiteral::CharLiteral(lit_pt.chr),
        _ => EnumerationLiteral::Identifier(analyze_identifier(
            s, lit_pt))
    };
    // Allocate first, then initialize via a scoped mutable borrow.
    let x_ = s.op_n.alloc();
    {
        let x = s.op_n.get_mut(x_);
        *x = AstNode::EnumerationLitDecl {
            lit: lit,
            // Position of this literal within the enumeration (0-based).
            idx: *idx,
            corresponding_type_decl: e_,
        };
    }
    // Done setting up, try to add it
    if !try_add_declaration(s, lit.to_scope_item_name(), x_, scope) {
        dump_current_location(s, pt_for_loc, true);
        s.errors += &format!(
            "ERROR: Duplicate declaration of enum literal {}!\n",
            match lit {
                EnumerationLiteral::Identifier(id) =>
                    s.sp.retrieve_latin1_str(id.orig_name).pretty_name(),
                EnumerationLiteral::CharLiteral(c) =>
                    get_chr_escaped(c),
            });
        return false;
    }
    // Add it to the type decl as well
    {
        let e = s.op_n.get_mut(e_);
        match e {
            &mut AstNode::EnumerationTypeDecl{ref mut literals, ..} => {
                literals.push(x_);
            },
            _ => panic!("AST invariant violated!")
        }
    }
    true
}
/// Recursively analyzes an enumeration literal list, numbering literals in
/// source order via `idx`. Stops at (and reports) the first failure.
fn analyze_enum_lits(s: &mut AnalyzerCoreStateBlob,
    lit_pt: &VhdlParseTreeNode, scope: ObjPoolIndex<Scope>, idx: &mut i64,
    e_: ObjPoolIndex<AstNode>, pt_for_loc: &VhdlParseTreeNode) -> bool {
    // Base case: a single literal rather than a list node.
    if lit_pt.node_type != ParseTreeNodeType::PT_ENUM_LITERAL_LIST {
        return analyze_enum_lit(s, lit_pt, scope, idx, e_, pt_for_loc);
    }
    // The list nests head-first: handle the head sublist, then bump the
    // index and handle the trailing literal.
    let head_ok = analyze_enum_lits(s, &lit_pt.pieces[0].as_ref().unwrap(),
        scope, idx, e_, pt_for_loc);
    if !head_ok {
        return false;
    }
    *idx += 1;
    analyze_enum_lit(s, &lit_pt.pieces[1].as_ref().unwrap(),
        scope, idx, e_, pt_for_loc)
}
/// Analyzes a full type declaration. Currently only enumeration type
/// definitions are handled: the type node is created and registered, then
/// each literal is analyzed into the same scope.
fn analyze_type_decl(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, scope: ObjPoolIndex<Scope>) -> bool {
    let id = analyze_identifier(s, &pt.pieces[0].as_ref().unwrap());
    let typedef_pt = pt.pieces[1].as_ref().unwrap();
    match typedef_pt.node_type {
        ParseTreeNodeType::PT_ENUMERATION_TYPE_DEFINITION => {
            // The main declaration
            let loc = pt_loc(s, pt);
            let d_ = s.op_n.alloc();
            {
                let d = s.op_n.get_mut(d_);
                *d = AstNode::EnumerationTypeDecl {
                    loc: loc,
                    id: id,
                    // Filled in literal-by-literal by analyze_enum_lit.
                    literals: Vec::new(),
                };
            }
            if !try_add_declaration(s, ScopeItemName::Identifier(id),
                d_, scope) {
                dump_current_location(s, pt, true);
                s.errors += &format!(
                    "ERROR: Duplicate declaration of type {}!\n",
                    s.sp.retrieve_latin1_str(id.orig_name).pretty_name());
                return false;
            }
            // The literals
            let mut idx: i64 = 0;
            let lits_ok = analyze_enum_lits(s,
                &typedef_pt.pieces[0].as_ref().unwrap(), scope, &mut idx,
                d_, pt);
            if !lits_ok {
                return false;
            }
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
    true
}
/// Looks `item` up along the scope chain, innermost scope first.
///
/// For a non-overloadable item the first hit wins and the walk stops. For
/// overloadable items the walk continues outward, collecting every candidate
/// whose parameter/result profile has not already been shadowed by an inner
/// declaration. Returns None when nothing is found.
fn walk_scope_chain(s: &mut AnalyzerCoreStateBlob, item: ScopeItemName)
    -> Option<Vec<ObjPoolIndex<AstNode>>> {
    let mut ret = vec![];
    let mut cur_scope_node = s.innermost_scope;
    // Whether this lookup is in "overloadable" mode; decided on first hit.
    let mut looking_for_overloadable = false;
    let mut first_time = true;
    // Profiles already collected; inner declarations shadow outer ones with
    // the same profile.
    let mut used_param_result_type_profiles = vec![];
    while cur_scope_node.is_some() {
        let cur_scope_node_ = s.op_sc.get(cur_scope_node.unwrap());
        if let &ScopeChainNode::X{this_scope, parent} = cur_scope_node_ {
            // Try to find the thing
            if let Some(maybe_found_thing) = s.op_s.get(this_scope).get(item) {
                // We potentially found some things
                // TODO: we need to handle aliases
                // This should always be true
                assert!(maybe_found_thing.len() > 0);
                if first_time {
                    // Everything found must either all be overloadable
                    // or none can be overloadable (it cannot be mixed)
                    looking_for_overloadable = s.op_n.get(maybe_found_thing[0])
                        .is_an_overloadable_decl();
                    first_time = false;
                }
                if !looking_for_overloadable {
                    // We are looking for a non-overloadable thing, and we
                    // have Definitely found at least one. This means that
                    // we have found what we are looking for, and we are done!
                    assert!(maybe_found_thing.len() == 1);
                    ret.push(maybe_found_thing[0]);
                    break;
                } else {
                    // We are looking for overloadable things. Anything that
                    // isn't already shadowed is good
                    for &maybe_found_i in maybe_found_thing {
                        let this_param_result_profile =
                            get_parameter_result_type_profile(s,
                                maybe_found_i);
                        let mut matched = false;
                        for other_param_result_profile in
                            &used_param_result_type_profiles {
                            if this_param_result_profile ==
                                *other_param_result_profile {
                                matched = true;
                                break;
                            }
                        }
                        if !matched {
                            // We can add it
                            ret.push(maybe_found_i);
                            used_param_result_type_profiles.push(
                                this_param_result_profile);
                        }
                    }
                }
            }
            // Continue outward to the enclosing scope.
            cur_scope_node = parent;
        } else {
            panic!("AST invariant violated!")
        }
    }
    if ret.len() > 0 {
        Some(ret)
    } else {
        None
    }
}
/// Converts a designator parse node (identifier, operator-symbol string
/// literal, or character literal) into the corresponding `ScopeItemName`.
fn analyze_designator(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode)
    -> ScopeItemName {
    match pt.node_type {
        ParseTreeNodeType::PT_BASIC_ID | ParseTreeNodeType::PT_EXT_ID => {
            ScopeItemName::Identifier(analyze_identifier(s, pt))
        },
        ParseTreeNodeType::PT_LIT_STRING => {
            // Operator symbols (e.g. "+") are interned as raw strings.
            let string_idx = {
                s.sp.add_latin1_str(&pt.str1)
            };
            ScopeItemName::StringLiteral(string_idx)
        },
        ParseTreeNodeType::PT_LIT_CHAR => {
            ScopeItemName::CharLiteral(pt.chr)
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
// The point of these functions is to do _only_ scope and visibility logic
// (not overloading). There are three cases: overloadable (can get multiple),
// non-overloadable and a single thing ("normal"), and non-overloadable and
// multiple things (e.g. use clauses)
// TODO: Braindump better
/// Resolves a name parse node to the AST node(s) it denotes, doing scope and
/// visibility logic only (no overload resolution — see the note above).
/// Returns None when nothing visible matches, including when the name is
/// currently blacklisted (its own declaration is being analyzed).
fn analyze_name(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode)
    -> Option<Vec<ObjPoolIndex<AstNode>>> {
    match pt.node_type {
        // simple_name, operator_symbol, or character_literal
        ParseTreeNodeType::PT_BASIC_ID | ParseTreeNodeType::PT_EXT_ID |
        ParseTreeNodeType::PT_LIT_STRING | ParseTreeNodeType::PT_LIT_CHAR => {
            let designator = analyze_designator(s, pt);
            // FIXME: I'm not sure this exactly follows the rules of the spec.
            //        However, I don't know if it is noticeable or not. I think
            //        that any time this rule can apply, the result can never be
            //        something overloadable.
            if s.blacklisted_names.contains(&designator) {
                return None;
            }
            walk_scope_chain(s, designator)
        },
        // selected_name
        ParseTreeNodeType::PT_NAME_SELECTED => {
            // First figure out what the prefix is
            let prefix = analyze_name(s, &pt.pieces[0].as_ref().unwrap());
            if prefix.is_none() {
                return None;
            }
            let prefix = prefix.unwrap();
            let suffix_pt = &pt.pieces[1].as_ref().unwrap();
            if prefix.len() == 1 {
                // There is a single thing here
                let prefix = prefix[0];
                let designator = analyze_designator(s, suffix_pt);
                match s.op_n.get(prefix).kind() {
                    AstNodeKind::DeclarativeRegion => {
                        // TODO: The "only if we're currently inside" logic
                        let scope = s.op_n.get(prefix).scope().unwrap();
                        // Try to find the thing
                        if let Some(maybe_found_thing) =
                            s.op_s.get(scope).get(designator) {
                            // TODO: handle aliases??
                            // This should always be true
                            assert!(maybe_found_thing.len() > 0);
                            Some(maybe_found_thing.to_owned())
                        } else {
                            None
                        }
                    },
                    // TODO: Other things
                    // This is not a thing we can select into
                    _ => None,
                }
            } else {
                // This is a function call or something
                panic!("NOT IMPLEMENTED YET!");
            }
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
fn analyze_type_mark(s: &mut AnalyzerCoreStateBlob,
pt: &VhdlParseTreeNode, pt_for_loc: &VhdlParseTreeNode)
-> Option<ObjPoolIndex<AstNode>> {
let type_mark_ = analyze_name(s, pt);
if type_mark_.is_none() {
dump_current_location(s, pt_for_loc, true);
s.errors += "ERROR: Bad name for type_mark\n";
return None;
}
let type_mark = type_mark_.unwrap();
if type_mark.len() != 1 {
dump_current_location(s, pt_for_loc, true);
s.errors += "ERROR: Bad name for type_mark\n";
return None;
}
if s.op_n.get(type_mark[0]).kind() != AstNodeKind::Type {
// FIXME: Do I need to do special stuff here anymore?
dump_current_location(s, pt_for_loc, true);
s.errors += "ERROR: name is not a type\n";
return None;
}
Some(type_mark[0])
}
/// Analyzes a subtype_indication. Only the bare type_mark form is supported
/// so far (no resolution function, no constraint — asserted absent).
/// Returns the allocated `SubtypeIndication` node, or None after reporting.
fn analyze_subtype_indication(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, pt_for_loc: &VhdlParseTreeNode)
    -> Option<ObjPoolIndex<AstNode>> {
    match pt.node_type {
        ParseTreeNodeType::PT_SUBTYPE_INDICATION => {
            // We have an absolutely normal subtype_indication
            // TODO: Not implemented
            assert!(pt.pieces[1].is_none());
            assert!(pt.pieces[2].is_none());
            let type_mark =
                analyze_type_mark(s, &pt.pieces[0].as_ref().unwrap(),
                    pt_for_loc);
            if type_mark.is_none() {
                return None;
            }
            let x_ = s.op_n.alloc();
            {
                let x = s.op_n.get_mut(x_);
                *x = AstNode::SubtypeIndication {
                    type_mark: type_mark.unwrap(),
                };
            }
            Some(x_)
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
/// Analyzes a subtype declaration and registers it in `scope`.
///
/// The subtype's own name is blacklisted while its subtype_indication is
/// analyzed, so the indication cannot refer to the name being declared
/// (analyze_name returns None for blacklisted names).
fn analyze_subtype_decl(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, scope: ObjPoolIndex<Scope>) -> bool {
    let id = analyze_identifier(s, &pt.pieces[0].as_ref().unwrap());
    s.blacklisted_names.insert(ScopeItemName::Identifier(id));
    let loc = pt_loc(s, pt);
    let subtype_indication_ = analyze_subtype_indication(s,
        &pt.pieces[1].as_ref().unwrap(), pt);
    if subtype_indication_.is_none() {
        return false;
    }
    let x_ = s.op_n.alloc();
    {
        let x = s.op_n.get_mut(x_);
        *x = AstNode::SubtypeDecl {
            loc: loc,
            id: id,
            subtype_indication: subtype_indication_.unwrap(),
        };
    }
    if !try_add_declaration(s, ScopeItemName::Identifier(id), x_, scope) {
        dump_current_location(s, pt, true);
        s.errors += &format!(
            "ERROR: Duplicate declaration of subtype {}!\n",
            s.sp.retrieve_latin1_str(id.orig_name).pretty_name());
        return false;
    }
    // Lookup is back to normal once the declaration is complete.
    s.blacklisted_names.clear();
    true
}
/// Flattens an identifier_list parse tree into a `Vec` of identifiers in
/// source order.
fn analyze_identifier_list(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode) -> Vec<Identifier> {
    // The tree nests head-first, so collect tail-to-head and flip at the end.
    let mut ids = vec![];
    let mut node = pt;
    while node.node_type == ParseTreeNodeType::PT_ID_LIST_REAL {
        ids.push(analyze_identifier(s, &node.pieces[1].as_ref().unwrap()));
        node = &node.pieces[0].as_ref().unwrap();
    }
    // The innermost node is the first identifier of the list.
    ids.push(analyze_identifier(s, node));
    ids.reverse();
    ids
}
/// Analyzes a constant declaration (possibly declaring several identifiers
/// at once). All declared names are blacklisted while the shared
/// subtype_indication is analyzed, then one `ConstantDecl` node is created
/// and registered per identifier. The initializer is not yet supported.
fn analyze_constant_decl(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, scope: ObjPoolIndex<Scope>) -> bool {
    let id_list = analyze_identifier_list(s, &pt.pieces[0].as_ref().unwrap());
    for &id in &id_list {
        s.blacklisted_names.insert(ScopeItemName::Identifier(id));
    }
    let loc = pt_loc(s, pt);
    let subtype_indication_ = analyze_subtype_indication(s,
        &pt.pieces[1].as_ref().unwrap(), pt);
    if subtype_indication_.is_none() {
        return false;
    }
    // TODO: Not implemented
    assert!(pt.pieces[2].is_none());
    for id in id_list {
        let x_ = s.op_n.alloc();
        {
            let x = s.op_n.get_mut(x_);
            *x = AstNode::ConstantDecl {
                loc: loc,
                id: id,
                // All ids in the list share the same subtype indication.
                subtype_indication: subtype_indication_.unwrap(),
                value: None,
            };
        }
        if !try_add_declaration(s, ScopeItemName::Identifier(id), x_, scope) {
            dump_current_location(s, pt, true);
            s.errors += &format!(
                "ERROR: Duplicate declaration of constant {}!\n",
                s.sp.retrieve_latin1_str(id.orig_name).pretty_name());
            return false;
        }
    }
    s.blacklisted_names.clear();
    true
}
/// Analyzes one item of an interface list. Only interface constant
/// declarations are supported so far. Duplicate names within the same
/// interface list are tracked via `used_names` (not via scopes), and each
/// created `InterfaceConstant` node is appended to `output_vec`.
fn analyze_interface_item(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, used_names: &mut HashSet<Identifier>,
    output_vec: &mut Vec<ObjPoolIndex<AstNode>>) -> bool {
    match pt.node_type {
        ParseTreeNodeType::PT_INTERFACE_CONSTANT_DECLARATION => {
            let id_list = analyze_identifier_list(s,
                &pt.pieces[0].as_ref().unwrap());
            let loc = pt_loc(s, pt);
            let subtype_indication_ = analyze_subtype_indication(s,
                &pt.pieces[1].as_ref().unwrap(), pt);
            if subtype_indication_.is_none() {
                return false;
            }
            // TODO: Not implemented
            assert!(pt.pieces[2].is_none());
            assert!(pt.pieces[3].is_none());
            for id in id_list {
                // Interface names may not repeat within one list.
                if used_names.contains(&id) {
                    dump_current_location(s, pt, true);
                    s.errors += &format!(
                        "ERROR: Duplicate declaration of interface constant \
                        {}!\n",
                        s.sp.retrieve_latin1_str(id.orig_name).pretty_name());
                    return false;
                }
                used_names.insert(id);
                let x_ = s.op_n.alloc();
                {
                    let x = s.op_n.get_mut(x_);
                    *x = AstNode::InterfaceConstant {
                        loc: loc,
                        id: id,
                        subtype_indication: subtype_indication_.unwrap(),
                        value: None,
                    };
                }
                output_vec.push(x_);
            }
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
    true
}
/// Recursive worker for analyze_parameter_interface_list: flattens a nested
/// PT_INTERFACE_LIST into `output_vec`, stopping at the first failing item.
fn analyze_parameter_interface_list_real(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, used_names: &mut HashSet<Identifier>,
    output_vec: &mut Vec<ObjPoolIndex<AstNode>>) -> bool {
    match pt.node_type {
        ParseTreeNodeType::PT_INTERFACE_LIST => {
            // Head of the list first (preserves declaration order), then the
            // trailing item; `&&` short-circuits on the first failure.
            analyze_parameter_interface_list_real(s,
                &pt.pieces[0].as_ref().unwrap(), used_names, output_vec)
                && analyze_interface_item(s, &pt.pieces[1].as_ref().unwrap(),
                    used_names, output_vec)
        },
        // A lone item rather than a list node.
        _ => analyze_interface_item(s, pt, used_names, output_vec),
    }
}
/// Analyzes a subprogram parameter interface list, returning the interface
/// nodes in declaration order, or None if any item failed (errors already
/// reported by the worker).
fn analyze_parameter_interface_list(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode) -> Option<Vec<ObjPoolIndex<AstNode>>> {
    let mut items = vec![];
    // Tracks names seen so far to reject duplicates within this list.
    let mut seen_names = HashSet::new();
    if analyze_parameter_interface_list_real(s, pt, &mut seen_names,
        &mut items) {
        Some(items)
    } else {
        None
    }
}
/// Analyzes a subprogram specification (functions only so far).
///
/// Creates two AST nodes: an anonymous `GenericFunctionDecl` registered
/// under a freshly minted internal name ("__internal_anon_N"), and a
/// `FuncInstantiation` registered under the user-visible designator that
/// points at the generic node. The designator is blacklisted while the
/// return type and parameters are analyzed.
fn analyze_subprogram_spec(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, pt_for_loc: &VhdlParseTreeNode,
    scope: ObjPoolIndex<Scope>) -> bool {
    match pt.node_type {
        ParseTreeNodeType::PT_FUNCTION_SPECIFICATION => {
            // Functions are pure unless explicitly marked impure.
            let is_pure = match pt.purity {
                ParseTreeFunctionPurity::PURITY_IMPURE => false,
                _ => true,
            };
            let loc = pt_loc(s, pt_for_loc);
            let designator =
                analyze_designator(s, &pt.pieces[0].as_ref().unwrap());
            s.blacklisted_names.insert(designator);
            let return_type =
                analyze_type_mark(s, &pt.pieces[1].as_ref().unwrap(),
                    pt_for_loc);
            if return_type.is_none() {
                return false;
            }
            let args = analyze_parameter_interface_list(s,
                &pt.pieces[3].as_ref().unwrap());
            if args.is_none() {
                return false;
            }
            // Not implemented
            assert!(pt.pieces[2].is_none());
            // We need a new internal name
            let internal_name =
                format!("__internal_anon_{}", s.internal_name_count);
            s.internal_name_count += 1;
            let mut internal_designator_id =
                Identifier::new_unicode(&mut s.sp, internal_name.as_str(),
                    true).unwrap();
            internal_designator_id.is_internal = true;
            let internal_designator =
                ScopeItemName::Identifier(internal_designator_id);
            // This is the generic piece
            let x_ = s.op_n.alloc();
            {
                let x = s.op_n.get_mut(x_);
                *x = AstNode::GenericFunctionDecl {
                    loc: loc,
                    designator: internal_designator,
                    args: args.unwrap(),
                    return_type: return_type.unwrap(),
                    is_pure: is_pure,
                };
            }
            // The internal name is unique by construction, so failure here
            // is an analyzer bug rather than a user error.
            if !try_add_declaration(s, internal_designator, x_, scope) {
                panic!("Internal compiler error!");
            }
            // This is the instantiation
            let y_ = s.op_n.alloc();
            {
                let y = s.op_n.get_mut(y_);
                *y = AstNode::FuncInstantiation {
                    loc: loc,
                    designator: designator,
                    generic_func: x_,
                };
            }
            if !try_add_declaration(s, designator, y_, scope) {
                dump_current_location(s, pt, true);
                s.errors += &format!(
                    "ERROR: Duplicate declaration of function {}!\n",
                    designator.pretty_name(&mut s.sp));
                return false;
            }
            // FIXME: What is the correct spot to do this?
            s.blacklisted_names.clear();
            true
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
/// Analyzes a subprogram declaration by delegating to its specification
/// (piece 0), using the declaration node itself for location reporting.
fn analyze_subprogram_decl(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, scope: ObjPoolIndex<Scope>) -> bool {
    // FIXME: How exactly should this work?
    analyze_subprogram_spec(s, &pt.pieces[0].as_ref().unwrap(), pt, scope)
}
/// Dispatches one declarative item to its specific analyzer. All supported
/// item kinds currently declare into `decl_scope`; `use_scope` is reserved
/// for use-clause handling and is presently unused.
fn analyze_declarative_item(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, decl_scope: ObjPoolIndex<Scope>,
    use_scope: ObjPoolIndex<Scope>) -> bool {
    match pt.node_type {
        ParseTreeNodeType::PT_FULL_TYPE_DECLARATION =>
            analyze_type_decl(s, pt, decl_scope),
        ParseTreeNodeType::PT_SUBTYPE_DECLARATION =>
            analyze_subtype_decl(s, pt, decl_scope),
        ParseTreeNodeType::PT_CONSTANT_DECLARATION =>
            analyze_constant_decl(s, pt, decl_scope),
        ParseTreeNodeType::PT_SUBPROGRAM_DECLARATION =>
            analyze_subprogram_decl(s, pt, decl_scope),
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
/// Recursively analyzes a declaration list in source order, stopping at the
/// first item that fails (errors already reported by the item analyzers).
fn analyze_declaration_list(s: &mut AnalyzerCoreStateBlob,
    pt: &VhdlParseTreeNode, decl_scope: ObjPoolIndex<Scope>,
    use_scope: ObjPoolIndex<Scope>) -> bool {
    match pt.node_type {
        ParseTreeNodeType::PT_DECLARATION_LIST => {
            // Head sublist first, then the trailing item; `&&` short-circuits
            // on the first failure.
            analyze_declaration_list(s, &pt.pieces[0].as_ref().unwrap(),
                decl_scope, use_scope)
                && analyze_declarative_item(s, &pt.pieces[1].as_ref().unwrap(),
                    decl_scope, use_scope)
        },
        // A single declaration rather than a list node.
        _ => analyze_declarative_item(s, pt, decl_scope, use_scope),
    }
}
/// Analyzes an entity declaration: validates the optional trailing name,
/// rejects duplicate design units in the work library, builds the entity's
/// use/declaration scope chain, creates the `Entity` AST node, analyzes its
/// declarative part, and finally registers the unit in the work library.
fn analyze_entity(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode,
    tgt_scope: ObjPoolIndex<Scope>) -> bool {
    // Location information
    let loc = pt_loc(s, pt);
    // Our name
    let id = analyze_identifier(s, &pt.pieces[0].as_ref().unwrap());
    // Verify that the id at the end (if any) is the same as the one in the
    // beginning
    if let Some(tail_id_pt) = pt.pieces[4].as_ref() {
        let tail_id = analyze_identifier(s, tail_id_pt);
        if tail_id != id {
            dump_current_location(s, pt, true);
            s.errors +=
                "ERROR: Name at end of entity must match name at beginning\n";
            return false;
        }
    }
    // Check for duplicate entity
    // FIXME: Probably have to replace existing one at some point???
    let old_node_idx = s.op_l.get(s.work_lib.unwrap()).find_design_unit(id);
    if old_node_idx.is_some() {
        dump_current_location(s, pt, true);
        s.errors += &format!(
            "ERROR: Design unit {} already exists in library!\n",
            s.sp.retrieve_latin1_str(id.orig_name).pretty_name());
        // Point the user at the earlier definition when it has a location.
        let old_node = s.op_n.get(old_node_idx.unwrap());
        if let Some(old_loc) = old_node.loc() {
            s.errors += &format!("\tPrevious version was at {}\n",
                old_loc.format_for_error(&s.sp));
        }
        return false;
    }
    // Create scopes and chain them up properly.
    // FIXME: Do note that we cheat a bunch here and don't pop off scope chain
    // nodes when things fail. This is fine because the next design unit
    // will start with an empty scope chain. However, we do need to remember
    // to remove these on success except on toplevel design units.
    let use_scope = s.op_s.alloc();
    let decl_scope = s.op_s.alloc();
    let use_sc = s.op_sc.alloc();
    let decl_sc = s.op_sc.alloc();
    {
        // use scope sits between the outer chain and the decl scope.
        let use_sc_ = s.op_sc.get_mut(use_sc);
        *use_sc_ = ScopeChainNode::X {
            this_scope: use_scope,
            parent: s.innermost_scope
        };
    }
    {
        let decl_sc_ = s.op_sc.get_mut(decl_sc);
        *decl_sc_ = ScopeChainNode::X {
            this_scope: decl_scope,
            parent: Some(use_sc),
        }
    }
    // Name lookup inside the entity now starts at the decl scope.
    s.innermost_scope = Some(decl_sc);
    // Set up the actual entity object
    let e_ = s.op_n.alloc();
    {
        let e = s.op_n.get_mut(e_);
        *e = AstNode::Entity {
            loc: loc,
            id: id,
            scope: decl_scope,
            scope_chain: decl_sc,
        };
    }
    // Make the entity visible under its own name in the target scope.
    s.op_s.get_mut(tgt_scope).add(ScopeItemName::Identifier(id), e_);
    // TODO
    // Declarations
    if let Some(decl_pt) = pt.pieces[2].as_ref() {
        if !analyze_declaration_list(s, decl_pt, decl_scope, use_scope) {
            return false;
        }
    }
    // Add the thing we analyzed to the library
    s.op_l.get_mut(s.work_lib.unwrap()).add_design_unit(id, e_);
    true
}
/// Analyzes a PT_DESIGN_UNIT node: sets up a fresh root declarative region
/// with no parent and then dispatches on the library unit it contains.
/// Returns true iff there were no errors.
fn analyze_design_unit(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode)
    -> bool {
    // Every design unit starts from a brand-new root declarative region.
    let root_chain = s.op_sc.alloc();
    let root_scope = s.op_s.alloc();
    {
        let chain_node = s.op_sc.get_mut(root_chain);
        *chain_node = ScopeChainNode::X {
            this_scope: root_scope,
            parent: None,
        };
    }
    s.innermost_scope = Some(root_chain);

    // Context clauses (pieces[1]) are not implemented yet.
    assert!(pt.pieces[1].is_none());

    let unit = pt.pieces[0].as_ref().unwrap();
    match unit.node_type {
        ParseTreeNodeType::PT_ENTITY =>
            analyze_entity(s, unit, root_scope),
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
/// Analyzes a PT_DESIGN_FILE (a left-leaning list of design units) or a
/// bare PT_DESIGN_UNIT. Returns true iff every unit analyzed cleanly.
fn analyze_design_file(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode)
    -> bool {
    match pt.node_type {
        ParseTreeNodeType::PT_DESIGN_UNIT => analyze_design_unit(s, pt),
        ParseTreeNodeType::PT_DESIGN_FILE => {
            // Deliberately NOT short-circuited: the trailing unit is
            // analyzed (and its errors reported) even when the preceding
            // units already failed.
            let head_ok = analyze_design_file(
                s, &pt.pieces[0].as_ref().unwrap());
            let tail_ok = analyze_design_unit(
                s, &pt.pieces[1].as_ref().unwrap());
            head_ok && tail_ok
        },
        _ => panic!("Don't know how to handle this parse tree node!")
    }
}
/// The core of the analysis code. Analyzes the parse output of a single
/// design file (`pt`) into the design library `work_lib`. The design
/// library `work_lib` must have already been added to the design database
/// carried inside `s`, which contains "everything" that possibly exists in
/// the VHDL design, including all libraries. Errors accumulate in the
/// state blob's "errors" field and warnings in its "warnings" field.
/// Returns true iff there were no errors.
pub fn vhdl_analyze_file(s: &mut AnalyzerCoreStateBlob, pt: &VhdlParseTreeNode,
    work_lib: ObjPoolIndex<Library>, file_name: &OsStr) -> bool {
    // Intern the file name, then reset the per-file analysis state before
    // walking the parse tree.
    let file_name_idx = s.sp.add_osstr(file_name);
    s.current_file_name = Some(file_name_idx);
    s.work_lib = Some(work_lib);
    s.innermost_scope = None;
    analyze_design_file(s, pt)
}
#[cfg(test)]
mod tests {
    // No unit tests for the analyzer core yet.
}