text stringlengths 8 4.13M |
|---|
use friday_error::{FridayError, propagate, frierr};
use serde_json;
use serde::Serialize;
use serde::de::DeserializeOwned;
pub mod core;
pub mod server;
pub mod webgui;
pub mod path;
pub mod endpoint;
pub mod vendor;
mod impl_tiny_http;
mod tests;
/// Resolves the name of the endpoint matching the request's URL path.
///
/// Parses the request's URL and matches its path against `endpoints`.
/// Returns the matched endpoint name, or a `FridayError` when the URL
/// fails to parse or no endpoint matches.
///
/// Note: the previous version cloned the URL `String` three times; the
/// closures only need a shared borrow, so the clones were removed.
pub fn get_name(
    r: &mut dyn core::FridayRequest,
    endpoints: &Vec<endpoint::Endpoint>) -> Result<String, FridayError> {
    let url_str = r.url();
    url::Url::parse(&url_str).map_or_else(
        |err| frierr!("Failed to parse url {} - Reason: {}", url_str, err),
        |u| endpoint::Endpoint::match_on_path(u.path(), endpoints).map_or_else(
            propagate!("Failed to get endpoint for {}", url_str),
            |name| Ok(name.to_owned())
        ))
}
/// Builds a `403 Forbidden` plain-text response with the given message.
///
/// Fix: this previously returned status 405 (Method Not Allowed), which
/// matches neither the function's name nor HTTP semantics — 403 is
/// Forbidden (RFC 9110).
pub fn response_forbidden<S: AsRef<str>>(message: S) -> Result<core::Response, FridayError> {
    Ok(core::Response::TEXT { status: 403, content: String::from(message.as_ref()) })
}
/// Formats the arguments `format!`-style, builds a forbidden text
/// response via `response_forbidden`, and converts it into the caller's
/// expected result type.
#[macro_export]
macro_rules! forbidden {
    ($str:expr $(,$arg: expr)*) => {
        $crate::response_forbidden(format!($str $(,$arg)*)).into()
    }
}
/// Builds a `406 Not Acceptable` plain-text response with the given message.
pub fn response_not_acceptable<S: AsRef<str>>(message: S) -> Result<core::Response, FridayError> {
    let content = message.as_ref().to_owned();
    Ok(core::Response::TEXT { status: 406, content })
}
/// Formats the arguments `format!`-style, builds a `406 Not Acceptable`
/// response via `response_not_acceptable`, and converts it into the
/// caller's expected result type.
#[macro_export]
macro_rules! not_acceptable {
    ($str:expr $(,$arg: expr)*) => {
        $crate::response_not_acceptable(format!($str $(,$arg)*)).into()
    }
}
/// Builds a `200 OK` JSON response of the form
/// `{"ok": true, "message": <message>}`.
pub fn response_ok<S: AsRef<str>>(message: S) -> core::Response {
    let body = serde_json::json!({
        "ok": true,
        "message": message.as_ref()
    });
    core::Response::JSON { status: 200, content: body.to_string() }
}
/// Formats the arguments `format!`-style, builds an `{"ok": true, ...}`
/// JSON response via `response_ok`, and converts it into the caller's
/// expected result type.
#[macro_export]
macro_rules! ok {
    ($str:expr $(,$arg: expr)*) => {
        $crate::response_ok(format!($str $(,$arg)*)).into()
    }
}
/// Builds a `200 OK` JSON response of the form
/// `{"ok": false, "message": <message>}` — HTTP success, application-level
/// failure.
pub fn response_not_ok<S: AsRef<str>>(message: S) -> core::Response {
    let body = serde_json::json!({
        "ok": false,
        "message": message.as_ref()
    });
    core::Response::JSON { status: 200, content: body.to_string() }
}
/// Formats the arguments `format!`-style, builds an `{"ok": false, ...}`
/// JSON response via `response_not_ok`, and converts it into the caller's
/// expected result type.
#[macro_export]
macro_rules! not_ok {
    ($str:expr $(,$arg: expr)*) => {
        $crate::response_not_ok(format!($str $(,$arg)*)).into()
    }
}
/// Serializes `value` to JSON and wraps it in a response with the given
/// HTTP `status`. Returns a `FridayError` when serialization fails.
pub fn response_json<S>(status: i32, value: &S) -> Result<core::Response, FridayError>
where
    S: Serialize,
{
    match serde_json::to_string(value) {
        Ok(content) => Ok(core::Response::JSON { status, content }),
        Err(err) => frierr!("Failed to serialize JSON response - Reason: {}", err),
    }
}
/// Serializes `$obj` (any `Serialize` value) into a JSON response with
/// status `$status` via `response_json`, and converts it into the
/// caller's expected result type.
#[macro_export]
macro_rules! json {
    ($status:expr, $obj:expr) => {
        $crate::response_json($status, $obj).into()
    }
}
// Lets a bare `Response` convert into any `Result<Response, T>` (used by
// the response macros above). `From` cannot be used instead: the target
// `Result<core::Response, T>` is a foreign type whose parameter `T` is
// uncovered, so the orphan rules only permit the `Into` direction.
impl<T> Into<Result<core::Response, T>> for core::Response {
    fn into(self) -> Result<core::Response, T> {
        Ok(self)
    }
}
/// Deserializes a JSON request body into `S` and hands the value to the
/// handler `f`.
///
/// Responds `406 Not Acceptable` when the body is present but not JSON,
/// and propagates errors from reading the request or from
/// deserialization (the failing body is echoed in the error message).
pub fn request_json<S>(
    r: &mut dyn core::FridayRequest,
    f: &dyn Fn(S) -> Result<core::Response, FridayError>) -> Result<core::Response, FridayError>
    where S: DeserializeOwned {
    match r.data() {
        Ok(data) => match data {
            core::Data::JSON { json } =>
                // On success the deserialized value goes straight to `f`.
                serde_json::from_str(&json).map_or_else(
                    |err| frierr!(
                        "Failed to deserialize JSON body - Reason : {}\n\
                        BODY: {}", err, json),
                    f),
            _ => not_acceptable!("Body must contain JSON data"),
        },
        Err(err) => propagate!("Unable to get data from request")(err)
    }
}
|
use std::io::{self, Read, Write};
use std::net::{self, Shutdown, SocketAddr, ToSocketAddrs};
#[cfg(feature = "io_timeout")]
use std::time::Duration;
use crate::io as io_impl;
use crate::io::net as net_impl;
use crate::io::split_io::{SplitIo, SplitReader, SplitWriter};
#[cfg(unix)]
use crate::io::sys::mod_socket;
#[cfg(unix)]
use crate::io::AsIoData;
#[cfg(feature = "io_timeout")]
use crate::sync::atomic_dur::AtomicDuration;
use crate::yield_now::yield_with_io;
// ===== TcpStream =====
//
//
#[derive(Debug)]
pub struct TcpStream {
    // Registration handle with the coroutine I/O system; kept for the
    // stream's lifetime (dropping deregisters from the selector).
    _io: io_impl::IoData,
    // The underlying std stream, switched to non-blocking mode in `new`.
    sys: net::TcpStream,
    // Timeouts are tracked here as well as on the socket because the
    // coroutine read/write paths take them as explicit arguments.
    #[cfg(feature = "io_timeout")]
    read_timeout: AtomicDuration,
    #[cfg(feature = "io_timeout")]
    write_timeout: AtomicDuration,
}
impl TcpStream {
    /// Wraps a std stream: switches it to non-blocking mode and registers
    /// it with the coroutine I/O system.
    fn new(s: net::TcpStream) -> io::Result<TcpStream> {
        // only set non blocking in coroutine context
        // we would first call nonblocking io in the coroutine
        // to avoid unnecessary context switch
        s.set_nonblocking(true)?;
        io_impl::add_socket(&s).map(|io| TcpStream {
            _io: io,
            sys: s,
            #[cfg(feature = "io_timeout")]
            read_timeout: AtomicDuration::new(None),
            #[cfg(feature = "io_timeout")]
            write_timeout: AtomicDuration::new(None),
        })
    }

    /// Returns a shared reference to the underlying std stream.
    #[inline]
    pub fn inner(&self) -> &net::TcpStream {
        &self.sys
    }

    /// Returns a mutable reference to the underlying std stream.
    #[inline]
    pub fn inner_mut(&mut self) -> &mut net::TcpStream {
        &mut self.sys
    }

    /// Opens a TCP connection to `addr`, yielding via `yield_with_io`
    /// while the connection is in progress.
    pub fn connect<A: ToSocketAddrs>(addr: A) -> io::Result<TcpStream> {
        let mut c = net_impl::TcpStreamConnect::new(
            addr,
            #[cfg(feature = "io_timeout")]
            None,
        )?;
        #[cfg(unix)]
        {
            // Fast path: the connection may already be established.
            if c.check_connected()? {
                return c.done();
            }
        }
        yield_with_io(&c, c.is_coroutine);
        c.done()
    }

    /// Like `connect`, but bounded by `timeout`.
    #[cfg(feature = "io_timeout")]
    pub fn connect_timeout(addr: &SocketAddr, timeout: Duration) -> io::Result<TcpStream> {
        let mut c = net_impl::TcpStreamConnect::new(addr, Some(timeout))?;
        #[cfg(unix)]
        {
            // Fast path: the connection may already be established.
            if c.check_connected()? {
                return c.done();
            }
        }
        yield_with_io(&c, c.is_coroutine);
        c.done()
    }

    /// Returns the remote peer's address.
    pub fn peer_addr(&self) -> io::Result<SocketAddr> {
        self.sys.peer_addr()
    }

    /// Returns the local socket address.
    pub fn local_addr(&self) -> io::Result<SocketAddr> {
        self.sys.local_addr()
    }

    /// Duplicates the stream. The clone gets its own registration and
    /// inherits the current read/write timeouts.
    #[cfg(not(windows))]
    pub fn try_clone(&self) -> io::Result<TcpStream> {
        let s = self.sys.try_clone().and_then(TcpStream::new)?;
        #[cfg(feature = "io_timeout")]
        s.set_read_timeout(self.read_timeout.get()).unwrap();
        #[cfg(feature = "io_timeout")]
        s.set_write_timeout(self.write_timeout.get()).unwrap();
        Ok(s)
    }

    // windows doesn't support add dup handler to IOCP
    #[cfg(windows)]
    pub fn try_clone(&self) -> io::Result<TcpStream> {
        let s = self.sys.try_clone()?;
        s.set_nonblocking(true)?;
        // ignore the result here
        // it always failed with "The parameter is incorrect"
        io_impl::add_socket(&s).ok();
        Ok(TcpStream {
            _io: io_impl::IoData::new(0),
            sys: s,
            #[cfg(feature = "io_timeout")]
            read_timeout: AtomicDuration::new(self.read_timeout.get()),
            #[cfg(feature = "io_timeout")]
            write_timeout: AtomicDuration::new(self.write_timeout.get()),
        })
    }

    /// Shuts down the read, write, or both halves of the connection.
    pub fn shutdown(&self, how: Shutdown) -> io::Result<()> {
        self.sys.shutdown(how)
    }

    /// Enables or disables TCP_NODELAY (Nagle's algorithm).
    pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> {
        self.sys.set_nodelay(nodelay)
    }

    /// Retrieves and clears any pending error on the socket.
    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
        self.sys.take_error()
    }

    /// Sets the read timeout on both the std socket and the coroutine
    /// bookkeeping copy.
    #[cfg(feature = "io_timeout")]
    pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.sys.set_read_timeout(dur)?;
        self.read_timeout.store(dur);
        Ok(())
    }

    /// Sets the write timeout on both the std socket and the coroutine
    /// bookkeeping copy.
    #[cfg(feature = "io_timeout")]
    pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
        self.sys.set_write_timeout(dur)?;
        self.write_timeout.store(dur);
        Ok(())
    }

    /// Returns the currently configured read timeout.
    #[cfg(feature = "io_timeout")]
    pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
        Ok(self.read_timeout.get())
    }

    /// Returns the currently configured write timeout.
    #[cfg(feature = "io_timeout")]
    pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
        Ok(self.write_timeout.get())
    }

    /// Sets the IP time-to-live for packets sent on this stream.
    pub fn set_ttl(&self, ttl: u32) -> io::Result<()> {
        self.sys.set_ttl(ttl)
    }

    /// Returns the IP time-to-live of this stream.
    pub fn ttl(&self) -> io::Result<u32> {
        self.sys.ttl()
    }

    // convert std::net::TcpStream to Self without add_socket
    // (the caller already owns a registration in `io`)
    pub(crate) fn from_stream(s: net::TcpStream, io: io_impl::IoData) -> Self {
        TcpStream {
            _io: io,
            sys: s,
            #[cfg(feature = "io_timeout")]
            read_timeout: AtomicDuration::new(None),
            #[cfg(feature = "io_timeout")]
            write_timeout: AtomicDuration::new(None),
        }
    }
}
impl Read for TcpStream {
    /// Reads into `buf`: on unix a non-blocking read is attempted first,
    /// and only when the socket reports EAGAIN/EWOULDBLOCK does the call
    /// fall back to the coroutine path (`SocketRead` + `yield_with_io`).
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        #[cfg(unix)]
        {
            self._io.reset();
            // this is an earlier return try for nonblocking read
            // it's useful for server but not necessary for client
            match self.sys.read(buf) {
                Ok(n) => return Ok(n),
                Err(e) => {
                    // raw_os_error is faster than kind
                    let raw_err = e.raw_os_error();
                    if raw_err == Some(libc::EAGAIN) || raw_err == Some(libc::EWOULDBLOCK) {
                        // do nothing here — fall through to the coroutine path
                    } else {
                        return Err(e);
                    }
                }
            }
        }
        // Socket not ready: hand the operation to the scheduler and
        // resume when it completes (or times out, with "io_timeout").
        let mut reader = net_impl::SocketRead::new(
            self,
            buf,
            #[cfg(feature = "io_timeout")]
            self.read_timeout.get(),
        );
        yield_with_io(&reader, reader.is_coroutine);
        reader.done()
    }
}
impl Write for TcpStream {
    /// Writes `buf` using the same fast-path/yield pattern as `read`:
    /// try a non-blocking write first (unix), fall back to the
    /// coroutine path on EAGAIN/EWOULDBLOCK.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        #[cfg(unix)]
        {
            self._io.reset();
            // this is an earlier return try for nonblocking write
            match self.sys.write(buf) {
                Ok(n) => return Ok(n),
                Err(e) => {
                    // raw_os_error is faster than kind
                    let raw_err = e.raw_os_error();
                    if raw_err == Some(libc::EAGAIN) || raw_err == Some(libc::EWOULDBLOCK) {
                        // do nothing here — fall through to the coroutine path
                    } else {
                        return Err(e);
                    }
                }
            }
        }
        let mut writer = net_impl::SocketWrite::new(
            self,
            buf,
            #[cfg(feature = "io_timeout")]
            self.write_timeout.get(),
        );
        yield_with_io(&writer, writer.is_coroutine);
        writer.done()
    }

    /// Vectored write (unix only), mirroring `write`.
    #[cfg(unix)]
    fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result<usize> {
        // NOTE(review): the inner #[cfg(unix)] is redundant — the whole
        // function is already unix-only — but harmless.
        #[cfg(unix)]
        {
            self._io.reset();
            // this is an earlier return try for nonblocking write
            match self.sys.write_vectored(bufs) {
                Ok(n) => return Ok(n),
                Err(e) => {
                    // raw_os_error is faster than kind
                    let raw_err = e.raw_os_error();
                    if raw_err == Some(libc::EAGAIN) || raw_err == Some(libc::EWOULDBLOCK) {
                        // do nothing here — fall through to the coroutine path
                    } else {
                        return Err(e);
                    }
                }
            }
        }
        let mut writer = net_impl::SocketWriteVectored::new(
            self,
            &self.sys,
            bufs,
            #[cfg(feature = "io_timeout")]
            self.write_timeout.get(),
        );
        yield_with_io(&writer, writer.is_coroutine);
        writer.done()
    }

    fn flush(&mut self) -> io::Result<()> {
        // TcpStream just return Ok(()), no need to yield
        self.sys.flush()
    }
}
// impl<'a> Read for &'a TcpStream {
// fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
// let s = unsafe { &mut *(*self as *const _ as *mut _) };
// TcpStream::read(s, buf)
// }
// }
// impl<'a> Write for &'a TcpStream {
// fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
// let s = unsafe { &mut *(*self as *const _ as *mut _) };
// TcpStream::write(s, buf)
// }
// fn flush(&mut self) -> io::Result<()> {
// let s = unsafe { &mut *(*self as *const _ as *mut _) };
// TcpStream::flush(s)
// }
// }
#[cfg(unix)]
impl io_impl::AsIoData for TcpStream {
    /// Exposes the stream's I/O registration to the event system.
    fn as_io_data(&self) -> &io_impl::IoData {
        &self._io
    }
}
// ===== TcpListener =====
//
//
#[derive(Debug)]
pub struct TcpListener {
    // Registration handle with the coroutine I/O system.
    _io: io_impl::IoData,
    // The underlying std listener, switched to non-blocking mode in `new`.
    sys: net::TcpListener,
}
impl TcpListener {
    /// Wraps a std listener: switches it to non-blocking mode and
    /// registers it with the coroutine I/O system.
    fn new(s: net::TcpListener) -> io::Result<TcpListener> {
        // only set non blocking in coroutine context
        // we would first call nonblocking io in the coroutine
        // to avoid unnecessary context switch
        s.set_nonblocking(true)?;
        io_impl::add_socket(&s).map(|io| TcpListener { _io: io, sys: s })
    }

    /// Returns a shared reference to the underlying std listener.
    #[inline]
    pub fn inner(&self) -> &net::TcpListener {
        &self.sys
    }

    /// Binds to the first address `addr` resolves to, with SO_REUSEADDR
    /// set and a listen backlog of 1024.
    ///
    /// NOTE(review): `addrs.next().unwrap()` panics when `addr` resolves
    /// to no addresses; returning an `io::Error` would be friendlier.
    pub fn bind<A: ToSocketAddrs>(addr: A) -> io::Result<TcpListener> {
        use socket2::{Domain, Socket, Type};
        let mut addrs = addr.to_socket_addrs()?;
        let addr = addrs.next().unwrap();
        let listener = match &addr {
            SocketAddr::V4(_) => Socket::new(Domain::IPV4, Type::STREAM, None)?,
            SocketAddr::V6(_) => Socket::new(Domain::IPV6, Type::STREAM, None)?,
        };
        // windows not have reuse port but reuse address is not safe
        listener.set_reuse_address(true)?;
        // #[cfg(unix)]
        // listener.set_reuse_port(true)?;
        listener.bind(&addr.into())?;
        // for addr in addrs {
        //     listener.bind(&addr.into())?;
        // }
        listener.listen(1024)?;
        let s = listener.into();
        TcpListener::new(s)
    }

    /// Accepts a connection: non-blocking attempt first (unix), then the
    /// coroutine path when no connection is pending.
    pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> {
        #[cfg(unix)]
        {
            self._io.reset();
            match self.sys.accept() {
                Ok((s, a)) => return TcpStream::new(s).map(|s| (s, a)),
                Err(e) => {
                    // raw_os_error is faster than kind
                    let raw_err = e.raw_os_error();
                    if raw_err == Some(libc::EAGAIN) || raw_err == Some(libc::EWOULDBLOCK) {
                        // do nothing here — fall through to the coroutine path
                    } else {
                        return Err(e);
                    }
                }
            }
        }
        let mut a = net_impl::TcpListenerAccept::new(self)?;
        yield_with_io(&a, a.is_coroutine);
        a.done()
    }

    /// Returns an iterator over incoming connections (never `None`).
    pub fn incoming(&self) -> Incoming {
        Incoming { listener: self }
    }

    /// Returns the address the listener is bound to.
    pub fn local_addr(&self) -> io::Result<SocketAddr> {
        self.sys.local_addr()
    }

    /// Duplicates the listener; the clone gets its own registration.
    #[cfg(not(windows))]
    pub fn try_clone(&self) -> io::Result<TcpListener> {
        self.sys.try_clone().and_then(TcpListener::new)
    }

    // windows doesn't support add dup handler to IOCP
    #[cfg(windows)]
    pub fn try_clone(&self) -> io::Result<TcpListener> {
        let s = self.sys.try_clone()?;
        s.set_nonblocking(true)?;
        io_impl::add_socket(&s).ok();
        Ok(TcpListener {
            _io: io_impl::IoData::new(0),
            sys: s,
        })
    }

    /// Retrieves and clears any pending error on the socket.
    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
        self.sys.take_error()
    }

    // TODO: add all std functions
}
#[cfg(unix)]
impl io_impl::AsIoData for TcpListener {
    /// Exposes the listener's I/O registration to the event system.
    fn as_io_data(&self) -> &io_impl::IoData {
        &self._io
    }
}
// ===== Incoming =====
//
//
/// Blocking iterator over connections accepted by a [`TcpListener`].
pub struct Incoming<'a> {
    listener: &'a TcpListener,
}

impl<'a> Iterator for Incoming<'a> {
    type Item = io::Result<TcpStream>;

    /// Accepts the next connection. Never yields `None`, so iteration
    /// only stops when the caller breaks (e.g. on an `Err`).
    fn next(&mut self) -> Option<io::Result<TcpStream>> {
        Some(self.listener.accept().map(|p| p.0))
    }
}
// ===== UNIX ext =====
//
//
#[cfg(unix)]
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};

#[cfg(unix)]
impl IntoRawFd for TcpStream {
    /// Consumes the wrapper and returns the raw fd.
    fn into_raw_fd(self) -> RawFd {
        self.sys.into_raw_fd()
        // drop self will deregister from the selector
    }
}

#[cfg(unix)]
impl AsRawFd for TcpStream {
    /// Returns the raw fd without giving up ownership.
    fn as_raw_fd(&self) -> RawFd {
        self.sys.as_raw_fd()
    }
}

#[cfg(unix)]
impl FromRawFd for TcpStream {
    /// Wraps a raw fd, registering it with the I/O system.
    /// Panics when registration fails.
    unsafe fn from_raw_fd(fd: RawFd) -> TcpStream {
        TcpStream::new(FromRawFd::from_raw_fd(fd))
            .unwrap_or_else(|e| panic!("from_raw_socket for TcpStream, err = {e:?}"))
    }
}

#[cfg(unix)]
impl IntoRawFd for TcpListener {
    /// Consumes the wrapper and returns the raw fd.
    fn into_raw_fd(self) -> RawFd {
        self.sys.into_raw_fd()
        // drop self will deregister from the selector
    }
}

#[cfg(unix)]
impl AsRawFd for TcpListener {
    /// Returns the raw fd without giving up ownership.
    fn as_raw_fd(&self) -> RawFd {
        self.sys.as_raw_fd()
    }
}

#[cfg(unix)]
impl FromRawFd for TcpListener {
    /// Wraps a raw fd, registering it with the I/O system.
    /// Panics when registration fails.
    unsafe fn from_raw_fd(fd: RawFd) -> TcpListener {
        let s: net::TcpListener = FromRawFd::from_raw_fd(fd);
        TcpListener::new(s)
            .unwrap_or_else(|e| panic!("from_raw_socket for TcpListener, err = {e:?}"))
    }
}
impl SplitIo for TcpStream {
    /// Splits the stream into independently usable read and write halves.
    ///
    /// The write half is a `try_clone` of the socket. On unix both
    /// registrations are adjusted via `mod_socket` — presumably toggling
    /// read vs. write interest (the bool flips between the two calls);
    /// confirm against `sys::mod_socket`.
    fn split(self) -> io::Result<(SplitReader<Self>, SplitWriter<Self>)> {
        let writer = self.try_clone()?;
        #[cfg(unix)]
        mod_socket(writer.as_io_data(), false)?;
        #[cfg(unix)]
        mod_socket(self.as_io_data(), true)?;
        Ok((SplitReader::new(self), SplitWriter::new(writer)))
    }
}
// ===== Windows ext =====
//
//
#[cfg(windows)]
use std::os::windows::io::{AsRawSocket, FromRawSocket, IntoRawSocket, RawSocket};

#[cfg(windows)]
impl IntoRawSocket for TcpStream {
    /// Consumes the wrapper and returns the raw socket handle.
    fn into_raw_socket(self) -> RawSocket {
        self.sys.into_raw_socket()
    }
}

#[cfg(windows)]
impl AsRawSocket for TcpStream {
    /// Returns the raw socket handle without giving up ownership.
    fn as_raw_socket(&self) -> RawSocket {
        self.sys.as_raw_socket()
    }
}

#[cfg(windows)]
impl FromRawSocket for TcpStream {
    /// Wraps a raw socket, registering it with the I/O system.
    /// Panics when registration fails.
    unsafe fn from_raw_socket(s: RawSocket) -> TcpStream {
        // TODO: set the time out info here
        // need to set the read/write timeout from sys and sync each other
        TcpStream::new(FromRawSocket::from_raw_socket(s))
            .unwrap_or_else(|e| panic!("from_raw_socket for TcpStream, err = {e:?}"))
    }
}

#[cfg(windows)]
impl IntoRawSocket for TcpListener {
    /// Consumes the wrapper and returns the raw socket handle.
    fn into_raw_socket(self) -> RawSocket {
        self.sys.into_raw_socket()
    }
}

#[cfg(windows)]
impl AsRawSocket for TcpListener {
    /// Returns the raw socket handle without giving up ownership.
    fn as_raw_socket(&self) -> RawSocket {
        self.sys.as_raw_socket()
    }
}

#[cfg(windows)]
impl FromRawSocket for TcpListener {
    /// Wraps a raw socket, registering it with the I/O system.
    /// Panics when registration fails.
    unsafe fn from_raw_socket(s: RawSocket) -> TcpListener {
        let s: net::TcpListener = FromRawSocket::from_raw_socket(s);
        TcpListener::new(s)
            .unwrap_or_else(|e| panic!("from_raw_socket for TcpListener, err = {e:?}"))
    }
}
|
#[cfg(feature = "normalize")]
extern crate nalgebra;
extern crate serial;
#[cfg(feature = "normalize")]
use nalgebra::*;
use std::fs::{File, OpenOptions};
use std::str::FromStr;
use std::error::Error;
use std::fmt;
use std::io::{BufRead, BufReader, Lines};
#[macro_use]
mod calculus;
use calculus::*;
mod circbuf;
#[macro_use]
mod noconsume;
use noconsume::*;
mod serialbuf;
use serialbuf::*;
#[cfg(all(feature = "hidegravity", feature = "smooth"))]
mod smoothing;
#[cfg(all(feature = "hidegravity", feature = "smooth"))]
use smoothing::*;
#[cfg(feature = "visualize")]
mod visualize;
/// One raw telemetry sample as read from the IMU's CSV stream.
#[derive(Debug, Default, Clone, Copy)]
pub struct RawTelemUnit {
    /// Time since the previous sample, in seconds (parsed from integer
    /// microseconds — see the `FromStr` impl).
    pub delta_t: f32,
    // Raw acceleration components.
    pub acc_x: f32,
    pub acc_y: f32,
    pub acc_z: f32,
    // Orientation angles, in degrees as read; `main` converts them to
    // radians when the "normalize" feature is enabled.
    pub roll: f32,
    pub pitch: f32,
    pub yaw: f32,
}
impl FromStr for RawTelemUnit {
    type Err = Box<dyn Error>;

    /// Parses one CSV row: `delta_t, acc_x, acc_y, acc_z, roll, pitch, yaw`.
    ///
    /// Fields are split on `,` and individually trimmed, so both
    /// `"a, b"` and `"a,b"` parse (the previous version required
    /// exactly `", "` and silently rejected otherwise-valid rows).
    /// `delta_t` is read as integer microseconds and converted to seconds.
    ///
    /// # Errors
    ///
    /// Returns `CSVDeErr` when the row does not have exactly 7 fields,
    /// and propagates numeric parse errors for any field.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let values = s.trim().split(',').map(str::trim).collect::<Vec<&str>>();
        if values.len() != 7 {
            return Err(Box::new(CSVDeErr("Incorrectly sized telemetry data unit detected, dropping entire row.".to_owned())));
        }
        Ok(RawTelemUnit {
            delta_t: values[0].parse::<u32>()? as f32 * 1e-6, // Input is in microseconds
            acc_x: values[1].parse::<f32>()?,
            acc_y: values[2].parse::<f32>()?,
            acc_z: values[3].parse::<f32>()?,
            roll: values[4].parse::<f32>()?,
            pitch: values[5].parse::<f32>()?,
            yaw: values[6].parse::<f32>()?,
        })
    }
}
/// Error reported when a telemetry CSV row cannot be deserialized.
#[derive(Debug)]
pub struct CSVDeErr(String);

impl fmt::Display for CSVDeErr {
    // Displays the wrapped message verbatim.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Error for CSVDeErr {}
/// Reads telemetry rows line-by-line from any buffered source.
pub struct BufCSV<R> {
    // Line iterator over the underlying reader.
    source: Lines<R>,
    // Number of lines attempted so far; used in error reports.
    line_index: usize,
}

impl<R: BufRead> BufCSV<R> {
    /// Wraps any `BufRead` source.
    pub fn new(source: R) -> Self {
        BufCSV {
            source: source.lines(),
            line_index: 0,
        }
    }
}

impl BufCSV<BufReader<File>> {
    /// Opens `path` read-only and reads telemetry rows from the file.
    pub fn from_file(path: &str) -> Result<Self, Box<dyn Error>> {
        let source = OpenOptions::new()
            .read(true)
            .open(path)?;
        let br = BufReader::new(source);
        Ok(Self::new(br))
    }
}
impl<T: BufRead> Iterator for BufCSV<T> {
    type Item = RawTelemUnit;

    /// Yields the next successfully parsed telemetry row.
    ///
    /// Rows that fail to parse are reported on stderr and skipped.
    /// Iteration ends when the source is exhausted or returns an I/O
    /// error for a line.
    ///
    /// Fix: the previous version recursed (`self.next()`) for every
    /// malformed line; Rust does not guarantee tail-call elimination,
    /// so a long run of bad rows could overflow the stack. This version
    /// loops instead — same observable behavior.
    fn next(&mut self) -> Option<Self::Item> {
        while let Some(Ok(line)) = self.source.next() {
            self.line_index += 1;
            match line.parse() {
                Ok(rtu) => return Some(rtu),
                Err(err) => {
                    eprintln!("Deserializing line {} failed: {:?}", self.line_index, err);
                    // fall through and try the next line
                }
            }
        }
        // Source exhausted, or reading a line failed.
        None
    }
}
/// Reads telemetry from the serial port, optionally rotates the
/// acceleration by the negated Euler angles ("normalize" feature), runs
/// the calculus pipeline and, with the "visualize" feature, plots it.
fn main() -> Result<(), Box<dyn Error>> {
    // Degrees-to-radians conversion factor.
    let into_radians = std::f32::consts::PI / 180.;
    // /dev/ttyUSB0 is the port if the clone arduino uses the CH340 USB chip, for genuine arduinos using ATmega16u2, use /dev/ttyACM0
    let bufcsv = BufCSV::new(Cereal::new("/dev/ttyUSB0"));
    // let bufcsv = BufCSV::from_file("testdata/drop.csv")?;

    // Map each row to [t, x, y, z], rotating the acceleration vector by
    // the negated roll/pitch/yaw when "normalize" is enabled.
    let data = bufcsv
        .map(|mut tdb| {
            #[cfg(feature = "normalize")] {
                tdb.roll *= into_radians;
                tdb.pitch *= into_radians;
                tdb.yaw *= into_radians;
                let point = Point3::new(tdb.acc_x, tdb.acc_y, tdb.acc_z);
                let rot = Rotation3::from_euler_angles(-tdb.roll, -tdb.pitch, -tdb.yaw);
                let norm_accel: Point3<f32> = rot.transform_point(&point);
                return [tdb.delta_t, norm_accel[0], norm_accel[1], norm_accel[2]];
            }
            #[cfg(not(feature = "normalize"))] {
                [tdb.delta_t, tdb.acc_x, tdb.acc_y, tdb.acc_z]
            }
        });

    // Fan the [t, x, y, z] rows out into per-component streams.
    let mut unziperator = Unziperator::new(data);
    let ax = unziperator.subscribe();
    let ay = unziperator.subscribe();
    let az = unziperator.subscribe();
    let mut dt = Teeterator::new(unziperator);

    // Optional jerk/smoothing/re-integration chain to hide gravity.
    #[cfg(feature = "hidegravity")]
    let (jx, jy, jz) = calculus!(dt, DifferentiateF32, ax, ay, az);
    #[cfg(all(feature = "hidegravity", feature = "smooth"))]
    let (jx, jy, jz) = calculus!(dt, WeightedMovingAvgF32, jx, jy, jz);
    #[cfg(feature = "hidegravity")]
    let (ax, ay, az) = calculus!(dt, IntegrateF32, jx, jy, jz);
    // TODO: Motion compensation so it doesn't fly off
    // let (vx, vy, vz) = calculus!(dt, IntegrateF32, ax, ay, az);

    // Re-zip the component streams into (t, x, y, z) tuples.
    let data_src = dt
        .zip(ax)
        .zip(ay
            .zip(az)
        )
        .map(|((t, x), (y, z))| {
            (t, x, y, z)
        });

    #[cfg(feature = "visualize")]
    visualize::run(data_src);
    Ok(())
}
|
//! Generic vector with two components.
use std::{cmp::Ord, ops::*};
/// Generic vector with two components.
///
/// It implements multiple operators (for each combination of owned and borrowed
/// args), namely addition, subtraction, element-wise multiplication,
/// element-wise division and multiplication & division by a number. (Note that
/// you can only multiply and divide in the following order: `vector op number`,
/// since it is not possible to implement a foreign trait on `T`.)
///
/// This crate exports a specific version of [`Vecc`](crate::vecc::Vecc) with
/// [`f64`](f64) components — [`Fecc`](crate::fecc::Fecc). It implements
/// additional methods and is heavily inspired by [`p5.Vector`](https://p5js.org/reference/#/p5.Vector).
///
/// # Examples
///
/// Basic arithmetic.
///
/// ```
/// use veccentric::Vecc;
///
/// let a = Vecc::new(3_i32, 4);
/// let b = a * 5; // (15, 20)
/// let c = Vecc::new(-10, -8);
/// let d = b - c; // (5, 12)
/// let e = -d; // (-5, -12)
/// ```
///
/// Shorthand construction using [`From`](std::convert::From).
///
/// ```
/// use veccentric::Vecc;
///
/// let a: Vecc<i32> = (10, 5).into();
/// ```
///
/// Using [`Fecc`](crate::fecc::Fecc)'s extended API.
///
/// ```
/// # use float_cmp::assert_approx_eq;
/// # use std::f64::consts::PI;
/// use veccentric::Fecc;
///
/// let a: Fecc = (3.0, 4.0).into();
/// let b = a / 0.2; // (15.0, 20.0)
/// let c = b.limit(20.0); // (12.0, 16.0)
/// let d = c.rotate(PI); // (-12.0, -16.0)
/// let e = d.turn(0.0); // (20.0, 0.0)
///
/// assert_approx_eq!(f64, e.mag(), 20.0);
/// ```
#[derive(Copy, Clone, Eq, PartialEq, Default, Hash, Debug)]
pub struct Vecc<T> {
    /// First (x) component.
    #[allow(missing_docs)]
    pub x: T,
    /// Second (y) component.
    #[allow(missing_docs)]
    pub y: T,
}
impl<T> Vecc<T> {
    /// Constructs a new vector.
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = Vecc::new(10, 0);
    /// ```
    ///
    /// You can also construct it from a tuple:
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = (10, 0).into();
    /// ```
    pub fn new(x: T, y: T) -> Self {
        Self { x, y }
    }

    /// Takes a dot product of the vector with another.
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = Vecc::new(10, 0);
    /// let b: Vecc<i32> = Vecc::new(5, 0);
    ///
    /// assert_eq!(a.dot(b), 50);
    /// ```
    pub fn dot(self, rhs: Vecc<T>) -> T
    where
        T: Add<Output = T> + Mul<Output = T> + Copy,
    {
        // x1*x2 + y1*y2
        self.x * rhs.x + self.y * rhs.y
    }

    /// Takes the cross-product (a scalar) of the vector with another.
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = Vecc::new(10, 0);
    /// let b: Vecc<i32> = Vecc::new(0, -10);
    ///
    /// assert_eq!(a.cross(b), -100);
    /// ```
    pub fn cross(self, rhs: Vecc<T>) -> T
    where
        T: Sub<Output = T> + Mul<Output = T> + Copy,
    {
        // The z-component of the 3-D cross product of (x, y, 0) vectors.
        self.x * rhs.y - self.y * rhs.x
    }
}
/// Advanced Rust-magic. This trait is needed to implement `min` and `max` for
/// `Fecc`, otherwise it conflicts with `Vecc<T>`'s implementation. Big thanks to [u/figsoda](https://www.reddit.com/user/figsoda/) ([link to Reddit post](https://www.reddit.com/r/rust/comments/paw1lm/implementation_of_from_for_generic_struct/)).
///
/// Note: `auto trait` and negative impls require a nightly compiler.
pub auto trait Notf64 {}

impl !Notf64 for f64 {}
impl<T> Vecc<T>
where
    T: Ord + Notf64,
{
    /// Performs element-wise [`min`](std::cmp::Ord::min).
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = Vecc::new(-100, 100);
    /// let b: Vecc<i32> = Vecc::new(0, 0);
    /// let min = a.min(b);
    ///
    /// assert_eq!(min.x, -100);
    /// assert_eq!(min.y, 0);
    /// ```
    pub fn min(self, rhs: Vecc<T>) -> Vecc<T> {
        Self {
            x: self.x.min(rhs.x),
            y: self.y.min(rhs.y),
        }
    }

    /// Performs element-wise [`max`](std::cmp::Ord::max).
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = Vecc::new(-100, 100);
    /// let b: Vecc<i32> = Vecc::new(0, 0);
    /// let max = a.max(b);
    ///
    /// assert_eq!(max.x, 0);
    /// assert_eq!(max.y, 100);
    /// ```
    pub fn max(self, rhs: Vecc<T>) -> Vecc<T> {
        Self {
            x: self.x.max(rhs.x),
            y: self.y.max(rhs.y),
        }
    }

    /// Performs element-wise [`clamp`](std::cmp::Ord::clamp).
    ///
    /// Panics if `min.x > max.x` or `min.y > max.y` (per `Ord::clamp`).
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = Vecc::new(-100, 100);
    /// let min: Vecc<i32> = Vecc::new(0, 10);
    /// let max: Vecc<i32> = Vecc::new(0, 10);
    /// let clamped = a.clamp(min, max);
    ///
    /// assert_eq!(clamped.x, 0);
    /// assert_eq!(clamped.y, 10);
    /// ```
    pub fn clamp(self, min: Vecc<T>, max: Vecc<T>) -> Vecc<T> {
        Self {
            x: self.x.clamp(min.x, max.x),
            y: self.y.clamp(min.y, max.y),
        }
    }
}
impl<T> From<(T, T)> for Vecc<T> {
    /// Constructs a new vector from a tuple.
    ///
    /// # Examples
    ///
    /// ```
    /// use veccentric::Vecc;
    ///
    /// let a: Vecc<i32> = (10, 0).into();
    /// ```
    fn from((x, y): (T, T)) -> Self {
        Self { x, y }
    }
}
// `From<Vecc<T>> for (T, T)` is not allowed by the orphan rules (the
// self type would be a foreign tuple with an uncovered `T`), hence the
// `Into` direction and the clippy allow.
#[allow(clippy::from_over_into)]
impl<T> Into<(T, T)> for Vecc<T> {
    /// Decomposes the vector into an `(x, y)` tuple.
    fn into(self) -> (T, T) {
        (self.x, self.y)
    }
}
/// Advanced Rust-magic. This trait is needed to implement `From<Vecc<U>>` for
/// `Vecc<T>`, otherwise it conflicts with core's implementation of `From<T> for T` (when `U == T`). Big thanks to [u/figsoda](https://www.reddit.com/user/figsoda/) ([link to Reddit post](https://www.reddit.com/r/rust/comments/paw1lm/implementation_of_from_for_generic_struct/)).
pub auto trait Different {}

impl<T> !Different for (T, T) {}

impl<T, U> From<Vecc<U>> for Vecc<T>
where
    T: From<U>,
    // `(T, U): Different` rules out `U == T`, avoiding overlap with the
    // blanket `From<T> for T` in core.
    (T, U): Different,
{
    /// Converts each component with `T::from`.
    fn from(other: Vecc<U>) -> Vecc<T> {
        Vecc {
            x: From::from(other.x),
            y: From::from(other.y),
        }
    }
}
// Unary operators, for both owned and borrowed receivers. Written with
// operator sugar (`-`, `!`), which desugars to the same trait calls.

// Neg.
// Owned.
impl<T, U> Neg for Vecc<T>
where
    T: Neg<Output = U>,
{
    type Output = Vecc<U>;

    /// Negates both components.
    fn neg(self) -> Self::Output {
        Vecc { x: -self.x, y: -self.y }
    }
}

// Borrowed.
impl<T, U> Neg for &Vecc<T>
where
    T: Neg<Output = U> + Copy,
{
    type Output = Vecc<U>;

    /// Negates both components (components are copied out).
    fn neg(self) -> Self::Output {
        Vecc { x: -self.x, y: -self.y }
    }
}

// Not.
// Owned.
impl<T, U> Not for Vecc<T>
where
    T: Not<Output = U>,
{
    type Output = Vecc<U>;

    /// Applies logical/bitwise NOT to both components.
    fn not(self) -> Self::Output {
        Vecc { x: !self.x, y: !self.y }
    }
}

// Borrowed.
impl<T, U> Not for &Vecc<T>
where
    T: Not<Output = U> + Copy,
{
    type Output = Vecc<U>;

    /// Applies logical/bitwise NOT to both components.
    fn not(self) -> Self::Output {
        Vecc { x: !self.x, y: !self.y }
    }
}
// Binary operators, one impl per owned/borrowed combination. Written
// with operator sugar, which desugars to the same trait calls.

// Add.
// Owned & owned.
impl<T> Add<Vecc<T>> for Vecc<T>
where
    T: Add<Output = T>,
{
    type Output = Vecc<T>;

    /// Component-wise addition.
    fn add(self, rhs: Vecc<T>) -> Self::Output {
        Vecc { x: self.x + rhs.x, y: self.y + rhs.y }
    }
}

// Owned & borrowed.
impl<T> Add<&Vecc<T>> for Vecc<T>
where
    T: Add<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Component-wise addition.
    fn add(self, rhs: &Vecc<T>) -> Self::Output {
        Vecc { x: self.x + rhs.x, y: self.y + rhs.y }
    }
}

// Borrowed & owned.
impl<T> Add<Vecc<T>> for &Vecc<T>
where
    T: Add<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Component-wise addition.
    fn add(self, rhs: Vecc<T>) -> Self::Output {
        Vecc { x: self.x + rhs.x, y: self.y + rhs.y }
    }
}

// Borrowed & borrowed.
impl<T> Add<&Vecc<T>> for &Vecc<T>
where
    T: Add<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Component-wise addition.
    fn add(self, rhs: &Vecc<T>) -> Self::Output {
        Vecc { x: self.x + rhs.x, y: self.y + rhs.y }
    }
}
// Sub.
// Owned & owned.
impl<T> Sub<Vecc<T>> for Vecc<T>
where
    T: Sub<Output = T>,
{
    type Output = Vecc<T>;

    /// Component-wise subtraction.
    fn sub(self, rhs: Vecc<T>) -> Self::Output {
        Vecc { x: self.x - rhs.x, y: self.y - rhs.y }
    }
}

// Owned & borrowed.
impl<T> Sub<&Vecc<T>> for Vecc<T>
where
    T: Sub<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Component-wise subtraction.
    fn sub(self, rhs: &Vecc<T>) -> Self::Output {
        Vecc { x: self.x - rhs.x, y: self.y - rhs.y }
    }
}

// Borrowed & owned.
impl<T> Sub<Vecc<T>> for &Vecc<T>
where
    T: Sub<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Component-wise subtraction.
    fn sub(self, rhs: Vecc<T>) -> Self::Output {
        Vecc { x: self.x - rhs.x, y: self.y - rhs.y }
    }
}

// Borrowed & borrowed.
impl<T> Sub<&Vecc<T>> for &Vecc<T>
where
    T: Sub<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Component-wise subtraction.
    fn sub(self, rhs: &Vecc<T>) -> Self::Output {
        Vecc { x: self.x - rhs.x, y: self.y - rhs.y }
    }
}
// Mul with T (scalar on the right: `vector * number`).
// Owned & owned.
impl<T> Mul<T> for Vecc<T>
where
    T: Mul<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Scales both components by `rhs`.
    fn mul(self, rhs: T) -> Self::Output {
        Vecc { x: self.x * rhs, y: self.y * rhs }
    }
}

// Owned & borrowed.
impl<T> Mul<&T> for Vecc<T>
where
    T: Mul<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Scales both components by `*rhs`.
    fn mul(self, rhs: &T) -> Self::Output {
        Vecc { x: self.x * *rhs, y: self.y * *rhs }
    }
}

// Borrowed & owned.
impl<T> Mul<T> for &Vecc<T>
where
    T: Mul<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Scales both components by `rhs`.
    fn mul(self, rhs: T) -> Self::Output {
        Vecc { x: self.x * rhs, y: self.y * rhs }
    }
}

// Borrowed & borrowed.
impl<T> Mul<&T> for &Vecc<T>
where
    T: Mul<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Scales both components by `*rhs`.
    fn mul(self, rhs: &T) -> Self::Output {
        Vecc { x: self.x * *rhs, y: self.y * *rhs }
    }
}
// Div with T (scalar on the right: `vector / number`).
// Owned & owned.
impl<T> Div<T> for Vecc<T>
where
    T: Div<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Divides both components by `rhs`.
    fn div(self, rhs: T) -> Self::Output {
        Vecc { x: self.x / rhs, y: self.y / rhs }
    }
}

// Owned & borrowed.
impl<T> Div<&T> for Vecc<T>
where
    T: Div<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Divides both components by `*rhs`.
    fn div(self, rhs: &T) -> Self::Output {
        Vecc { x: self.x / *rhs, y: self.y / *rhs }
    }
}

// Borrowed & owned.
impl<T> Div<T> for &Vecc<T>
where
    T: Div<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Divides both components by `rhs`.
    fn div(self, rhs: T) -> Self::Output {
        Vecc { x: self.x / rhs, y: self.y / rhs }
    }
}

// Borrowed & borrowed.
impl<T> Div<&T> for &Vecc<T>
where
    T: Div<Output = T> + Copy,
{
    type Output = Vecc<T>;

    /// Divides both components by `*rhs`.
    fn div(self, rhs: &T) -> Self::Output {
        Vecc { x: self.x / *rhs, y: self.y / *rhs }
    }
}
// Rem, element-wise with another vector and scalar with T. The `Notf64`
// bound keeps these impls from clashing with `Fecc`'s own remainder.

// Rem.
// Owned & owned.
impl<T> Rem<Vecc<T>> for Vecc<T>
where
    T: Rem<Output = T> + Notf64,
{
    type Output = Vecc<T>;

    /// Component-wise remainder.
    fn rem(self, rhs: Vecc<T>) -> Self::Output {
        Vecc { x: self.x % rhs.x, y: self.y % rhs.y }
    }
}

// Owned & borrowed.
impl<T> Rem<&Vecc<T>> for Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Component-wise remainder.
    fn rem(self, rhs: &Vecc<T>) -> Self::Output {
        Vecc { x: self.x % rhs.x, y: self.y % rhs.y }
    }
}

// Borrowed & owned.
impl<T> Rem<Vecc<T>> for &Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Component-wise remainder.
    fn rem(self, rhs: Vecc<T>) -> Self::Output {
        Vecc { x: self.x % rhs.x, y: self.y % rhs.y }
    }
}

// Borrowed & borrowed.
impl<T> Rem<&Vecc<T>> for &Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Component-wise remainder.
    fn rem(self, rhs: &Vecc<T>) -> Self::Output {
        Vecc { x: self.x % rhs.x, y: self.y % rhs.y }
    }
}

// Rem with T.
// Owned & owned.
impl<T> Rem<T> for Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Remainder of both components by the scalar `rhs`.
    fn rem(self, rhs: T) -> Self::Output {
        Vecc { x: self.x % rhs, y: self.y % rhs }
    }
}

// Owned & borrowed.
impl<T> Rem<&T> for Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Remainder of both components by the scalar `*rhs`.
    fn rem(self, rhs: &T) -> Self::Output {
        Vecc { x: self.x % *rhs, y: self.y % *rhs }
    }
}

// Borrowed & owned.
impl<T> Rem<T> for &Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Remainder of both components by the scalar `rhs`.
    fn rem(self, rhs: T) -> Self::Output {
        Vecc { x: self.x % rhs, y: self.y % rhs }
    }
}

// Borrowed & borrowed.
impl<T> Rem<&T> for &Vecc<T>
where
    T: Rem<Output = T> + Copy + Notf64,
{
    type Output = Vecc<T>;

    /// Remainder of both components by the scalar `*rhs`.
    fn rem(self, rhs: &T) -> Self::Output {
        Vecc { x: self.x % *rhs, y: self.y % *rhs }
    }
}
// *Assign.
// AddAssign.
// Owned.
impl<T> AddAssign<Vecc<T>> for Vecc<T>
where
T: AddAssign<T>,
{
fn add_assign(&mut self, other: Vecc<T>) {
self.x.add_assign(other.x);
self.y.add_assign(other.y);
}
}
// Borrowed.
impl<T> AddAssign<&Vecc<T>> for Vecc<T>
where
T: AddAssign<T> + Copy,
{
fn add_assign(&mut self, other: &Vecc<T>) {
self.x.add_assign(other.x);
self.y.add_assign(other.y);
}
}
// SubAssign.
// Owned.
impl<T> SubAssign<Vecc<T>> for Vecc<T>
where
T: SubAssign<T>,
{
fn sub_assign(&mut self, rhs: Vecc<T>) {
self.x.sub_assign(rhs.x);
self.y.sub_assign(rhs.y);
}
}
// Borrowed.
impl<T> SubAssign<&Vecc<T>> for Vecc<T>
where
T: SubAssign<T> + Copy,
{
fn sub_assign(&mut self, rhs: &Vecc<T>) {
self.x.sub_assign(rhs.x);
self.y.sub_assign(rhs.y);
}
}
// MulAssign with T.
// Owned.
impl<T> MulAssign<T> for Vecc<T>
where
T: MulAssign<T> + Copy,
{
fn mul_assign(&mut self, rhs: T) {
self.x.mul_assign(rhs);
self.y.mul_assign(rhs);
}
}
// Borrowed.
impl<T> MulAssign<&T> for Vecc<T>
where
T: MulAssign<T> + Copy,
{
fn mul_assign(&mut self, rhs: &T) {
self.x.mul_assign(*rhs);
self.y.mul_assign(*rhs);
}
}
// DivAssign with T.
// Owned.
impl<T> DivAssign<T> for Vecc<T>
where
T: DivAssign<T> + Copy,
{
fn div_assign(&mut self, rhs: T) {
self.x.div_assign(rhs);
self.y.div_assign(rhs);
}
}
// Borrowed.
impl<T> DivAssign<&T> for Vecc<T>
where
T: DivAssign<T> + Copy,
{
fn div_assign(&mut self, rhs: &T) {
self.x.div_assign(*rhs);
self.y.div_assign(*rhs);
}
}
// RemAssign.
// Owned.
impl<T> RemAssign<Vecc<T>> for Vecc<T>
where
    T: RemAssign<T> + Notf64,
{
    /// Component-wise `%=` with an owned vector.
    fn rem_assign(&mut self, rhs: Vecc<T>) {
        self.x %= rhs.x;
        self.y %= rhs.y;
    }
}
// Borrowed.
impl<T> RemAssign<&Vecc<T>> for Vecc<T>
where
    T: RemAssign<T> + Copy + Notf64,
{
    /// Component-wise `%=` with a borrowed vector (components are copied).
    fn rem_assign(&mut self, rhs: &Vecc<T>) {
        self.x %= rhs.x;
        self.y %= rhs.y;
    }
}
// RemAssign with T.
// Owned.
impl<T> RemAssign<T> for Vecc<T>
where
    T: RemAssign<T> + Copy + Notf64,
{
    /// `%=` of both components by the scalar `rhs`.
    fn rem_assign(&mut self, rhs: T) {
        self.x %= rhs;
        self.y %= rhs;
    }
}
// Borrowed.
impl<T> RemAssign<&T> for Vecc<T>
where
    T: RemAssign<T> + Copy + Notf64,
{
    /// `%=` of both components by the borrowed scalar.
    fn rem_assign(&mut self, rhs: &T) {
        self.x %= *rhs;
        self.y %= *rhs;
    }
}
|
use super::button::Button;
use serde::ser::{Serialize, Serializer, SerializeStruct};
use serde_json::Value;
/// Common interface for message cards that render to a JSON payload.
pub trait Card: Send + Sync {
    /// Serializes the card into a `serde_json::Value`.
    fn to_json(&self) -> Value;
    /// Static discriminator naming the card kind (e.g. "generic", "buttons").
    fn typed(&self) -> &'static str ;
}
/// Tap action for a card: opens `url` when the card body is tapped.
#[derive(Clone)]
pub struct DefaultAction {
    // Serialized as the "type" field; always "web_url" in practice.
    status: &'static str,
    url: String,
    //title: String,
}
impl Card for DefaultAction {
    /// Serializes via this type's custom `Serialize` impl.
    fn to_json(&self) -> Value {
        json!(self)
    }
    /// Default actions have no card-template type of their own.
    fn typed(&self) -> &'static str {
        "none"
    }
}
impl DefaultAction {
    /// Creates a web-URL tap action.
    ///
    /// `_title` is currently unused — the serialized payload only carries
    /// "type" and "url" (the `title` field is commented out on the struct) —
    /// but it is kept in the signature for caller compatibility. The
    /// underscore silences the unused-parameter warning.
    pub fn new(_title: &str, url: &str) -> Self {
        DefaultAction {
            status: "web_url",
            url: String::from(url),
        }
    }
}
impl Serialize for DefaultAction {
    /// Serializes as `{"type": ..., "url": ...}`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: Serializer,
    {
        // The length hint must match the number of fields actually written
        // (2); the previous hint of 3 counted the commented-out `title`
        // field, which can break length-checked formats.
        let mut state = serializer.serialize_struct("DefaultAction", 2)?;
        state.serialize_field("type", &self.status)?;
        state.serialize_field("url", &self.url)?;
        state.end()
    }
}
/// "Generic template" card: a title plus optional subtitle, image, buttons,
/// and a default tap action.
#[derive(Clone)]
pub struct CardGeneric {
    title: String,
    subtitle: Option<String>,
    image_url: Option<String>,
    buttons: Option<Vec<Button>>,
    default_action: Option<DefaultAction> // URL opened when the card body itself is tapped.
}
impl Card for CardGeneric {
    /// Serializes via this type's custom `Serialize` impl.
    fn to_json(&self) -> Value {
        json!( self )
    }
    fn typed(&self) -> &'static str {
        "generic"
    }
}
impl Serialize for CardGeneric {
    /// Serializes only the fields that are present; optional fields that are
    /// `None` are omitted from the output entirely.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where S: Serializer,
    {
        let mut state = serializer.serialize_struct("CardGeneric", 5)?;
        state.serialize_field("title", &self.title)?;
        // Previously `subtitle` was unconditionally unwrapped, which panicked
        // for any card built with `CardGeneric::new` and no `.subtitle(..)`
        // call; it is now guarded like the other optional fields. Borrowing
        // with `if let Some(..) = &field` also avoids the `clone()` each of
        // the old guards performed.
        if let Some(subtitle) = &self.subtitle {
            state.serialize_field("subtitle", subtitle)?;
        }
        if let Some(image_url) = &self.image_url {
            state.serialize_field("image_url", image_url)?;
        }
        if let Some(default_action) = &self.default_action {
            state.serialize_field("default_action", default_action)?;
        }
        if let Some(buttons) = &self.buttons {
            state.serialize_field("buttons", buttons)?;
        }
        state.end()
    }
}
impl CardGeneric {
    /// Creates a generic card with only a title; all optional fields unset.
    pub fn new(title: &str) -> Self {
        CardGeneric {
            title: title.to_owned(),
            subtitle: None,
            image_url: None,
            buttons: None,
            default_action: None,
        }
    }

    /// Builder: sets the subtitle.
    pub fn subtitle(mut self, subtitle: &str) -> Self {
        self.subtitle = Some(subtitle.to_owned());
        self
    }

    /// Builder: sets the image URL.
    pub fn image(mut self, url: &str) -> Self {
        self.image_url = Some(url.to_owned());
        self
    }

    /// Builder: sets the default tap action.
    pub fn default_action(mut self, default_action: DefaultAction) -> Self {
        self.default_action = Some(default_action);
        self
    }

    /// Builder: appends a button, creating the list on first use.
    pub fn button(mut self, button: Button) -> Self {
        self.buttons.get_or_insert_with(Vec::new).push(button);
        self
    }
}
/// "Button template" card: a text prompt with attached buttons.
pub struct CardButtons {
    text: String,
    buttons: Option<Vec<Button>>,
}
impl Card for CardButtons {
    /// Serializes as a "button" template payload.
    fn to_json(&self) -> Value {
        // `unwrap_or_default` emits an empty button list instead of
        // panicking when no button was ever added via `.button(..)`.
        json!({ "template_type":"button" , "text": self.text , "buttons" : self.buttons.clone().unwrap_or_default() })
    }
    fn typed(&self) -> &'static str {
        "buttons"
    }
}
impl CardButtons {
pub fn new(text: &str) -> Self {
CardButtons{
text: String::from(text),
buttons: None,
}
}
pub fn button(mut self, button: Button) -> Self {
match &mut self.buttons {
Some(e) => e.push(button),
None => self.buttons = Some(vec!(button))
}
self
}
} |
use necsim_core_bond::{NonNegativeF64, PositiveF64};
use crate::{
cogs::{Habitat, LineageReference, LineageStore, RngCore},
landscape::{IndexedLocation, Location},
simulation::partial::emigration_exit::PartialSimulation,
};
/// Simulation cog that decides, at dispersal time, whether a lineage
/// emigrates out of the local partition or stays for local processing.
///
/// NOTE(review): the behavioural contract is encoded in the
/// `#[debug_ensures]` attributes below (contracts-style macros, presumably
/// enabled at the crate root — confirm).
#[allow(
    clippy::inline_always,
    clippy::inline_fn_without_body,
    clippy::too_many_arguments
)]
#[contract_trait]
pub trait EmigrationExit<H: Habitat, G: RngCore, R: LineageReference<H>, S: LineageStore<H, R>>:
    crate::cogs::Backup + core::fmt::Debug
{
    /// Either emigrates the lineage — removing it from the local lineage
    /// store and returning `None` — or returns all inputs unchanged so the
    /// caller continues processing the event locally.
    #[must_use]
    #[debug_ensures(match &ret {
        Some((
            ret_lineage_reference,
            ret_dispersal_origin,
            ret_dispersal_target,
            ret_prior_time,
            ret_event_time,
        )) => {
            ret_lineage_reference == &old(lineage_reference.clone()) &&
            ret_dispersal_origin == &old(dispersal_origin.clone()) &&
            ret_dispersal_target == &old(dispersal_target.clone()) &&
            ret_prior_time == &old(prior_time) &&
            ret_event_time == &old(event_time)
        },
        None => true,
    }, "if ret is Some, it returns the input parameters unchanged")]
    #[debug_ensures(if ret.as_ref().is_none() {
        simulation.lineage_store.get(old(lineage_reference.clone())).is_none()
    } else { true }, "if ret is None, lineage_reference has been removed from the lineage store")]
    fn optionally_emigrate(
        &mut self,
        lineage_reference: R,
        dispersal_origin: IndexedLocation,
        dispersal_target: Location,
        prior_time: NonNegativeF64,
        event_time: PositiveF64,
        simulation: &mut PartialSimulation<H, G, R, S>,
        rng: &mut G,
    ) -> Option<(R, IndexedLocation, Location, NonNegativeF64, PositiveF64)>;
}
|
extern crate chrono;
extern crate dirs;
mod routine;
mod story;
use routine::Routine;
use story::Story;
use std::fs::File;
use std::fs;
use chrono::prelude::*;
fn main() {
    // load existing -> all in home directory
    // states -> empty, some
    let mut story = get_story().expect("Error in story");

    // Hard-coded routine used to exercise `add_routine` during development.
    let test_routine = Routine {
        id: 2,
        name: String::from("Cooking"),
        last_done: Local::now(),
    };
    story.add_routine(test_routine);
    println!("Story after add : {}", story);

    if story.routines.is_empty() {
        println!("It was empty");
    } else {
        println!("Not empty");
    }
}
/// Prints every routine on its own line via its `Display` impl.
///
/// Takes a slice rather than `&Vec<_>` so any contiguous collection of
/// routines can be passed; existing `&Vec<Routine>` callers still work
/// through deref coercion.
fn print_routines(routines: &[Routine]) {
    for routine in routines {
        println!("{}", routine);
    }
}
fn get_story() -> Option<Story> {
let mut home_story = dirs::home_dir().expect("Error in getting Home Dir");
home_story.push(".rudhi");
home_story.push("story");
let exists = home_story.exists();
if exists {
let content = fs::read_to_string(home_story).expect("Error reading file");
let story: Story = serde_json::from_str(&content).expect("Error in deserializing");
return Some(story);
}
println!("Home : {}", exists);
return None;
}
/* Will be done later
struct Todo {
name: String,
added: Date<Local> // should be date
}
struct Thought {
name: String,
description: String,
added: Date<Local>
}
*/
|
/*
Copyright 2021 Volt Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//! Remove a package from your direct dependencies.
// Std Imports
use std::sync::Arc;
// Library Imports
use anyhow::Result;
use async_trait::async_trait;
use colored::Colorize;
use std::process;
// Crate Level Imports
use crate::utils::App;
use crate::VERSION;
// Super Imports
use super::Command;
/// Struct implementation for the `Deploy` command.
///
/// `volt deploy` stages all changes, commits with the given message, and
/// pushes to the current branch's remote.
pub struct Deploy;
#[async_trait]
impl Command for Deploy {
    /// Display a help menu for the `volt deploy` command.
    fn help() -> String {
        format!(
            r#"volt {}
Deploys your commit to Github.
Usage: {} {} {}
Options:
    {} {} Output verbose messages on internal operations."#,
            VERSION.bright_green().bold(),
            "volt".bright_green().bold(),
            "deploy".bright_purple(),
            "[commit]".white(),
            "--verbose".blue(),
            "(-v)".yellow()
        )
    }
    /// Execute the `volt deploy` command.
    ///
    /// Stages all changes, commits them with the given message, and pushes
    /// to GitHub.
    /// ## Arguments
    /// * `commit_msg` - the commit message (first positional argument).
    /// ## Examples
    /// ```
    /// // .exec() is an async call so you need to await it
    /// Deploy.exec(app).await;
    /// ```
    /// ## Returns
    /// * `Result<()>`
    async fn exec(app: Arc<App>) -> Result<()> {
        let args: Vec<String> = app.args.clone();
        // A commit message is required; bail out with an error otherwise.
        // (`process::exit` diverges, so no `else` branch is needed.)
        if args.is_empty() {
            println!("{} expected commit name", "error".bright_red());
            process::exit(1);
        }
        let commit_msg = &args[0];
        // The previous `set_current_dir(current_dir()?)` was a no-op and has
        // been removed; git runs in the process's current directory.
        std::process::Command::new("git").args(&["add", "."]).output().expect("Failed to add");
        std::process::Command::new("git").args(&["commit", "-m", commit_msg.as_str()]).output().expect("Failed to commit");
        std::process::Command::new("git").args(&["push"]).output().expect("Failed to push");
        Ok(())
    }
}
|
use winapi_safe::constants::*;
use winapi_safe::*;
// Maximum gap (in pixels) between window edges for snapping to trigger.
const THRESH: i32 = 40;
// Enumerate Windows Handler
//
// Called once per top-level window (`o_hwnd`); `m_hwnd`/`m_rect` describe
// the window the user just moved. Returns 1 to keep enumerating, 0 to stop.
fn enum_handler(m_hwnd: HWND, o_hwnd: HWND, mut m_rect: RECT) -> i32 {
    // Skip minimized windows; also drop out if the API call failed.
    // (`match` replaces the old `if ret == true` anti-pattern.)
    match is_window_minimized(o_hwnd) {
        Ok(false) => {}
        Ok(true) | Err(_) => return 1,
    }
    // Skip maximized windows; also drop out if the API call failed.
    match is_window_maximized(o_hwnd) {
        Ok(false) => {}
        Ok(true) | Err(_) => return 1,
    }
    // Ignore non-taskbar windows.
    if !is_taskbar_window(o_hwnd) {
        return 1; // Continue enumerating.
    }
    // Get bounds of enumerated window.
    if let Ok(o_rect) = get_window_frame_rect(o_hwnd) {
        let (thread_id, process_id) = get_window_thread_process_id(o_hwnd as HWND);
        println!(
            "{:?} {} {} {} {} id t:{} p:{}",
            o_hwnd, o_rect.left, o_rect.top, o_rect.right, o_rect.bottom, thread_id, process_id
        );
        // Compare positions and snap windows that are close by. Only the
        // first matching edge wins (else-if chain).
        let mut reposition = false;
        if i32::abs(m_rect.right - o_rect.left) < THRESH {
            println!("Window on left");
            m_rect.right = o_rect.left;
            reposition = true;
        } else if i32::abs(m_rect.left - o_rect.right) < THRESH {
            println!("Window on right");
            m_rect.left = o_rect.right;
            reposition = true;
        } else if i32::abs(m_rect.bottom - o_rect.top) < THRESH {
            println!("Window on top");
            m_rect.bottom = o_rect.top;
            reposition = true;
        } else if i32::abs(m_rect.top - o_rect.bottom) < THRESH {
            println!("Window on bottom");
            m_rect.top = o_rect.bottom;
            reposition = true;
        }
        // Apply new position.
        if reposition {
            if let Err(err) = set_window_pos(m_hwnd, m_rect) {
                println!("{}", err);
            } else {
                return 0; // Stop enumerating.
            }
        }
    }
    1 // Continue enumerating.
}
// System Event Handler
//
// Invoked by the WinEvent hook; reacts only to EVENT_SYSTEM_MOVESIZEEND for
// the window itself (id_child == 0).
fn event_handler(event: u32, m_hwnd: HWND, id_child: i32) {
    // Return if the event isn't for us.
    if event != EVENT_SYSTEM_MOVESIZEEND || id_child != 0 {
        return;
    }
    // Retrieve bounds for the moved window, then snap against every other
    // top-level window; log and bail out on failure.
    match get_window_frame_rect(m_hwnd) {
        Ok(m_rect) => {
            // Closure forwards each enumerated window to the snap logic.
            let enum_closure = |o_hwnd| -> i32 { enum_handler(m_hwnd, o_hwnd, m_rect) };
            println!("\n========\n");
            if let Err(err) = enum_windows(enum_closure) {
                println!("{}", err)
            }
        }
        Err(err) => println!("{}", err),
    }
}
fn main() {
    // Set the process as DPI aware so window rects are reported in real pixels.
    if let Err(err) = set_process_dpi_aware_context(DPI_AWARENESS_CONTEXT_SYSTEM_AWARE) {
        msgbox!("Error", "{}", err);
    }
    // Setup closure for event hook. Done this way for readability.
    let func = |_, event, hwnd, _, id_child, _, _| {
        event_handler(event, hwnd, id_child);
    };
    // Setup hook. The min/max event range covers only the move/size-end event.
    if let Err(err) = set_win_event_hook(
        EVENT_SYSTEM_MOVESIZEEND,
        EVENT_SYSTEM_MOVESIZEEND,
        0 as HINSTANCE,
        func,
        0,
        0,
        WINEVENT_OUTOFCONTEXT,
    ) {
        msgbox!("Error", "{}", err);
        return;
    }
    // Run safe windows message pump.
    loop {
        // Wait for message (blocking).
        if let Ok(msg) = get_message() {
            if let Some(msg) = msg {
                // Handle message.
                translate_message(&msg);
                dispatch_message(&msg);
            } else {
                return; // Return on WM_QUIT.
            }
        } else {
            return; // Return on error.
        }
    }
}
|
use crate::commands::osu::ProfileSize;
use rosu_v2::prelude::{GameMode, Username};
use smallstr::SmallString;
use smallvec::SmallVec;
/// Command prefix; stored inline for up to 2 bytes.
pub type Prefix = SmallString<[u8; 2]>;
/// Per-guild prefix list; stored inline for up to 5 prefixes.
pub type Prefixes = SmallVec<[Prefix; 5]>;
/// Ids granted elevated command access — presumably role/user ids; confirm.
pub type Authorities = SmallVec<[u64; 4]>;
/// How score embeds are displayed: always minimized, always maximized, or
/// maximized initially and minimized later.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[repr(u8)]
pub enum EmbedsSize {
    AlwaysMinimized = 0,
    InitialMaximized = 1,
    AlwaysMaximized = 2,
}

impl From<i16> for EmbedsSize {
    /// Decodes the stored integer; any unknown value falls back to
    /// `InitialMaximized`.
    fn from(value: i16) -> Self {
        if value == 0 {
            Self::AlwaysMinimized
        } else if value == 2 {
            Self::AlwaysMaximized
        } else {
            Self::InitialMaximized
        }
    }
}

impl Default for EmbedsSize {
    fn default() -> Self {
        Self::InitialMaximized
    }
}
/// Which pp value a minimized embed shows.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[repr(u8)]
pub enum MinimizedPp {
    IfFc = 0,
    Max = 1,
}

impl From<i16> for MinimizedPp {
    /// Decodes the stored integer; anything but 0 maps to `Max`.
    fn from(value: i16) -> Self {
        if value == 0 { Self::IfFc } else { Self::Max }
    }
}

impl Default for MinimizedPp {
    fn default() -> Self {
        Self::Max
    }
}
/// Per-guild bot settings; `None` fields fall back to the defaults supplied
/// by the accessor methods.
#[derive(Debug, Clone)]
pub struct GuildConfig {
    pub authorities: Authorities,
    pub embeds_size: Option<EmbedsSize>,
    pub minimized_pp: Option<MinimizedPp>,
    pub prefixes: Prefixes,
    pub profile_size: Option<ProfileSize>,
    pub show_retries: Option<bool>,
    pub track_limit: Option<u8>,
    pub with_lyrics: Option<bool>,
}
impl GuildConfig {
    /// Lyrics setting; defaults to `true` when unset.
    pub fn with_lyrics(&self) -> bool {
        self.with_lyrics.unwrap_or(true)
    }
    /// Embed size preference; falls back to the type default when unset.
    pub fn embeds_size(&self) -> EmbedsSize {
        self.embeds_size.unwrap_or_default()
    }
    /// Minimized-pp preference; falls back to the type default when unset.
    pub fn minimized_pp(&self) -> MinimizedPp {
        self.minimized_pp.unwrap_or_default()
    }
    /// Profile size preference; falls back to the type default when unset.
    pub fn profile_size(&self) -> ProfileSize {
        self.profile_size.unwrap_or_default()
    }
    /// Retry-count display; defaults to `true` when unset.
    pub fn show_retries(&self) -> bool {
        self.show_retries.unwrap_or(true)
    }
    /// Tracking limit; defaults to 50 when unset.
    pub fn track_limit(&self) -> u8 {
        self.track_limit.unwrap_or(50)
    }
}
impl Default for GuildConfig {
    /// Fresh guild: no authorities, "<" as the only prefix, everything else
    /// unset (the accessors supply per-field defaults).
    fn default() -> Self {
        GuildConfig {
            authorities: SmallVec::new(),
            embeds_size: None,
            minimized_pp: None,
            prefixes: smallvec!["<".into()],
            profile_size: None,
            show_retries: None,
            track_limit: None,
            with_lyrics: None,
        }
    }
}
/// A linked osu! account: either just a username, or a username together
/// with the numeric user id.
#[derive(Clone, Debug)]
pub enum OsuData {
    Name(Username),
    User { user_id: u32, username: Username },
}
impl OsuData {
    /// Borrows the username regardless of variant.
    pub fn username(&self) -> &Username {
        match self {
            Self::Name(username) | Self::User { username, .. } => username,
        }
    }

    /// Consumes `self`, returning the owned username.
    pub fn into_username(self) -> Username {
        match self {
            Self::Name(username) | Self::User { username, .. } => username,
        }
    }

    /// The numeric user id, if this entry carries one.
    pub fn user_id(&self) -> Option<u32> {
        if let Self::User { user_id, .. } = self {
            Some(*user_id)
        } else {
            None
        }
    }
}
impl From<Username> for OsuData {
    /// Wraps a bare username (no user id available).
    fn from(name: Username) -> Self {
        Self::Name(name)
    }
}
impl From<String> for OsuData {
    /// Converts an owned string into the `Name` variant.
    fn from(name: String) -> Self {
        Self::Name(name.into())
    }
}
/// Per-user bot settings; `None` fields fall back to the defaults supplied
/// by the accessor methods.
#[derive(Clone, Debug, Default)]
pub struct UserConfig {
    pub embeds_size: Option<EmbedsSize>,
    pub minimized_pp: Option<MinimizedPp>,
    pub mode: Option<GameMode>,
    pub osu: Option<OsuData>,
    pub profile_size: Option<ProfileSize>,
    pub show_retries: Option<bool>,
    pub twitch_id: Option<u64>,
}
impl UserConfig {
    /// Borrows the linked osu! username, if any.
    pub fn username(&self) -> Option<&Username> {
        self.osu.as_ref().map(OsuData::username)
    }
    /// Consumes the config, returning the owned username, if any.
    pub fn into_username(self) -> Option<Username> {
        self.osu.map(OsuData::into_username)
    }
    /// Embed size preference; falls back to the type default when unset.
    pub fn embeds_size(&self) -> EmbedsSize {
        self.embeds_size.unwrap_or_default()
    }
    /// Minimized-pp preference; falls back to the type default when unset.
    pub fn minimized_pp(&self) -> MinimizedPp {
        self.minimized_pp.unwrap_or_default()
    }
}
|
use sys;
use bullet_vector3::BulletVector3;
use collision::collision_shapes::Shape;
use mint::{Vector3, Vector4};
/// Bullet collision-object activation states (numeric values mirror the
/// constants passed to `btCollisionObject_setActivationState`).
#[repr(u8)]
pub enum ActivationState {
    /// Means active so that the object having the state could be moved in a step simulation.
    /// This is the "normal" state for an object to be in.
    /// Use btCollisionObject::activate() to activate an object,
    /// not btCollisionObject::setActivationState(ACTIVATE_TAG),
    /// or it may get disabled again right away, as the deactivation timer has not been reset.
    ActiveTag = 1,
    /// Makes a body active forever, used for something like a player-controlled object.
    DisableDeactivation = 4,
    /// Making a body deactivated forever.
    DisableSimulation = 5,
    /// Means the body, and it's island, are asleep, since Bullet sleeps objects per-island. You probably don't want or need to set this one manually.
    IslandSleeping = 2,
    /// Means that it's an active object trying to fall asleep,
    /// and Bullet is keeping an eye on its velocity for the next few frames
    /// to see if it's a good candidate.
    /// You probably don't want or need to set this one manually.
    WantsDeactivation = 3,
}
/// Owning wrapper around a Bullet rigid body.
///
/// Each FFI object is boxed so its heap address stays stable: the
/// construction info holds a raw pointer into the motion state (see `new`),
/// so all parts must live and move together.
pub struct RigidBody {
    rigid_body: Box<sys::btRigidBody>,
    shape: Box<Shape>,
    motion_state: Box<sys::btDefaultMotionState>,
    construction_info: Box<sys::btRigidBody_btRigidBodyConstructionInfo>,
}
impl RigidBody {
    /// Builds a Bullet rigid body from mass, local inertia, a collision
    /// shape, and an initial pose (translation + orientation quaternion).
    ///
    /// The motion state, construction info, and body are boxed before their
    /// addresses are handed to Bullet, keeping those pointers valid for the
    /// lifetime of the returned struct.
    pub fn new<T1: Into<Vector3<f64>>, T2: Into<Vector3<f64>>, T3: Into<Vector4<f64>>>(
        mass: f64,
        inertia: T1,
        shape: Shape,
        translation: T2,
        orientation: T3,
    ) -> RigidBody {
        let mut inertia: BulletVector3 = inertia.into().into();
        let shape_box = Box::new(shape);
        let translation: BulletVector3 = translation.into().into();
        let orientation: [f64; 4] = orientation.into().into();
        // btTransform::new1 takes the quaternion and translation as raw
        // float pointers; layouts must match Bullet's expectations.
        let transform = unsafe {
            sys::btTransform::new1(
                &orientation as *const _ as *const _,
                &translation as *const _ as *const _,
            )
        };
        let mut motion_state_box = Box::new(unsafe {
            sys::btDefaultMotionState::new(&transform as *const _, sys::btTransform::getIdentity())
        });
        let construction_info_box = Box::new(unsafe {
            sys::btRigidBody_btRigidBodyConstructionInfo::new(
                mass,
                &mut *motion_state_box as *mut _ as *mut _,
                shape_box.as_ptr(),
                inertia.0.as_mut_ptr() as *mut _,
            )
        });
        RigidBody {
            rigid_body: Box::new(unsafe {
                sys::btRigidBody::new(&*construction_info_box as *const _)
            }),
            shape: shape_box,
            motion_state: motion_state_box,
            construction_info: construction_info_box,
        }
    }
    // Raw pointer to the boxed motion state (stable while `self` lives).
    pub(crate) unsafe fn motion_state_ptr(&self) -> *mut sys::btDefaultMotionState {
        &*self.motion_state as *const _ as *mut _
    }
    // Raw pointer to the boxed rigid body (stable while `self` lives).
    pub(crate) unsafe fn as_ptr(&self) -> *mut sys::btRigidBody {
        &*self.rigid_body as *const _ as *mut _
    }
}
impl Drop for RigidBody {
    /// Runs the C++ destructors for the owned motion state and rigid body.
    ///
    /// NOTE(review): `shape` and `construction_info` get no explicit
    /// destructor call here — presumably plain data or destroyed elsewhere;
    /// confirm against the bullet bindings.
    fn drop(&mut self) {
        unsafe {
            ::sys::btMotionState_btMotionState_destructor(
                &mut *self.motion_state as *mut _ as *mut _,
            );
            ::sys::btRigidBody_btRigidBody_destructor(&mut *self.rigid_body as *mut _);
        }
    }
}
/// Non-owning handle to a rigid body living in a dynamics world.
#[derive(Clone)]
pub struct RigidBodyHandle {
    pub(in dynamics) ptr: *mut sys::btRigidBody,
    motion_state: *mut sys::btDefaultMotionState,
    // Scratch transform reused by get_world_position_and_orientation.
    temp_transform: sys::btTransform,
}
impl RigidBodyHandle {
    /// Wraps raw pointers to an existing Bullet rigid body and its motion
    /// state; does not take ownership.
    pub fn new(ptr: *mut sys::btRigidBody, motion_state: *mut sys::btDefaultMotionState) -> Self {
        let temp_transform = unsafe { sys::btTransform::new() };
        RigidBodyHandle {
            ptr,
            motion_state,
            temp_transform,
        }
    }
    /// Sets the coefficient of restitution (bounciness).
    pub fn set_restitution(&mut self, restitution: f64) {
        unsafe {
            sys::btCollisionObject_setRestitution(self.ptr as *mut _, restitution);
        }
    }
    /// Recomputes the local inertia for the body's current collision shape
    /// and applies the new mass properties.
    ///
    /// Panics with `unimplemented!` for shape types not handled below.
    pub fn set_mass(&mut self, mass: f64) {
        unsafe {
            let shape = sys::btRigidBody_getCollisionShape(self.ptr);
            // Zero-initialize instead of the former `mem::uninitialized()`:
            // producing an uninitialized [f64; 4] is undefined behavior (the
            // API is deprecated in favor of MaybeUninit), and every element
            // read afterwards is overwritten by calculateLocalInertia anyway.
            let mut inertia: [f64; 4] = [0.0; 4];
            let shape_type = sys::btCollisionShape_getShapeType(shape as *mut _) as u32;
            match shape_type {
                sys::BroadphaseNativeTypes_STATIC_PLANE_PROXYTYPE => {
                    sys::btStaticPlaneShape_calculateLocalInertia(
                        shape as *mut _,
                        mass,
                        inertia.as_mut_ptr() as *mut _,
                    );
                },
                sys::BroadphaseNativeTypes_BOX_SHAPE_PROXYTYPE => {
                    sys::btBoxShape_calculateLocalInertia(
                        shape as *mut _,
                        mass,
                        inertia.as_mut_ptr() as *mut _,
                    );
                },
                sys::BroadphaseNativeTypes_CAPSULE_SHAPE_PROXYTYPE => {
                    sys::btCapsuleShape_calculateLocalInertia(
                        shape as *mut _,
                        mass,
                        inertia.as_mut_ptr() as *mut _,
                    );
                },
                sys::BroadphaseNativeTypes_CONVEX_HULL_SHAPE_PROXYTYPE => {
                    sys::btPolyhedralConvexShape_calculateLocalInertia(
                        shape as *mut _,
                        mass,
                        inertia.as_mut_ptr() as *mut _,
                    );
                },
                sys::BroadphaseNativeTypes_COMPOUND_SHAPE_PROXYTYPE => {
                    sys::btCompoundShape_calculateLocalInertia(
                        shape as *mut _,
                        mass,
                        inertia.as_mut_ptr() as *mut _,
                    );
                },
                _ => {
                    unimplemented!()
                }
            }
            sys::btRigidBody_setMassProps(self.ptr as *mut _, mass, inertia.as_ptr() as *const _);
            sys::btRigidBody_updateInertiaTensor(self.ptr as *mut _);
        }
    }
    /// Sets the friction coefficient.
    pub fn set_friction(&mut self, friction: f64) {
        unsafe {
            sys::btCollisionObject_setFriction(self.ptr as *mut _, friction);
        }
    }
    /// Sets per-body gravity.
    pub fn set_gravity<T: Into<Vector3<f64>>>(&mut self, gravity: T) {
        let gravity: BulletVector3 = gravity.into().into();
        unsafe {
            sys::btRigidBody_setGravity(self.ptr, gravity.0.as_ptr() as *const _);
        }
    }
    /// Scales angular motion per axis (e.g. zero an axis to lock rotation).
    pub fn set_angular_factor<T: Into<Vector3<f64>>>(&mut self, angular_factor: T) {
        let angular_factor: BulletVector3 = angular_factor.into().into();
        unsafe {
            sys::btRigidBody_setAngularFactor(self.ptr, angular_factor.0.as_ptr() as *const _);
        }
    }
    /// Sets the linear/angular speed thresholds below which the body may be
    /// put to sleep.
    pub fn set_sleeping_thresholds(&mut self, linear: f64, angular: f64) {
        unsafe { sys::btRigidBody_setSleepingThresholds(self.ptr, linear, angular) }
    }
    /// Sets the activation state (see `ActivationState` for caveats).
    pub fn set_activation_state(&mut self, activation_state: ActivationState) {
        unsafe {
            sys::btCollisionObject_setActivationState(self.ptr as *mut _, activation_state as i32)
        }
    }
    /// Applies an impulse through the center of mass.
    pub fn apply_central_impulse<T: Into<Vector3<f64>>>(&mut self, impulse: T) {
        let impulse: BulletVector3 = impulse.into().into();
        unsafe {
            sys::btRigidBody_applyCentralImpulse(self.ptr, impulse.0.as_ptr() as *const _);
        }
    }
    /// Reads the current linear velocity.
    pub fn get_linear_velocity(&self) -> Vector3<f64> {
        let velocity = unsafe { sys::btRigidBody_getLinearVelocity(self.ptr) };
        // Bullet vectors are 4 floats wide (SIMD padding); only x/y/z are used.
        ::bullet_vector3::vector_from_slice(unsafe {
            ::std::slice::from_raw_parts(velocity as *const _, 4)
        })
    }
    /// Override velocity vector.
    pub fn reset_linear_velocity<T>(&mut self, velocity: T)
    where
        T: Into<Vector3<f64>>,
    {
        let velocity: BulletVector3 = velocity.into().into();
        unsafe {
            sys::btRigidBody_setLinearVelocity(self.ptr, velocity.0.as_ptr() as *const _);
        }
    }
    /// Override position vector and rotation quaternion.
    pub fn reset_position_and_orientation<T, T1>(&mut self, position: T, orientation: T1)
    where
        T: Into<Vector3<f64>>,
        T1: Into<Vector4<f64>>,
    {
        let orientation: [f64; 4] = orientation.into().into();
        let position: BulletVector3 = position.into().into();
        let transform = unsafe {
            sys::btTransform::new1(
                &orientation as *const _ as *const _,
                &position as *const _ as *const _,
            )
        };
        unsafe {
            sys::btDefaultMotionState_setWorldTransform(
                &*self.motion_state as *const _ as *mut _,
                &transform as *const _ as *const _,
            );
        }
        // Also overwrite the body's cached world transform directly.
        unsafe { (*self.ptr)._base.m_worldTransform = transform };
    }
    /// Get position in world space and orientation quaternion.
    pub fn get_world_position_and_orientation(&self) -> (Vector3<f64>, Vector4<f64>) {
        unsafe {
            sys::btDefaultMotionState_getWorldTransform(
                &*self.motion_state as *const _ as *mut _,
                &self.temp_transform as *const _ as *mut _,
            );
        }
        let origin = unsafe { self.temp_transform.getOrigin1().as_ref().unwrap() };
        let rotation = unsafe { self.temp_transform.getRotation() };
        (
            ::bullet_vector3::vector_from_slice(&origin.m_floats[0..3]),
            ::bullet_vector3::vector4_from_slice(&rotation._base.m_floats),
        )
    }
    /// Place data on heap as a box and set to rigid body as user pointer.
    ///
    /// NOTE(review): the boxed data is never reclaimed here (no matching
    /// `Box::from_raw`), so repeated calls leak — confirm ownership policy.
    pub fn set_user_data<T: 'static>(&mut self, data: T) {
        let data_box = Box::new(data);
        unsafe {
            sys::btCollisionObject_setUserPointer(
                self.ptr as *mut _,
                Box::into_raw(data_box) as *mut _,
            )
        };
    }
    /// Get data from rigidbody's user pointer.
    /// Getting data from body without previously setted data is fine (None).
    /// Requesting the wrong type reinterprets the pointer and is undefined
    /// behavior.
    pub unsafe fn get_user_data<T: 'static>(&self) -> Option<&T> {
        let pointer = sys::btCollisionObject_getUserPointer(self.ptr as *mut _);
        // A plain pointer cast is sufficient (and clearer) here; the former
        // `mem::transmute` did the same thing.
        let pointer = pointer as *const T;
        pointer.as_ref()
    }
    /// Set user index. This will not be used in bullet and this is not related to user_data.
    pub fn set_user_index(&mut self, index: i32) {
        unsafe { sys::btCollisionObject_setUserIndex(self.ptr as *mut _, index) };
    }
    /// Get previously setted user index.
    /// If index was not set - will return "-1".
    pub fn get_user_index(&self) -> i32 {
        unsafe { sys::btCollisionObject_getUserIndex(self.ptr as *mut _) }
    }
    /// Was that rigid_body removed with DynamicsWorld::remove_body().
    /// Probably may return true also for any not-added bodys.
    pub fn removed(&self) -> bool {
        unsafe { (*self.ptr)._base.m_worldArrayIndex == -1 }
    }
    /// Raw pointer to the underlying Bullet body.
    pub unsafe fn ptr(&mut self) -> *mut sys::btRigidBody {
        self.ptr
    }
}
|
use std::io;
use std::fs;
use std::cmp::Ordering;
use std::collections::HashMap;
use nom::bytes::complete::tag;
use nom::character::complete::{digit1, space1, alpha1};
use nom::combinator::map;
use nom::sequence::pair;
use nom::multi::separated_nonempty_list;
use nom::IResult;
/// One reagent requirement: `count` units of chemical `name`.
#[derive(Debug, Default, Clone)]
struct Input {
    name: String,
    count: usize,
}
/// A production rule: consuming `inputs` yields `output_count` units.
/// `spares` banks leftover units from earlier over-production.
#[derive(Debug, Default)]
struct Reaction {
    output_count: usize,
    inputs: Vec<Input>,
    spares: usize,
}
/// Total ore budget for part 2 (one trillion). Digit separators make the
/// magnitude readable without changing the value.
const MAX_ORE: usize = 1_000_000_000_000;
/// Parses a run of decimal digits into a `usize` (panics on overflow).
fn parse_num(input: &str) -> IResult<&str, usize> {
    map(digit1, |digit_str: &str| digit_str.parse::<usize>().unwrap())(input)
}
/// Parses "<count> <NAME>" into an `Input`.
fn parse_input(input: &str) -> IResult<&str, Input> {
    let (input, count) = parse_num(input)?;
    let (input, _) = space1(input)?;
    let (input, name) = alpha1(input)?;
    Ok((input, Input{name: name.to_string(), count}))
}
/// Parses one reaction line, e.g. "7 A, 1 B => 1 C", returning the output
/// chemical's name and its `Reaction` (with zero spares).
fn parse_reaction(input: &str) -> IResult<&str, (String, Reaction)> {
    let (input, inputs) = separated_nonempty_list(pair(tag(","), space1), parse_input)(input)?;
    let (input, _) = space1(input)?;
    let (input, _) = tag("=>")(input)?;
    let (input, _) = space1(input)?;
    let (input, output_count) = parse_num(input)?;
    let (input, _) = space1(input)?;
    let (input, name) = alpha1(input)?;
    Ok((input, (name.to_string(), Reaction{output_count, inputs, spares: 0})))
}
/// Parses one reaction per line into a map keyed by output chemical.
///
/// Panics if any line fails to parse.
fn string_to_reactions(string: &str) -> HashMap<String, Reaction> {
    string
        .lines()
        .map(|line| parse_reaction(line).unwrap().1)
        .collect()
}
/// Returns the amount of ORE required to produce `needed` units of `output`,
/// consuming and banking leftovers in each reaction's `spares` field.
///
/// NOTE(review): this is stateful — repeated calls reuse spares left over
/// from earlier calls, which `output_for_size` relies on.
fn breakdown(reactions: &mut HashMap<String, Reaction>, output: &String, mut needed: usize) -> usize {
    // ORE is the base resource: its "cost" is itself.
    if output == "ORE" {
        return needed;
    }
    let reaction = reactions.get(output).unwrap();
    // Banked leftovers fully cover the request: spend them, no ore needed.
    if needed <= reaction.spares {
        if let Some(reaction) = reactions.get_mut(output) {
            reaction.spares -= needed;
            return 0;
        } else {
            panic!("No reaction for {} found", output);
        }
    }
    // Partially cover the request from leftovers (they are overwritten below).
    if reaction.spares > 0 {
        needed -= reaction.spares;
    }
    // Number of reaction runs needed to produce at least `needed` units
    // (ceiling division, written out).
    let mut multiplier: usize;
    if reaction.output_count >= needed {
        multiplier = 1;
    } else {
        multiplier = needed / reaction.output_count;
        if needed % reaction.output_count > 0 {
            multiplier += 1;
        }
    }
    // Clone the input list so `reactions` can be mutably re-borrowed in the
    // recursive calls below.
    let inputs = reaction.inputs.clone();
    let total = inputs.iter().map(|input| breakdown(reactions, &input.name, input.count * multiplier)).sum();
    if let Some(reaction) = reactions.get_mut(output) {
        // Bank the over-production for later requests (old spares were spent).
        reaction.spares = (reaction.output_count * multiplier) - needed;
    } else {
        panic!("No reaction for {} found", output);
    }
    total
}
/// Fractional estimate of the ore needed for `needed` units of `output`,
/// ignoring integer rounding and spares. Used by `output_for_size` to seed
/// its search.
fn approx_breakdown(reactions: &HashMap<String, Reaction>, output: &String, needed: f32) -> f32 {
    if output == "ORE" {
        return needed;
    }
    let reaction = reactions.get(output).unwrap();
    let multiplier = needed / reaction.output_count as f32;
    // Unlike `breakdown`, `reactions` is only borrowed immutably here, so
    // recursing while iterating needs no clone of the input list.
    reaction
        .inputs
        .iter()
        .map(|input| approx_breakdown(reactions, &input.name, input.count as f32 * multiplier))
        .sum()
}
/// Finds the maximum units of `output` producible from `MAX_ORE` ore:
/// seeds a guess from the fractional approximation, then steps it up or
/// down one unit at a time.
///
/// NOTE(review): `breakdown` mutates the spares state in `reactions`, so
/// the repeated calls in the loops below are not independent; the
/// `adjustment` bookkeeping appears tuned to compensate — confirm against
/// the AoC examples in the tests.
fn output_for_size(mut reactions: &mut HashMap<String, Reaction>, output: &String) -> usize {
    // Initial guess from the spare-free fractional estimate.
    let mut needed = MAX_ORE / approx_breakdown(&reactions, output, 1 as f32) as usize;
    println!("starting at {}", needed);
    let ore = breakdown(&mut reactions, output, needed);
    let mut adjustment: usize = 0;
    println!("ore count: {:?}", ore);
    match ore.cmp(&MAX_ORE) {
        Ordering::Less => {
            // Guess was low: step up until the budget is exceeded, then the
            // final `needed - adjustment` backs off by one.
            adjustment = 1;
            while breakdown(&mut reactions, output, needed) < MAX_ORE {
                needed += 1;
            }
        },
        Ordering::Greater => {
            // Guess was high: step down until we fit within the budget.
            while breakdown(&mut reactions, output, needed) > MAX_ORE {
                needed -= 1;
            }
        }
        Ordering::Equal => return needed,
    }
    needed - adjustment
}
/// Part 2 entry point: prints the maximum FUEL producible from one trillion
/// ORE, using the reaction list in `input.txt`.
fn main() -> io::Result<()> {
    let mut reactions = string_to_reactions(&fs::read_to_string("input.txt").unwrap());
    println!("{}", output_for_size(&mut reactions, &"FUEL".to_string()));
    Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use test_case::test_case;
#[test_case("10 ORE => 10 A
1 ORE => 1 B
7 A, 1 B => 1 C
7 A, 1 C => 1 D
7 A, 1 D => 1 E
7 A, 1 E => 1 FUEL", 1, "FUEL" => 31 ; "example 1")]
#[test_case("9 ORE => 2 A
8 ORE => 3 B
7 ORE => 5 C
3 A, 4 B => 1 AB
5 B, 7 C => 1 BC
4 C, 1 A => 1 CA
2 AB, 3 BC, 4 CA => 1 FUEL", 1, "FUEL" => 165 ; "example 2")]
#[test_case("157 ORE => 5 NZVS
165 ORE => 6 DCFZ
44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL
12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ
179 ORE => 7 PSHF
177 ORE => 5 HKGWZ
7 DCFZ, 7 PSHF => 2 XJWVT
165 ORE => 2 GPVTF
3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT", 1, "FUEL" => 13312 ; "example 3")]
#[test_case("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG
17 NVRVD, 3 JNWZP => 8 VPVL
53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL
22 VJHF, 37 MNCFX => 5 FWMGM
139 ORE => 4 NVRVD
144 ORE => 7 JNWZP
5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC
5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV
145 ORE => 6 MNCFX
1 NVRVD => 8 CXFTF
1 VJHF, 6 MNCFX => 4 RFSQX
176 ORE => 6 VJHF", 1, "FUEL" => 180697 ; "example 4")]
#[test_case("171 ORE => 8 CNZTR
7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL
114 ORE => 4 BHXH
14 VRPVC => 6 BMBT
6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL
6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT
15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW
13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW
5 BMBT => 4 WPTQ
189 ORE => 9 KTJDG
1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP
12 VRPVC, 27 CNZTR => 2 XDBXC
15 KTJDG, 12 BHXH => 5 XCVML
3 BHXH, 2 VRPVC => 7 MZWV
121 ORE => 7 VRPVC
7 XCVML => 6 RJRHP
5 BHXH, 4 VRPVC => 5 LTCX", 1, "FUEL" => 2210736 ; "example 5")]
fn test(input: &str, needed: usize, name: &str) -> usize {
let mut reactions = string_to_reactions(input);
breakdown(&mut reactions, &name.to_string(), needed)
}
#[test_case("157 ORE => 5 NZVS
165 ORE => 6 DCFZ
44 XJWVT, 5 KHKGT, 1 QDVJ, 29 NZVS, 9 GPVTF, 48 HKGWZ => 1 FUEL
12 HKGWZ, 1 GPVTF, 8 PSHF => 9 QDVJ
179 ORE => 7 PSHF
177 ORE => 5 HKGWZ
7 DCFZ, 7 PSHF => 2 XJWVT
165 ORE => 2 GPVTF
3 DCFZ, 7 NZVS, 5 HKGWZ, 10 PSHF => 8 KHKGT", "FUEL" => 82892753 ; "example 1")]
#[test_case("2 VPVL, 7 FWMGM, 2 CXFTF, 11 MNCFX => 1 STKFG
17 NVRVD, 3 JNWZP => 8 VPVL
53 STKFG, 6 MNCFX, 46 VJHF, 81 HVMC, 68 CXFTF, 25 GNMV => 1 FUEL
22 VJHF, 37 MNCFX => 5 FWMGM
139 ORE => 4 NVRVD
144 ORE => 7 JNWZP
5 MNCFX, 7 RFSQX, 2 FWMGM, 2 VPVL, 19 CXFTF => 3 HVMC
5 VJHF, 7 MNCFX, 9 VPVL, 37 CXFTF => 6 GNMV
145 ORE => 6 MNCFX
1 NVRVD => 8 CXFTF
1 VJHF, 6 MNCFX => 4 RFSQX
176 ORE => 6 VJHF", "FUEL" => 5586022 ; "example 2")]
#[test_case("171 ORE => 8 CNZTR
7 ZLQW, 3 BMBT, 9 XCVML, 26 XMNCP, 1 WPTQ, 2 MZWV, 1 RJRHP => 4 PLWSL
114 ORE => 4 BHXH
14 VRPVC => 6 BMBT
6 BHXH, 18 KTJDG, 12 WPTQ, 7 PLWSL, 31 FHTLT, 37 ZDVW => 1 FUEL
6 WPTQ, 2 BMBT, 8 ZLQW, 18 KTJDG, 1 XMNCP, 6 MZWV, 1 RJRHP => 6 FHTLT
15 XDBXC, 2 LTCX, 1 VRPVC => 6 ZLQW
13 WPTQ, 10 LTCX, 3 RJRHP, 14 XMNCP, 2 MZWV, 1 ZLQW => 1 ZDVW
5 BMBT => 4 WPTQ
189 ORE => 9 KTJDG
1 MZWV, 17 XDBXC, 3 XCVML => 2 XMNCP
12 VRPVC, 27 CNZTR => 2 XDBXC
15 KTJDG, 12 BHXH => 5 XCVML
3 BHXH, 2 VRPVC => 7 MZWV
121 ORE => 7 VRPVC
7 XCVML => 6 RJRHP
5 BHXH, 4 VRPVC => 5 LTCX", "FUEL" => 460664 ; "example 3")]
fn test_part2(input: &str, name: &str) -> usize {
let mut reactions = string_to_reactions(input);
output_for_size(&mut reactions, &name.to_string())
}
} |
#![allow(clippy::needless_doctest_main)]
//! This crate provides an easy way to extract data from HTML.
//!
//! [`HtmlExtractor`] is neither a parser nor a deserializer.
//! It picks up only the desired data from HTML.
//!
//! [`html_extractor!`](macro.html_extractor.html) will help to implement [`HtmlExtractor`].
//!
//! # Examples
//! ## Extracting a simple value from HTML
//! ```
//! use html_extractor::{html_extractor, HtmlExtractor};
//! html_extractor! {
//! #[derive(Debug, PartialEq)]
//! Foo {
//! foo: usize = (text of "#foo"),
//! }
//! }
//!
//! fn main() {
//! let input = r#"
//! <div id="foo">1</div>
//! "#;
//! let foo = Foo::extract_from_str(input).unwrap();
//! assert_eq!(foo, Foo { foo: 1 });
//! }
//! ```
//!
//! ## Extracting a collection from HTML
//! ```
//! use html_extractor::{html_extractor, HtmlExtractor};
//! html_extractor! {
//! #[derive(Debug, PartialEq)]
//! Foo {
//! foo: Vec<usize> = (text of ".foo", collect),
//! }
//! }
//!
//! fn main() {
//! let input = r#"
//! <div class="foo">1</div>
//! <div class="foo">2</div>
//! <div class="foo">3</div>
//! <div class="foo">4</div>
//! "#;
//! let foo = Foo::extract_from_str(input).unwrap();
//! assert_eq!(foo, Foo { foo: vec![1, 2, 3, 4] });
//! }
//! ```
//!
//! ## Extracting with regex
//! ```
//! use html_extractor::{html_extractor, HtmlExtractor};
//! html_extractor! {
//! #[derive(Debug, PartialEq)]
//! Foo {
//! (foo: usize,) = (text of "#foo", capture with "^foo=(.*)$"),
//! }
//! }
//!
//! fn main() {
//! let input = r#"
//! <div id="foo">foo=1</div>
//! "#;
//! let foo = Foo::extract_from_str(input).unwrap();
//! assert_eq!(foo, Foo { foo: 1 });
//! }
//! ```
#[doc(hidden)]
pub extern crate lazy_static;
#[doc(hidden)]
pub extern crate regex;
#[doc(hidden)]
pub extern crate scraper;
pub use error::Error;
pub mod error;
/// Generates structures that implement [`HtmlExtractor`].
///
/// # Syntax
///
/// ## Defining structures
/// In this macro, zero or more structures can be defined.
///
/// Attributes can be attached to the structures, but currently attributes that may remove the structures (like `#[cfg]`) will not work.
/// ```no_run
/// # use html_extractor::html_extractor;
/// # fn main() {}
/// html_extractor! {
/// //private structure
/// Foo {
/// //fields...
/// }
/// //any visibilities and some attributes can be used
/// #[derive(Debug, Clone)]
/// pub(crate) Bar {
/// //fields...
/// }
/// }
/// ```
///
/// ## Defining fields in structures
/// There are two types of fields, "single field" and "tuple field".
/// Tuple fields are used to [capture data with regex](#capture-specifier).
///
/// Each field definition has a declaration part and an [extractor](#extractor-part-of-field-definitions) part.
///
/// Attributes can be attached to the fields, but currently attributes that may remove the fields (like `#[cfg]`) will not work.
/// ```no_run
/// # use html_extractor::html_extractor;
/// # fn main() {}
/// html_extractor! {
/// Foo {
/// //single field
/// pub foo: usize = (text of "#foo"),
/// //^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^
/// // declaration extractor
///
/// //tuple field
/// (pub bar: usize, pub baz: usize) = (text of "#bar-baz", capture with "bar=(.*),baz=(.*)"),
/// //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
/// // declaration extractor
/// }
/// }
/// ```
///
/// ## Extractor part of field definitions
/// The extractor part of field definitions specifies how to extract data from HTML.
/// Extractor consists of [Target](#target-specifier), [Capture](#capture-specifier), [Collector](#collector-specifier) and [Parser](#parser-specifier) specifier.
///
/// The order of specifiers does not matter. If the same specifier is written multiple times, the one given later applies.
/// ### Target specifier
/// Target specifier specifies a selector to select an element (or elements) and what of the selected element is extracted.
///
/// If the specified selector is invalid, it will be a compile error.
/// If `text of ..` or `attr[..] of ..` is used, the type of field must implement [`FromStr`](std::str::FromStr).
/// If `elem of ..` is used, the type of field must implement [`HtmlExtractor`].
//! If `text of ..` is used, leading and trailing whitespace is removed from the extracted string.
//! If `presence of ..` is used, the type must be `bool` and no other specifier can be used.
/// ```
/// use html_extractor::{html_extractor, HtmlExtractor};
/// html_extractor! {
/// #[derive(Debug, PartialEq)]
/// Foo {
/// // extracts the first text node in the element that first matched the selector "#foo"
/// foo: usize = (text of "#foo"),
/// // extracts the third text node in the element that first matched the selector "#bar"
/// bar: usize = (text[2] of "#bar"),
/// // extracts attribute "data-baz" in the element that first matched the selector "#baz"
/// baz: usize = (attr["data-baz"] of "#baz"),
/// // extracts an element that first matched the selector "#qux" and parse it with `HtmlExtractor::extract()`
/// qux: Qux = (elem of "#qux"),
/// // extracts inner HTML of the element that first matched the selector "#grault",
/// grault: String = (inner_html of "#grault"),
/// // stores if the elements that matches the selector "#garply" exist.
/// garply: bool = (presence of "#garply"),
/// }
/// #[derive(Debug, PartialEq)]
/// Qux {
/// corge: usize = (text of "#corge"),
/// }
/// }
///
/// fn main() {
/// let input = r#"
/// <div id="foo">1</div>
/// <div id="bar">ignore first<br>ignore second<br>2</div>
/// <div id="baz" data-baz="3"></div>
/// <div id="qux">
/// <div id="corge">4</div>
/// </div>
/// <div id="grault">
/// inner<br>html
/// </div>
/// "#;
/// let foo = Foo::extract_from_str(input).unwrap();
/// assert_eq!(foo, Foo {
/// foo: 1,
/// bar: 2,
/// baz: 3,
/// qux: Qux { corge: 4 },
/// grault: "inner<br>html".to_owned(),
/// garply: false,
/// });
/// }
/// ```
/// ### Capture specifier
/// Capture specifier specifies a regex that is used to capture the desired data from the string that is extracted with the target specifier.
///
/// The number of captures and the number of tuple elements must be the same.
///
/// If the specified regex is invalid, it will be a compile error.
///
/// It cannot be used with target specifier `elem of ..`.
///
/// If it is used without [collect specifier](#collect-specifier), the field must be a [tuple field](#defining-fields-in-structures).
/// If it is used with [collect specifier](#collect-specifier), the type of the field must be [`FromIterator`](std::iter::FromIterator) of tuple.
/// ```
/// use html_extractor::{html_extractor, HtmlExtractor};
/// html_extractor! {
/// #[derive(Debug, PartialEq)]
/// Foo {
/// // extracts a string from the first text node in the element that matches the selector "#foo-bar",
/// // and captures two data from the string with the regex "foo=(.*), bar=(.*)"
/// (foo: usize, bar: usize) = (text of "#foo-bar", capture with "foo=(.*), bar=(.*)"),
///
/// // extracts strings from the first text node in all elements that matches the selector ".baz-qux-corge",
/// // captures three data from each string with the regex "baz=(.*), qux=(.*), corge=(.*)" ,
/// // and collects into `Vec<(usize, usize, usize)>`
/// baz_qux_corge: Vec<(usize, usize, usize)> = (text of ".baz-qux-corge", capture with "baz=(.*), qux=(.*), corge=(.*)", collect),
/// }
/// }
///
/// fn main() {
/// let input = r#"
/// <div id="foo-bar">foo=1, bar=2</div>
///
/// <div class="baz-qux-corge">baz=1, qux=2, corge=3</div>
/// <div class="baz-qux-corge">baz=4, qux=5, corge=6</div>
/// <div class="baz-qux-corge">baz=7, qux=8, corge=9</div>
/// <div class="baz-qux-corge">baz=10, qux=11, corge=12</div>
/// "#;
/// let foo = Foo::extract_from_str(input).unwrap();
/// assert_eq!(foo, Foo {
/// foo: 1,
/// bar: 2,
/// baz_qux_corge: vec![(1, 2, 3), (4, 5, 6), (7, 8, 9), (10, 11, 12)],
/// });
/// }
/// ```
///
/// ### Collector specifier
/// Collector specifier specifies how to collect HTML elements.
/// The default collector is "first", which collects only the first matched element.
/// The "collect" collector collects all the element into the type that implements [`FromIterator`](std::iter::FromIterator).
/// The "optional" collector collects the first element if it exists. If not, it emits `None`.
/// ```
/// use html_extractor::{html_extractor, HtmlExtractor};
/// html_extractor! {
/// #[derive(Debug, PartialEq)]
/// Foo {
/// // extracts the first text node from each element that matches the selector ".foo", and collect them into `Vec<usize>`.
/// foo: Vec<usize> = (text of ".foo", collect),
///
/// // extracts all the elements that match that selector "#bar",
/// // parses them with `HtmlExtractor::extract()`,
/// // and collects into `Vec<Bar>`.
/// bar: Vec<Bar> = (elem of "#bar", collect),
///
/// // extracts strings from the first text node in all elements that matches the selector ".baz-qux-corge",
/// // captures three data from each string with the regex "baz=(.*), qux=(.*), corge=(.*)" ,
/// // and collects into `Vec<(usize, usize, usize)>`
/// baz_qux_corge: Vec<(usize, usize, usize)> = (text of ".baz-qux-corge", capture with "baz=(.*), qux=(.*), corge=(.*)", collect),
///
/// // optionally extracts the first text node in the first element that matches the selector ".grault".
/// grault: Option<usize> = (text of ".grault", optional),
/// }
/// #[derive(Debug, PartialEq)]
/// Bar {
/// bar: usize = (text of ".bar-data"),
/// }
/// }
///
/// fn main() {
/// let input = r#"
/// <div class="foo">1</div>
/// <div class="foo">2</div>
/// <div class="foo">3</div>
/// <div class="foo">4</div>
///
/// <div id="bar"><div class="bar-data">1</div></div>
/// <div id="bar"><div class="bar-data">2</div></div>
/// <div id="bar"><div class="bar-data">3</div></div>
/// <div id="bar"><div class="bar-data">4</div></div>
///
/// <div class="baz-qux-corge">baz=1, qux=2, corge=3</div>
/// <div class="baz-qux-corge">baz=4, qux=5, corge=6</div>
/// <div class="baz-qux-corge">baz=7, qux=8, corge=9</div>
/// <div class="baz-qux-corge">baz=10, qux=11, corge=12</div>
/// "#;
/// let foo = Foo::extract_from_str(input).unwrap();
/// assert_eq!(foo, Foo {
/// foo: vec![1, 2, 3, 4],
/// bar: vec![
/// Bar { bar: 1 },
/// Bar { bar: 2 },
/// Bar { bar: 3 },
/// Bar { bar: 4 },
/// ],
/// baz_qux_corge: vec![(1, 2, 3), (4, 5, 6), (7, 8, 9), (10, 11, 12)],
/// grault: None,
/// });
/// }
/// ```
/// ### Parser specifier
/// Parser specifier specifies the parser used to parse the extracted string.
/// The default parser is [`::std::str::FromStr::from_str`].
/// The parser must be `Fn(&str) -> Result<_, T> where T: std::fmt::Debug`
/// ```
/// use html_extractor::{html_extractor, HtmlExtractor};
/// html_extractor! {
/// #[derive(Debug, PartialEq)]
/// Foo {
/// // extracts using a custom parser.
/// foo: usize = (text of "#foo", parse with custom_parser),
/// }
/// }
/// fn custom_parser(input: &str) -> Result<usize, std::num::ParseIntError> {
/// input.replace(",", "").parse()
/// }
///
/// fn main() {
/// let input = r#"
/// <div id="foo">1,000,000,000</div>
/// "#;
/// let foo = Foo::extract_from_str(input).unwrap();
/// assert_eq!(foo, Foo {
/// foo: 1000000000,
/// });
/// }
/// ```
///
/// # Usage of the generated structures
/// The generated structures implement trait [`HtmlExtractor`].
/// See the document of the trait.
pub use html_extractor_macros::html_extractor;
/// A trait for extracting data from HTML documents.
///
/// It is recommended to use [`html_extractor!`](macro.html_extractor.html) to implement `HtmlExtractor`.
pub trait HtmlExtractor
where
    Self: Sized,
{
    /// Extracts data from a [`scraper::ElementRef`].
    fn extract(elem: &scraper::ElementRef) -> Result<Self, Error>;
    /// Parses an HTML string into a document and extracts data from its root element.
    fn extract_from_str(html_str: &str) -> Result<Self, Error> {
        let document = scraper::Html::parse_document(html_str);
        Self::extract(&document.root_element())
    }
}
#[cfg(test)]
mod test;
|
//! Main optimization struct
use std::cmp::Ordering;
use crate::{
acquisition::ExpectedImprovement, kernels::RBF, lbfgs_opt::minimize, posterior::Laplace,
DataPreferences, DataSamples,
};
use anyhow::Result;
use dialoguer::{theme::ColorfulTheme, Select};
use itertools::Itertools;
use nalgebra::{Cholesky, DMatrix, DVector, Dynamic, MatrixXx2, RowDVector};
use rand::{distributions::Uniform, prelude::Distribution};
use statrs::distribution::Normal;
use thiserror::Error;
/// Errors returned by [`PreferenceOpt`] constructors and builder methods.
#[derive(Error, Debug)]
pub enum OptError {
    /// `from_data` was called with an empty sample list.
    #[error("No samples given, try using `PreferenceOpt::new`")]
    NoSamples,
    /// A preference pair referenced a sample row index that does not exist.
    #[error("Invalid preference index: {0}")]
    InvalidPreference(usize),
    // NOTE(review): the next two messages contain grammar slips ("have" should
    // be "has", "where" should be "were"), but the unit tests below assert
    // these exact strings via `should_panic` — fix message and tests together.
    #[error("The problem have {dims} dimensions but {n_bounds} bounds where given")]
    InvalidBounds { dims: usize, n_bounds: usize },
    #[error("The `low` bound ({low}) is higher than the `high` bound ({high})")]
    InvalidBoundLimits { low: f64, high: f64 },
    /// The kernel matrix was not positive-definite during `fit`.
    #[error("The kernel is not returning a positive-definite matrix. Try gradually increasing the `alpha` parameter on PreferenceOpt")]
    CholeskyNotFound,
}
/// Interactively asks the user to choose between the current best sample and
/// a new proposal.
///
/// Returns `Some((winner_idx, loser_idx))`, or `None` when the user quits.
fn ask_user(
    current: Vec<f64>,
    current_idx: usize,
    proposal: Vec<f64>,
    proposal_idx: usize,
) -> Option<(usize, usize)> {
    let choices = [
        format!("Current best: {:.2?}", current),
        format!("Proposal : {:.2?}", proposal),
        "Quit".into(),
    ];
    let picked = Select::with_theme(&ColorfulTheme::default())
        .items(&choices)
        .default(0)
        .interact()
        .unwrap();
    if picked == 0 {
        Some((current_idx, proposal_idx))
    } else if picked == 1 {
        Some((proposal_idx, current_idx))
    } else {
        None
    }
}
/// Copies row `row` of matrix `x` into a plain `Vec<f64>`.
fn row2vec(x: &DMatrix<f64>, row: usize) -> Vec<f64> {
    // `.copied()` replaces the manual `.map(|&o| o)` (clippy::map_clone).
    x.row(row).iter().copied().collect::<Vec<_>>()
}
#[derive(Debug)]
pub struct PreferenceOpt {
    /// Sample points, stored one per row.
    pub x: DataSamples,
    /// Recorded preference pairs; column 0 holds the preferred (winner) index.
    pub m: DataPreferences,
    /// Per-dimension inclusive `(low, high)` bounds; `None` until set via `with_bounds`.
    pub bounds: Option<Vec<(f64, f64)>>,
    /// Covariance kernel of the Gaussian process.
    pub kernel: RBF,
    /// Value added to the diagonal of the kernel matrix during fitting.
    /// Larger values correspond to increased noise level in the observations.
    pub alpha: f64,
    /// Posterior values of `f`, set by `fit`.
    pub posterior: Option<DVector<f64>>,
    /// Laplace approximation used to compute the posterior.
    pub post_approx: Laplace,
    /// Acquisition function (expected improvement).
    pub acquisition: ExpectedImprovement,
    /// Standard normal distribution used by the posterior/acquisition computations.
    pub distribution: Normal,
    // Cached Cholesky factor of the kernel matrix, set by `fit`.
    l_: Option<Cholesky<f64, Dynamic>>,
    // One uniform sampler per dimension, built by `with_bounds`.
    distributions: Option<Vec<Uniform<f64>>>,
}
impl PreferenceOpt {
/// Creates a new optimization from data.
///
/// # Examples
///
/// ```
/// # use preference_opt::PreferenceOpt;
/// let opt = PreferenceOpt::new(5);
/// ```
pub fn new(dims: usize) -> Self {
let x = DataSamples::new(dims);
let m = DataPreferences::new(vec![]);
Self {
x,
m,
bounds: None,
kernel: RBF::default(),
alpha: 1e-5,
posterior: None,
post_approx: Laplace::default(),
acquisition: ExpectedImprovement::default(),
distribution: Normal::new(0.0, 1.0).unwrap(),
l_: None,
distributions: None,
}
}
/// Creates a new optimization from data.
///
/// # Examples
///
/// ```
/// # use preference_opt::PreferenceOpt;
/// let x = vec![vec![0.0, 1.0], vec![4.0, 3.0], vec![2.0, 3.0]];
/// let m = vec![(0, 1), (2, 0)];
/// let opt = PreferenceOpt::from_data(x, m).unwrap();
/// ```
pub fn from_data(samples: Vec<Vec<f64>>, preferences: Vec<(usize, usize)>) -> Result<Self> {
if samples.len() == 0 {
return Err(OptError::NoSamples.into());
}
let x = DataSamples::from_data(samples)?;
let m = DataPreferences::new(preferences);
if m.max() >= x.len() {
return Err(OptError::InvalidPreference(m.max()).into());
}
Ok(Self {
x,
m,
bounds: None,
kernel: RBF::default(),
alpha: 1e-5,
posterior: None,
post_approx: Laplace::default(),
acquisition: ExpectedImprovement::default(),
distribution: Normal::new(0.0, 1.0).unwrap(),
l_: None,
distributions: None,
})
}
/// Get the number of dimensions to optimize.
    pub fn dims(&self) -> usize {
        // Samples are stored row-wise, so the column count is the dimensionality.
        self.x.data.ncols()
    }
    /// Ensures there are at least two samples, inserting random samples if needed.
    pub fn init_samples(&mut self) {
        let n = self.x.len();
        // The optimizer needs at least two samples to form the first pair.
        if n < 2 {
            let rows2insert = 2 - n;
            // NOTE(review): `random_sample` unwraps `self.distributions`, so
            // calling this before `with_bounds`/`with_same_bounds` panics —
            // confirm that is the intended contract.
            let x_append = self.random_sample(rows2insert);
            // Grow the sample matrix, then copy the random rows into the new slots.
            let mut x = self.x.data.clone().insert_rows(n, rows2insert, 0.0);
            for (i, row) in x_append.row_iter().enumerate() {
                for (j, &o) in row.iter().enumerate() {
                    x[(n + i, j)] = o;
                }
            }
            self.x.data = x;
        }
    }
/// Define bounds for the optimization problem (inclusive bounds).
///
/// # Examples
///
/// ```
/// # use preference_opt::PreferenceOpt;
/// let x = vec![vec![0.0, 1.0], vec![4.0, 3.0], vec![2.0, 3.0]];
/// let m = vec![(0, 1), (2, 0)];
/// let opt = PreferenceOpt::from_data(x, m)?.with_bounds(vec![(0.0, 10.0), (0.0, 10.0)])?;
/// # Ok::<(), anyhow::Error>(())
/// ```
    pub fn with_bounds(mut self, bounds: Vec<(f64, f64)>) -> Result<Self> {
        // One (low, high) pair per dimension is required.
        if bounds.len() != self.dims() {
            return Err(OptError::InvalidBounds {
                dims: self.dims(),
                n_bounds: bounds.len(),
            }
            .into());
        }
        // Bounds must be non-degenerate: low strictly below high.
        for &(low, high) in bounds.iter() {
            if low >= high {
                return Err(OptError::InvalidBoundLimits { low, high }.into());
            }
        }
        // Pre-build one inclusive uniform sampler per dimension for `random_sample`.
        self.distributions = Some(
            bounds
                .iter()
                .map(|(low, high)| Uniform::new_inclusive(low, high))
                .collect::<Vec<_>>(),
        );
        self.bounds = Some(bounds);
        Ok(self)
    }
/// Define the same bounds for all dimensions of the optimization problem (inclusive bounds).
///
/// # Examples
///
/// ```
/// # use preference_opt::PreferenceOpt;
/// let x = vec![vec![0.0, 1.0], vec![4.0, 3.0], vec![2.0, 3.0]];
/// let m = vec![(0, 1), (2, 0)];
/// let opt = PreferenceOpt::from_data(x, m)?.with_same_bounds((0.0, 10.0))?;
/// # Ok::<(), anyhow::Error>(())
/// ```
pub fn with_same_bounds(self, bounds: (f64, f64)) -> Result<Self> {
let bounds = vec![bounds; self.dims()];
self.with_bounds(bounds)
}
/// Get sample to test.
    /// This is useful for implementing a manual optimization in conjunction with `add_preference`; be sure to keep track of `f_prior`.
/// Returns (sample1, index1, sample2, index2, f_prior)
///
/// # Arguments
///
/// * `f_prior` - Prior with mean zero is applied by default
/// * `n_init` - Number of initialization points for the solver, obtained by randomly sampling the acquisition function
/// * `n_solve` - The solver will be run n_solve times (cannot be superior to n_init)
pub fn get_next_sample(
&mut self,
f_prior: Option<Vec<f64>>,
n_init: usize,
n_solve: usize,
) -> Result<(Vec<f64>, usize, Vec<f64>, usize, Option<Vec<f64>>)> {
self.init_samples();
if self.m.data.len() == 0 {
let row0 = row2vec(&self.x.data, 0);
let row1 = row2vec(&self.x.data, 1);
return Ok((row0, 0, row1, 1, None));
}
let mut x = self.x.data.clone();
let m = self.m.data.clone();
let n = x.nrows();
let mut f_prior = match f_prior {
Some(prior) => DVector::from_vec(prior.clone()),
None => DVector::zeros(n),
};
let m_ind_cpt = if m.nrows() > 0 { m.nrows() - 1 } else { 0 };
let (m_ind_current, m_ind_proposal) =
self.get_next_pair(&mut x, &m, &mut f_prior, m_ind_cpt, n_init, n_solve)?;
let current = row2vec(&x, m_ind_current);
let proposal = row2vec(&x, m_ind_proposal);
let f_prior = f_prior.iter().map(|&o| o).collect::<Vec<_>>();
self.x.data = x;
Ok((
current,
m_ind_current,
proposal,
m_ind_proposal,
Some(f_prior),
))
}
/// Adds a preference to the data.
    /// This is useful for implementing a manual optimization in conjunction with `get_next_sample`.
    pub fn add_preference(&mut self, preference: usize, other: usize) {
        // Append a new row [preferred, other] to the preference matrix.
        let mut m = self.m.data.clone();
        let n = m.nrows();
        m = m.insert_row(n, 0);
        m[(n, 0)] = preference;
        m[(n, 1)] = other;
        self.m.data = m;
    }
/// Get the optimal set of values.
    pub fn get_optimal_values(&self) -> Vec<f64> {
        // The winner (column 0) of the most recent preference is the current optimum.
        let idx = self.m.data[(self.m.len() - 1, 0)];
        row2vec(&self.x.data, idx)
    }
/// Bayesian optimization via preferences inputs.
/// Returns (optimal_values, f_posterior)
///
    /// # Arguments
    ///
    /// * `max_iters` - Maximum number of iterations to be performed for the bayesian optimization
/// * `f_prior` - Prior with mean zero is applied by default
/// * `n_init` - Number of initialization points for the solver, obtained by randomly sampling the acquisition function
/// * `n_solve` - The solver will be run n_solve times (cannot be superior to n_init)
pub fn interactive_optimization(
&mut self,
max_iters: usize,
f_prior: Option<DVector<f64>>,
n_init: usize,
n_solve: usize,
) -> Result<(RowDVector<f64>, DVector<f64>)> {
self.init_samples();
let mut x = self.x.data.clone();
let mut m = if self.m.len() == 0 {
let row0 = row2vec(&x, 0);
let row1 = row2vec(&x, 1);
let (a, b) = ask_user(row0, 0, row1, 1).unwrap();
let m = MatrixXx2::from_vec(vec![a, b]);
self.m.data = m.clone();
m
} else {
self.m.data.clone()
};
let n = x.nrows();
let mut f_prior = f_prior.map(|o| o.clone()).unwrap_or(DVector::zeros(n));
let m_last_idx = if m.nrows() > 0 { m.nrows() - 1 } else { 0 };
for m_ind_cpt in m_last_idx..(m_last_idx + max_iters) {
let (m_ind_current, m_ind_proposal) =
self.get_next_pair(&mut x, &m, &mut f_prior, m_ind_cpt, n_init, n_solve)?;
let current = row2vec(&x, m_ind_current);
let proposal = row2vec(&x, m_ind_proposal);
match ask_user(current, m_ind_current, proposal, m_ind_proposal) {
Some(new_pair) => {
let n = m.nrows();
m = m.insert_row(n, 0);
m[(n, 0)] = new_pair.0;
m[(n, 1)] = new_pair.1;
}
None => break,
}
}
let idx = m[(m.nrows() - 1, 0)];
let optimal_values = x.row(idx).clone_owned();
let f_posterior = f_prior;
self.x.data = x;
self.m.data = m;
Ok((optimal_values, f_posterior))
}
/// Optimizes the problem based on a function.
/// Returns (optimal_values, f_posterior)
///
/// # Arguments
///
/// * `func` - Function to optimize
/// * `max_iters` - Maximum number of iterations to be performed for the bayesian optimization
/// * `f_prior` - Prior with mean zero is applied by default
/// * `n_init` - Number of initialization points for the solver, obtained by randomly sampling the acquisition function
/// * `n_solve` - The solver will be run n_solve times (cannot be superior to n_init)
pub fn optimize_fn(
&mut self,
func: fn(&[f64]) -> f64,
max_iters: usize,
f_prior: Option<DVector<f64>>,
n_init: usize,
n_solve: usize,
) -> Result<(RowDVector<f64>, DVector<f64>)> {
self.init_samples();
let mut x = self.x.data.clone();
let mut m = if self.m.len() == 0 {
let row0 = func(&row2vec(&x, 0));
let row1 = func(&row2vec(&x, 1));
let m = if row0 < row1 {
MatrixXx2::from_vec(vec![1, 0])
} else {
MatrixXx2::from_vec(vec![0, 1])
};
self.m.data = m.clone();
m
} else {
self.m.data.clone()
};
let n = x.nrows();
let mut f_prior = f_prior.map(|o| o.clone()).unwrap_or(DVector::zeros(n));
let m_last_idx = if m.nrows() > 0 { m.nrows() - 1 } else { 0 };
for m_ind_cpt in m_last_idx..(m_last_idx + max_iters) {
let (m_ind_current, m_ind_proposal) =
self.get_next_pair(&mut x, &m, &mut f_prior, m_ind_cpt, n_init, n_solve)?;
let current = func(&row2vec(&x, m_ind_current));
let proposal = func(&row2vec(&x, m_ind_proposal));
let new_pair = if current < proposal {
(m_ind_proposal, m_ind_current)
} else {
(m_ind_current, m_ind_proposal)
};
let n = m.nrows();
m = m.insert_row(n, 0);
m[(n, 0)] = new_pair.0;
m[(n, 1)] = new_pair.1;
}
let idx = m[(m.nrows() - 1, 0)];
let optimal_values = x.row(idx).clone_owned();
let f_posterior = f_prior;
self.x.data = x;
self.m.data = m;
Ok((optimal_values, f_posterior))
}
/// Outputs the index of the current best sample and a proposal sample.
    fn get_next_pair(
        &mut self,
        x: &mut DMatrix<f64>,
        m: &MatrixXx2<usize>,
        f_prior: &mut DVector<f64>,
        m_ind_cpt: usize,
        n_init: usize,
        n_solve: usize,
    ) -> Result<(usize, usize)> {
        // Keep the stored samples in sync before fitting (`predict` reads self.x).
        self.x.data = x.clone();
        self.fit(x, f_prior)?;
        // Maximize the acquisition function to obtain the next candidate point.
        let x_optim = self.bayesopt(n_init, n_solve);
        let x_optim = DMatrix::from_rows(&[x_optim]);
        let f_optim = self.predict(&x_optim, false).0;
        // Extend the prior (in place, for the caller) with the fitted posterior
        // plus the predicted value at the new candidate.
        let _f_prior = self.posterior.clone().unwrap();
        let n = _f_prior.nrows();
        *f_prior = _f_prior.insert_row(n, f_optim[0]);
        // Append the candidate as a new row of `x` (also mutated for the caller).
        let n = x.nrows();
        *x = x.clone().insert_row(n, 0.0);
        x_optim
            .row(0)
            .iter()
            .enumerate()
            .for_each(|(i, &o)| x[(n, i)] = o);
        // current preference index in X
        let m_ind_current = m[(m.nrows() - 1, 0)];
        // suggestion index in X
        let m_ind_proposal = m_ind_cpt + 2;
        Ok((m_ind_current, m_ind_proposal))
    }
/// Fit a Gaussian process probit regression model.
fn fit(&mut self, x: &DMatrix<f64>, f_prior: &DVector<f64>) -> Result<()> {
// compute quantities required for prediction
let mut k = self.kernel.apply(x, None);
k.set_diagonal(&k.diagonal().add_scalar(self.alpha));
self.l_ = Some(k.cholesky().ok_or(OptError::CholeskyNotFound)?);
// compute the posterior distribution of f
self.posterior = Some(self.post_approx.apply(
&f_prior,
&self.m.data,
self.l_.as_ref().unwrap(),
&self.distribution,
));
Ok(())
}
/// Bayesian optimization based on the optimization of a
/// utility function of the attributes of the posterior distribution.
    fn bayesopt(&self, n_init: usize, n_solve: usize) -> RowDVector<f64> {
        let y_max = self.posterior.as_ref().unwrap().max();
        // Random warm-up points, used both for the initial argmax and as solver seeds.
        let x_tries = self.random_sample(n_init);
        let aqc_optim = |x| {
            let (y_mean, std) = self.predict(&x, true);
            let std = std.unwrap();
            self.acquisition
                .apply(y_mean, std, y_max, &self.distribution)
        };
        let ys = aqc_optim(x_tries.clone_owned());
        let (x_argmax, mut max_acq) = ys.argmax();
        let mut x_max = x_tries.row(x_argmax).clone_owned();
        // NOTE(review): this sort is ascending, so the `n_solve` seeds are the
        // rows with the LOWEST acquisition values; seeding the solver from the
        // highest values (reversed comparison) looks intended — confirm before
        // changing, since it alters optimization behavior.
        let x_seeds = ys
            .into_iter()
            .enumerate()
            .sorted_by(|a, b| a.1.partial_cmp(b.1).unwrap_or(Ordering::Less))
            .map(|o| x_tries.row(o.0).clone_owned())
            .take(n_solve);
        for x_try in x_seeds {
            // Find the minimum of -1 * acquisition function
            let (best, best_value) =
                minimize(|o| -aqc_optim(o)[0], x_try, self.bounds.as_ref().unwrap());
            // Store it if better than previous minimum(maximum)
            if (-best_value) >= max_acq {
                x_max = best.row(0).clone_owned();
                max_acq = -best_value;
            }
        }
        x_max
    }
/// Samples to warm up with random points.
fn random_sample(&self, n: usize) -> DMatrix<f64> {
let mut rng = rand::thread_rng();
let x = self
.distributions
.as_ref()
.unwrap()
.iter()
.map(|o| o.sample_iter(&mut rng).take(n).collect::<Vec<_>>())
.flatten()
.collect::<Vec<_>>();
DMatrix::from_vec(n, self.dims(), x)
}
/// Predict using the Gaussian process regression model.
/// Returns mean and standard deviation of predictive distribution at query points `x`.
    fn predict(&self, x: &DMatrix<f64>, return_std: bool) -> (DVector<f64>, Option<DVector<f64>>) {
        // Requires `fit` to have run first: both `l_` and `posterior` must be set.
        let l_ = self.l_.as_ref().unwrap();
        let k_trans = self.kernel.apply(&self.x.data, Some(x));
        let lk = l_.solve(&k_trans);
        let lf = l_.solve(self.posterior.as_ref().unwrap());
        let y_mean = lk.transpose() * lf;
        let std = if return_std {
            // Clamp negative variances (numerical noise) to zero before the sqrt.
            let y_var = self.kernel.apply(x, None).diagonal() - lk.map(|o| o.powi(2)).row_sum_tr();
            let std = y_var.map(|o| if o < 0.0 { 0.0 } else { o.sqrt() });
            Some(std)
        } else {
            None
        };
        (y_mean, std)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Error message must match `OptError::InvalidPreference` exactly.
    #[test]
    #[should_panic(expected = "Invalid preference index: 3")]
    fn from_data_fails() {
        // Preference (2, 3) references row 3, but only rows 0..=1 exist.
        let x = vec![vec![0.0, 1.0], vec![2.0, 3.0]];
        let m = vec![(0, 1), (2, 3)];
        PreferenceOpt::from_data(x, m).unwrap();
    }
    #[test]
    #[should_panic(expected = "The `low` bound (2) is higher than the `high` bound (1)")]
    fn with_bounds_fail() {
        // First bound pair is inverted: low (2.0) >= high (1.0).
        let x = vec![vec![0.0, 1.0], vec![4.0, 3.0], vec![2.0, 3.0]];
        let m = vec![(0, 1)];
        PreferenceOpt::from_data(x, m)
            .unwrap()
            .with_bounds(vec![(2.0, 1.0), (0.0, 1.0)])
            .unwrap();
    }
    #[test]
    #[should_panic(expected = "The problem have 2 dimensions but 1 bounds where given")]
    fn with_bounds_fail_dims() {
        // Two-dimensional samples but only one bound pair supplied.
        let x = vec![vec![0.0, 1.0], vec![4.0, 3.0], vec![2.0, 3.0]];
        let m = vec![(0, 1)];
        PreferenceOpt::from_data(x, m)
            .unwrap()
            .with_bounds(vec![(0.0, 10.0)])
            .unwrap();
    }
    #[test]
    fn random_sample() -> Result<()> {
        let samples = vec![vec![1.0, 0.0]];
        let preferences = vec![];
        let opt = PreferenceOpt::from_data(samples, preferences)?
            .with_bounds(vec![(0.0, 10.0), (20.0, 40.0)])?;
        let res = opt.random_sample(10);
        assert_eq!(res.nrows(), 10);
        // Column 0 is bounded by (0, 10); column 1 by (20, 40).
        assert!(res.column(0).max() < 20.0);
        assert!(res.column(1).min() > 10.0);
        Ok(())
    }
    #[test]
    fn init_samples() -> Result<()> {
        // A fresh optimizer has no samples; init_samples should add two random ones.
        let mut opt = PreferenceOpt::new(2).with_same_bounds((0.0, 10.0))?;
        opt.init_samples();
        println!("{}", opt.x.data);
        Ok(())
    }
    #[test]
    fn optimize_fn_test() -> Result<()> {
        let samples = vec![vec![1.0, 3.0], vec![2.0, 5.0]];
        let preferences = vec![(1, 0)];
        let mut opt =
            PreferenceOpt::from_data(samples, preferences)?.with_same_bounds((0.0, 10.0))?;
        let func = |o: &[f64]| o.iter().sum();
        let (optimal_values, f_posterior) = opt.optimize_fn(func, 1, None, 1, 1)?;
        println!("optimal_values -> {}", optimal_values);
        println!("f_posterior -> {}", f_posterior);
        opt.x.show();
        opt.m.show();
        // Resume optimization, feeding the previous posterior back in as the prior.
        let (optimal_values, f_posterior) = opt.optimize_fn(func, 1, Some(f_posterior), 1, 1)?;
        println!("optimal_values -> {}", optimal_values);
        println!("f_posterior -> {}", f_posterior);
        opt.x.show();
        opt.m.show();
        // Three-dimensional problem with per-dimension bounds.
        let samples = vec![vec![1.0, 0.0, 7.0], vec![2.0, 5.0, 3.0]];
        let preferences = vec![(1, 0)];
        let mut opt = PreferenceOpt::from_data(samples, preferences)?.with_bounds(vec![
            (0.0, 10.0),
            (0.0, 5.0),
            (0.0, 30.0),
        ])?;
        let func = |o: &[f64]| o.iter().sum();
        let (optimal_values, f_posterior) = opt.optimize_fn(func, 10, None, 10, 3)?;
        println!("optimal_values -> {}", optimal_values);
        println!("f_posterior -> {}", f_posterior);
        opt.x.show();
        opt.m.show();
        Ok(())
    }
    #[test]
    fn optimize_fn_empty_preferences() -> Result<()> {
        // With no preferences, optimize_fn must seed the first pair itself.
        let samples = vec![vec![1.0, 3.0], vec![2.0, 5.0]];
        let preferences = vec![];
        let mut opt =
            PreferenceOpt::from_data(samples, preferences)?.with_same_bounds((0.0, 10.0))?;
        let func = |o: &[f64]| o.iter().sum();
        let (optimal_values, f_posterior) = opt.optimize_fn(func, 1, None, 1, 1)?;
        println!("optimal_values -> {}", optimal_values);
        println!("f_posterior -> {}", f_posterior);
        opt.x.show();
        opt.m.show();
        Ok(())
    }
    #[test]
    fn optimize_fn_empty_samples() -> Result<()> {
        // Single sample: init_samples must top it up to two.
        let samples = vec![vec![1.0, 3.0]];
        let preferences = vec![];
        let mut opt =
            PreferenceOpt::from_data(samples, preferences)?.with_same_bounds((0.0, 10.0))?;
        let func = |o: &[f64]| o.iter().sum();
        let (optimal_values, f_posterior) = opt.optimize_fn(func, 1, None, 1, 1)?;
        println!("optimal_values -> {}", optimal_values);
        println!("f_posterior -> {}", f_posterior);
        opt.x.show();
        opt.m.show();
        // No samples at all: both initial samples are drawn at random.
        let mut opt = PreferenceOpt::new(2).with_same_bounds((0.0, 10.0))?;
        let func = |o: &[f64]| o.iter().sum();
        let (optimal_values, f_posterior) = opt.optimize_fn(func, 1, None, 1, 1)?;
        println!("optimal_values -> {}", optimal_values);
        println!("f_posterior -> {}", f_posterior);
        opt.x.show();
        opt.m.show();
        Ok(())
    }
    #[test]
    fn manual_optimization() -> Result<()> {
        // Drive the manual API: repeatedly fetch a pair, always prefer idx1,
        // and thread the returned prior into the next call.
        let mut opt = PreferenceOpt::new(3).with_same_bounds((0.0, 10.0))?;
        let mut prior = None;
        for _ in 0..15 {
            let (_, idx1, _, idx2, f_prior) = opt.get_next_sample(prior, 1, 1)?;
            opt.add_preference(idx1, idx2);
            prior = f_prior;
        }
        opt.x.show();
        opt.m.show();
        println!("{:?}", opt.get_optimal_values());
        Ok(())
    }
}
|
use crate::{
error::Error,
graph::{remove_node_id, DepGraph, DependencyMap},
};
use crossbeam_channel::{Receiver, Sender};
use rayon::iter::{
plumbing::{bridge, Consumer, Producer, ProducerCallback, UnindexedConsumer},
IndexedParallelIterator, IntoParallelIterator, ParallelIterator,
};
use std::cmp;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::iter::{DoubleEndedIterator, ExactSizeIterator};
use std::ops;
use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc, RwLock,
};
use std::thread;
use std::time::Duration;
/// Default timeout: 1000 milliseconds.
const DEFAULT_TIMEOUT: Duration = Duration::from_millis(1000);
/// Adds `into_par_iter()` to `DepGraph`.
impl<I> IntoParallelIterator for DepGraph<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    type Item = Wrapper<I>;
    type Iter = DepGraphParIter<I>;
    /// Consumes the graph and hands its ready nodes plus dependency maps to
    /// the dispatcher-backed parallel iterator.
    fn into_par_iter(self) -> Self::Iter {
        DepGraphParIter::new(self.ready_nodes, self.deps, self.rdeps)
    }
}
/// Wrapper for an item
///
/// This is used to pass items through parallel iterators. When the wrapper is
/// dropped, we decrement the processing `counter` and notify the dispatcher
/// thread through the `item_done_tx` channel.
// NOTE(review): `derive(Clone)` clones without incrementing `counter`, yet
// every clone decrements it on drop — potential counter underflow if clones
// are dropped independently; confirm clones never outlive the original use.
#[derive(Clone)]
pub struct Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    // Wrapped item
    inner: I,
    // Reference to the number of items being currently processed; incremented
    // in `Wrapper::new` and decremented when the wrapper is dropped.
    counter: Arc<AtomicUsize>,
    // Channel to notify that the item is done processing (upon drop)
    item_done_tx: Sender<I>,
}
impl<I> Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    /// Create a new Wrapper item
    ///
    /// This needs a reference to the processing counter to keep count of the
    /// number of items currently processed (used to check for circular
    /// dependencies) and the item done channel to notify the dispatcher
    /// thread.
    ///
    /// Upon creation of a `Wrapper`, we also increment the processing counter.
    pub fn new(inner: I, counter: Arc<AtomicUsize>, item_done_tx: Sender<I>) -> Self {
        // `Arc` auto-derefs; the explicit `(*counter)` was redundant.
        counter.fetch_add(1, Ordering::SeqCst);
        Self {
            inner,
            counter,
            item_done_tx,
        }
    }
}
/// Drop implementation to decrement the processing counter and notify the
/// dispatcher thread.
impl<I> Drop for Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    /// Triggered when the wrapper is dropped.
    ///
    /// This will decrement the processing counter and notify the dispatcher thread.
    fn drop(&mut self) {
        // `Arc` auto-derefs; the explicit `(*self.counter)` was redundant.
        self.counter.fetch_sub(1, Ordering::SeqCst);
        // NOTE(review): panicking inside Drop aborts the process if we are
        // already unwinding; confirm the dispatcher always outlives wrappers,
        // or consider logging instead of `expect`.
        self.item_done_tx
            .send(self.inner.clone())
            .expect("could not send message")
    }
}
/// Dereference implementation to access the inner item
///
/// This allows reading the wrapped item through `(*wrapper)` or any `&I`
/// coercion site without unwrapping it explicitly.
impl<I> ops::Deref for Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    type Target = I;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

/// Mutable dereference implementation to access the inner item
///
/// This allows mutating the wrapped item through `(*wrapper) = ...` or
/// `&mut I` coercion; the bookkeeping fields are untouched.
impl<I> ops::DerefMut for Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Equality and hashing delegate entirely to the wrapped item; the
// bookkeeping fields (`counter`, `item_done_tx`) are deliberately ignored so
// a `Wrapper<I>` behaves like its `I` in sets and maps.
impl<I> Eq for Wrapper<I> where I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static
{}

impl<I> Hash for Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.inner.hash(state)
    }
}

impl<I> cmp::PartialEq for Wrapper<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    fn eq(&self, other: &Self) -> bool {
        self.inner == other.inner
    }
}
/// Parallel iterator for DepGraph
///
/// Holds the worker-side endpoints of the two dispatcher channels plus the
/// shared state the dispatcher thread reads.
pub struct DepGraphParIter<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    // Dispatcher select timeout, shared with the dispatcher thread so
    // `with_timeout` can change it after spawn.
    timeout: Arc<RwLock<Duration>>,
    // Number of items currently being processed.
    counter: Arc<AtomicUsize>,
    // Nodes whose dependencies are all resolved, ready to be processed.
    item_ready_rx: Receiver<I>,
    // Cloned into each `Wrapper` so its drop can report completion.
    item_done_tx: Sender<I>,
}
impl<I> DepGraphParIter<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    /// Create a new parallel iterator
    ///
    /// This will create a dispatcher thread and two crossbeam channels:
    /// `item_ready` (dispatcher -> workers: nodes whose dependencies are all
    /// resolved) and `item_done` (workers -> dispatcher: sent from
    /// `Wrapper::drop` when processing finishes).
    ///
    /// NOTE(review): the dispatcher closure returns a `Result`, but the
    /// `JoinHandle` is dropped, so a circular-dependency error is only
    /// observable to workers as the `item_ready` channel closing — confirm
    /// callers do not need the error value itself.
    pub fn new(ready_nodes: Vec<I>, deps: DependencyMap<I>, rdeps: DependencyMap<I>) -> Self {
        let timeout = Arc::new(RwLock::new(DEFAULT_TIMEOUT));
        let counter = Arc::new(AtomicUsize::new(0));
        // Create communication channels for ready and processed nodes
        let (item_ready_tx, item_ready_rx) = crossbeam_channel::unbounded::<I>();
        let (item_done_tx, item_done_rx) = crossbeam_channel::unbounded::<I>();
        // Seed the ready channel with the nodes that have no dependencies
        ready_nodes
            .iter()
            .for_each(|node| item_ready_tx.send(node.clone()).unwrap());
        // Clone Arcs for the dispatcher thread
        let loop_timeout = timeout.clone();
        let loop_counter = counter.clone();
        // Start dispatcher thread
        thread::spawn(move || {
            loop {
                crossbeam_channel::select! {
                    // Grab a processed node ID
                    recv(item_done_rx) -> id => {
                        let id = id.unwrap();
                        // Remove the node from all reverse dependencies;
                        // returns the nodes that just became unblocked.
                        let next_nodes = remove_node_id::<I>(id, &deps, &rdeps)?;
                        // Send the next available nodes to the channel.
                        next_nodes
                            .iter()
                            .for_each(|node_id| item_ready_tx.send(node_id.clone()).unwrap());
                        // If there are no more nodes, leave the loop
                        if deps.read().unwrap().is_empty() {
                            break;
                        }
                    },
                    // Nothing finished within the timeout window
                    default(*loop_timeout.read().unwrap()) => {
                        let deps = deps.read().unwrap();
                        let counter_val = loop_counter.load(Ordering::SeqCst);
                        if deps.is_empty() {
                            break;
                        // There are still some items processing.
                        } else if counter_val > 0 {
                            continue;
                        } else {
                            // Dependencies remain but nothing is in flight:
                            // the graph cannot make progress.
                            return Err(Error::ResolveGraphError("circular dependency detected"));
                        }
                    },
                };
            }
            // Drop the ready-channel sender.
            // This closes the channel, which makes every producer's `recv`
            // return Err and terminates the worker iterators.
            drop(item_ready_tx);
            Ok(())
        });
        DepGraphParIter {
            timeout,
            counter,
            item_ready_rx,
            item_done_tx,
        }
    }

    /// Override the dispatcher timeout (default: `DEFAULT_TIMEOUT`).
    ///
    /// The dispatcher reads the value through the shared `RwLock` on every
    /// select iteration, so the new value applies to subsequent waits; a wait
    /// already in progress still uses the previous value.
    pub fn with_timeout(self, timeout: Duration) -> Self {
        *self.timeout.write().unwrap() = timeout;
        self
    }
}
/// rayon entry point: drive consumers through the generic `bridge`, which
/// delegates to the `Producer` implementation below.
impl<I> ParallelIterator for DepGraphParIter<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    type Item = Wrapper<I>;

    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        bridge(self, consumer)
    }
}
impl<I> IndexedParallelIterator for DepGraphParIter<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    /// NOTE(review): this is not the number of items — the true count is
    /// unknown up front. Reporting `num_cpus::get()` makes rayon split the
    /// producer into roughly one piece per core; each piece then pulls from
    /// the shared ready-channel until it closes. Confirm this interacts
    /// correctly with rayon's splitting heuristics.
    fn len(&self) -> usize {
        num_cpus::get()
    }

    fn drive<C>(self, consumer: C) -> C::Result
    where
        C: Consumer<Self::Item>,
    {
        bridge(self, consumer)
    }

    /// Hand rayon a `Producer` that shares the dispatcher's channels.
    fn with_producer<CB>(self, callback: CB) -> CB::Output
    where
        CB: ProducerCallback<Self::Item>,
    {
        callback.callback(DepGraphProducer {
            counter: self.counter.clone(),
            item_ready_rx: self.item_ready_rx,
            item_done_tx: self.item_done_tx,
        })
    }
}
/// rayon `Producer` for the dependency graph: every split shares the same
/// ready-channel, so pieces steal work from one another naturally.
struct DepGraphProducer<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    // Shared in-flight counter, passed into each produced `Wrapper`.
    counter: Arc<AtomicUsize>,
    // Source of nodes whose dependencies are resolved.
    item_ready_rx: Receiver<I>,
    // Cloned into each `Wrapper` so its drop can report completion.
    item_done_tx: Sender<I>,
}
impl<I> Iterator for DepGraphProducer<I>
where
I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
type Item = Wrapper<I>;
fn next(&mut self) -> Option<Self::Item> {
// TODO: Check until there is an item available
match self.item_ready_rx.recv() {
Ok(item) => Some(Wrapper::new(
item,
self.counter.clone(),
self.item_done_tx.clone(),
)),
Err(_) => None,
}
}
}
// Required by rayon's `Producer` bound. The channel has no meaningful "back",
// so iterating from the back is the same as iterating from the front.
impl<I> DoubleEndedIterator for DepGraphProducer<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    fn next_back(&mut self) -> Option<Self::Item> {
        self.next()
    }
}
// Required by rayon's `Producer` bound.
// NOTE(review): this relies on the default `ExactSizeIterator::len`, which
// asserts that `size_hint()` has an exact upper bound — here `size_hint` is
// the default `(0, None)`, so calling `len()` on this iterator would panic.
// Presumably rayon only uses `IndexedParallelIterator::len` — verify.
impl<I> ExactSizeIterator for DepGraphProducer<I> where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static
{
}
impl<I> Producer for DepGraphProducer<I>
where
    I: Clone + fmt::Debug + Eq + Hash + PartialEq + Send + Sync + 'static,
{
    type Item = Wrapper<I>;
    type IntoIter = Self;

    /// The producer is its own iterator: hand it over as-is instead of
    /// rebuilding it from cloned channel handles (the clones were redundant).
    fn into_iter(self) -> Self::IntoIter {
        self
    }

    /// Split into two producers. The index is ignored: both halves pull from
    /// the same ready-channel, so work distributes dynamically regardless of
    /// where rayon asked to split.
    fn split_at(self, _: usize) -> (Self, Self) {
        let left = Self {
            counter: self.counter.clone(),
            item_ready_rx: self.item_ready_rx.clone(),
            item_done_tx: self.item_done_tx.clone(),
        };
        // The right half can simply take ownership of the original handles.
        (left, self)
    }
}
|
/// Tiny closure demo: computes 2 + 1 with a closure and prints `3`.
fn main() {
    let increment = |value: i32| value + 1;
    let result = increment(2);
    println!("{}", result);
}
|
use proconio::{fastout, input};
/// Two-pointer solution over two prefix-sum arrays: given `n` items with
/// costs `a_vec`, `m` items with costs `b_vec`, and a budget `k`, print the
/// maximum total number of items (a prefix of A plus a prefix of B) whose
/// combined cost stays within `k`.
/// NOTE(review): looks like an AtCoder-style "two bookshelves" problem —
/// inferred from the input shape, verify against the problem statement.
fn main() {
    input! {
        n: usize,
        m: usize,
        k: i64,
        a_vec: [i64; n],
        b_vec: [i64; m],
    }
    // Prefix sums of a_vec: a_acc_vec[i] = total cost of the first i items.
    let mut a_acc_vec: Vec<i64> = Vec::new();
    a_acc_vec.push(0);
    a_vec.iter().enumerate().for_each(|(i, a)| {
        a_acc_vec.push(a + a_acc_vec.get(i).expect("error at calculating a_acc_vec"))
    });
    // Prefix sums of b_vec, same layout (index 0 holds 0).
    let mut b_acc_vec: Vec<i64> = Vec::new();
    b_acc_vec.push(0);
    b_vec.iter().enumerate().for_each(|(i, b)| {
        b_acc_vec.push(b + b_acc_vec.get(i).expect("error at calculating b_acc_vec"))
    });
    // Two pointers: i (count from A) only increases, j (count from B) only
    // decreases, so the whole scan is O(n + m) rather than O(n * m).
    let (mut ans, mut j): (usize, usize) = (0, m);
    'out: for i in 0..=n {
        if let Some(a_acc) = a_acc_vec.get(i) {
            // Taking i items from A already busts the budget; larger i can
            // only be worse, so stop the whole scan.
            if *a_acc > k {
                break 'out;
            }
            // Shrink j until the first j items of B fit in the remaining
            // budget. j cannot underflow: b_acc_vec[0] == 0, and when this
            // loop runs we know k - *a_acc >= 0.
            'inner: while let Some(b_acc) = b_acc_vec.get(j) {
                if *b_acc > k - *a_acc {
                    j -= 1;
                } else {
                    break 'inner;
                }
            }
            ans = ans.max(i + j);
        }
    }
    println!("{}", ans);
}
|
//! libsdp is a small utility library for parsing the SDP protocol.
//! Mostly intended for SIP user agents.
extern crate nom;

// Line-level parsers for the individual SDP session-description lines
// (version, origin, session name, timing, connection, contact info).
mod lines;
pub use self::lines::{
    SdpVersion, parse_version, parse_version_line,
    SdpSessionName, parse_session_name, parse_session_name_line,
    SdpOrigin, parse_origin, parse_origin_line, SdpTiming,
    parse_timing, parse_time_line, SdpConnection, parse_connection,
    parse_connection_name, parse_phone_line, parse_email_line,
    parse_uri_line, parse_information_line
};

// Parsers for "a=" attribute lines, both session-global and media-level.
mod attributes;
pub use self::attributes::{
    SdpAttribute, SdpAttributeType, SdpOptionalAttribute,
    parse_attribute_type, parse_global_attribute,
    parse_global_attributes, RtpMap
};

// "m=" media-section parsing (media type, formats, encodings).
mod media;
pub use self::media::{
    SdpMedia, SdpMediaType, SdpMediaFormat,
    parse_media, parse_media_lines, SdpEncoding
};

// Shared low-level value types used across line parsers.
mod core;
pub use self::core::{
    SdpNetworkType, parse_network_type,
    SdpAddressType, parse_address_type,
    SdpCodecIdentifier, parse_codec_identifier,
    SdpBandwidth, parse_bandwidth_line,
    parse_bandwidth, SdpTransport, parse_transport
};

// Whole-offer parsing: ties the line parsers together.
mod offer;
pub use self::offer::SdpOffer;
pub use self::offer::parse_sdp_offer;

// Post-parse validation / normalization of offers.
mod sanitizer;
pub use self::sanitizer::SdpSanitizer;
pub use self::sanitizer::SanitizerError;
pub use self::sanitizer::SdpSanitizerConfig;

// Internal parsing helpers shared by the modules above.
pub(crate) mod parse;
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// An account resource (ARM tracked resource with account properties).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Account {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AccountProperties>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Partial-update (PATCH) shape of an account; location/tags are optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PatchAccount {
    #[serde(flatten)]
    pub patch_tracked_resource: PatchTrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<AccountProperties>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Properties payload of an account.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccountProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
/// One page of accounts; `next_link` points at the next page, if any.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccountList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Account>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// An enterprise policy resource; note `kind` is required (not Option).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnterprisePolicy {
    #[serde(flatten)]
    pub tracked_resource: TrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<EnterprisePolicyIdentity>,
    pub kind: EnterprisePolicyKind,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<Properties>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Partial-update (PATCH) shape of an enterprise policy; all fields optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PatchEnterprisePolicy {
    #[serde(flatten)]
    pub patch_tracked_resource: PatchTrackedResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub identity: Option<EnterprisePolicyIdentity>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub kind: Option<EnterprisePolicyKind>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<Properties>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Enterprise-policy settings: lockbox, encryption and network injection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Properties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub lockbox: Option<properties::Lockbox>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub encryption: Option<properties::Encryption>,
    #[serde(rename = "networkInjection", default, skip_serializing_if = "Option::is_none")]
    pub network_injection: Option<properties::NetworkInjection>,
}
/// Nested setting types for [`Properties`].
pub mod properties {
    use super::*;
    /// Customer-lockbox setting (enabled/disabled state).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Lockbox {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub state: Option<State>,
    }
    /// Customer-managed-key encryption setting backed by a Key Vault key.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Encryption {
        #[serde(rename = "keyVault", default, skip_serializing_if = "Option::is_none")]
        pub key_vault: Option<KeyVaultProperties>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub state: Option<State>,
    }
    /// Virtual-network injection setting.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct NetworkInjection {
        #[serde(rename = "virtualNetworks", default, skip_serializing_if = "Option::is_none")]
        pub virtual_networks: Option<VirtualNetworkPropertiesList>,
    }
}
/// Managed identity attached to an enterprise policy.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnterprisePolicyIdentity {
    #[serde(rename = "systemAssignedIdentityPrincipalId", default, skip_serializing_if = "Option::is_none")]
    pub system_assigned_identity_principal_id: Option<String>,
    #[serde(rename = "tenantId", default, skip_serializing_if = "Option::is_none")]
    pub tenant_id: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<enterprise_policy_identity::Type>,
}
/// Nested enum for [`EnterprisePolicyIdentity::type_`].
pub mod enterprise_policy_identity {
    use super::*;
    /// Identity kind: system-assigned or none.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        SystemAssigned,
        None,
    }
}
/// Generic on/off/unset state used by policy settings.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum State {
    Enabled,
    Disabled,
    NotConfigured,
}
/// Discriminator for the kind of enterprise policy.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum EnterprisePolicyKind {
    Lockbox,
    PrivateEndpoint,
    Encryption,
    NetworkInjection,
}
/// Reference to a Key Vault and the key used for encryption.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyVaultProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key: Option<KeyProperties>,
}
/// Name/version pair identifying a Key Vault key.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct KeyProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
}
/// One page of virtual-network references.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkPropertiesList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<VirtualNetworkProperties>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Reference to a virtual network and the subnet used for injection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct VirtualNetworkProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub subnet: Option<SubnetProperties>,
}
/// Subnet reference (by name) inside a virtual network.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SubnetProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// One page of enterprise policies.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EnterprisePolicyList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<EnterprisePolicy>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Unpaged list of private-endpoint connections.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateEndpointConnection>,
}
/// A private-endpoint connection sub-resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnection {
    #[serde(flatten)]
    pub proxy_resource: ProxyResource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateEndpointConnectionProperties>,
    #[serde(rename = "systemData", default, skip_serializing_if = "Option::is_none")]
    pub system_data: Option<SystemData>,
}
/// Standard ARM error envelope.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetail>,
}
/// Standard ARM error detail; recursive via `details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
    #[serde(rename = "additionalInfo", default, skip_serializing_if = "Vec::is_empty")]
    pub additional_info: Vec<ErrorAdditionalInfo>,
}
/// Free-form extra error info (`info` is arbitrary JSON).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorAdditionalInfo {
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub info: Option<serde_json::Value>,
}
/// Standard ARM `systemData` metadata: who created/modified the resource and when.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct SystemData {
    #[serde(rename = "createdBy", default, skip_serializing_if = "Option::is_none")]
    pub created_by: Option<String>,
    #[serde(rename = "createdByType", default, skip_serializing_if = "Option::is_none")]
    pub created_by_type: Option<system_data::CreatedByType>,
    #[serde(rename = "createdAt", default, skip_serializing_if = "Option::is_none")]
    pub created_at: Option<String>,
    #[serde(rename = "lastModifiedBy", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by: Option<String>,
    #[serde(rename = "lastModifiedByType", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_by_type: Option<system_data::LastModifiedByType>,
    #[serde(rename = "lastModifiedAt", default, skip_serializing_if = "Option::is_none")]
    pub last_modified_at: Option<String>,
}
/// Nested enums for [`SystemData`].
pub mod system_data {
    use super::*;
    /// Principal kind that created the resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum CreatedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
    /// Principal kind that last modified the resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum LastModifiedByType {
        User,
        Application,
        ManagedIdentity,
        Key,
    }
}
/// ARM tracked resource: base resource plus required `location` and tags.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct TrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    pub location: String,
}
/// ARM base resource: id, name and type as returned by the service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// PATCH shape of a tracked resource: `location` becomes optional.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PatchTrackedResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
}
/// One page of available REST operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A REST operation exposed by the resource provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<operation::Origin>,
    #[serde(rename = "actionType", default, skip_serializing_if = "Option::is_none")]
    pub action_type: Option<operation::ActionType>,
}
/// Nested types for [`Operation`].
pub mod operation {
    use super::*;
    /// Localized display strings for an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
    /// Who the operation is intended for (wire values are lowercase).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Origin {
        #[serde(rename = "user")]
        User,
        #[serde(rename = "system")]
        System,
        #[serde(rename = "user,system")]
        UserSystem,
    }
    /// Extra action classification.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum ActionType {
        Internal,
    }
}
/// Properties of a private-endpoint connection; the connection state is required.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpointConnectionProperties {
    #[serde(rename = "privateEndpoint", default, skip_serializing_if = "Option::is_none")]
    pub private_endpoint: Option<PrivateEndpoint>,
    #[serde(rename = "privateLinkServiceConnectionState")]
    pub private_link_service_connection_state: PrivateLinkServiceConnectionState,
    #[serde(rename = "provisioningState", default, skip_serializing_if = "Option::is_none")]
    pub provisioning_state: Option<PrivateEndpointConnectionProvisioningState>,
}
/// Reference (by ARM id) to the private endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateEndpoint {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
/// Approval state of the connection between consumer and service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkServiceConnectionState {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<PrivateEndpointServiceConnectionStatus>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(rename = "actionsRequired", default, skip_serializing_if = "Option::is_none")]
    pub actions_required: Option<String>,
}
/// Connection approval status.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointServiceConnectionStatus {
    Pending,
    Approved,
    Rejected,
}
/// Provisioning lifecycle state of a private-endpoint connection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum PrivateEndpointConnectionProvisioningState {
    Succeeded,
    Creating,
    Deleting,
    Failed,
}
/// ARM proxy resource: a base resource with no location/tags of its own.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ProxyResource {
    #[serde(flatten)]
    pub resource: Resource,
}
/// Unpaged list of private-link resources.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PrivateLinkResource>,
}
/// A private-link resource exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResource {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PrivateLinkResourceProperties>,
}
/// Group id and DNS requirements of a private-link resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct PrivateLinkResourceProperties {
    #[serde(rename = "groupId", default, skip_serializing_if = "Option::is_none")]
    pub group_id: Option<String>,
    #[serde(rename = "requiredMembers", default, skip_serializing_if = "Vec::is_empty")]
    pub required_members: Vec<String>,
    #[serde(rename = "requiredZoneNames", default, skip_serializing_if = "Vec::is_empty")]
    pub required_zone_names: Vec<String>,
}
|
#[doc = r"Register block"]
// svd2rust-generated memory-mapped layout of the DDRCTRL peripheral. The
// declaration order and the `_reservedN` padding arrays encode each
// register's absolute byte offset from the peripheral base address (the
// offsets are stated in the per-field docs) — never reorder, insert, or
// resize fields by hand; regenerate from the SVD instead.
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - DDRCTRL master register 0"]
    pub ddrctrl_mstr: DDRCTRL_MSTR,
    #[doc = "0x04 - DDRCTRL operating mode status register"]
    pub ddrctrl_stat: DDRCTRL_STAT,
    _reserved2: [u8; 8usize],
    #[doc = "0x10 - Mode Register Read/Write Control Register 0. Note: Do not enable more than one of the following fields simultaneously: sw_init_int pda_en mpr_en"]
    pub ddrctrl_mrctrl0: DDRCTRL_MRCTRL0,
    #[doc = "0x14 - DDRCTRL mode register read/write control register 1"]
    pub ddrctrl_mrctrl1: DDRCTRL_MRCTRL1,
    #[doc = "0x18 - DDRCTRL mode register read/write status register"]
    pub ddrctrl_mrstat: DDRCTRL_MRSTAT,
    _reserved5: [u8; 4usize],
    #[doc = "0x20 - DDRCTRL temperature derate enable register"]
    pub ddrctrl_derateen: DDRCTRL_DERATEEN,
    #[doc = "0x24 - DDRCTRL temperature derate interval register"]
    pub ddrctrl_derateint: DDRCTRL_DERATEINT,
    _reserved7: [u8; 8usize],
    #[doc = "0x30 - DDRCTRL low power control register"]
    pub ddrctrl_pwrctl: DDRCTRL_PWRCTL,
    #[doc = "0x34 - DDRCTRL low power timing register"]
    pub ddrctrl_pwrtmg: DDRCTRL_PWRTMG,
    #[doc = "0x38 - DDRCTRL hardware low power control register"]
    pub ddrctrl_hwlpctl: DDRCTRL_HWLPCTL,
    _reserved10: [u8; 20usize],
    #[doc = "0x50 - DDRCTRL refresh control register 0"]
    pub ddrctrl_rfshctl0: DDRCTRL_RFSHCTL0,
    _reserved11: [u8; 12usize],
    #[doc = "0x60 - DDRCTRL refresh control register 3"]
    pub ddrctrl_rfshctl3: DDRCTRL_RFSHCTL3,
    #[doc = "0x64 - DDRCTRL refresh timing register"]
    pub ddrctrl_rfshtmg: DDRCTRL_RFSHTMG,
    _reserved13: [u8; 88usize],
    #[doc = "0xc0 - DDRCTRL CRC parity control register 0"]
    pub ddrctrl_crcparctl0: DDRCTRL_CRCPARCTL0,
    _reserved14: [u8; 8usize],
    #[doc = "0xcc - DDRCTRL CRC parity status register"]
    pub ddrctrl_crcparstat: DDRCTRL_CRCPARSTAT,
    #[doc = "0xd0 - DDRCTRL SDRAM initialization register 0"]
    pub ddrctrl_init0: DDRCTRL_INIT0,
    #[doc = "0xd4 - DDRCTRL SDRAM initialization register 1"]
    pub ddrctrl_init1: DDRCTRL_INIT1,
    #[doc = "0xd8 - DDRCTRL SDRAM initialization register 2"]
    pub ddrctrl_init2: DDRCTRL_INIT2,
    #[doc = "0xdc - DDRCTRL SDRAM initialization register 3"]
    pub ddrctrl_init3: DDRCTRL_INIT3,
    #[doc = "0xe0 - DDRCTRL SDRAM initialization register 4"]
    pub ddrctrl_init4: DDRCTRL_INIT4,
    #[doc = "0xe4 - DDRCTRL SDRAM initialization register 5"]
    pub ddrctrl_init5: DDRCTRL_INIT5,
    _reserved21: [u8; 8usize],
    #[doc = "0xf0 - DDRCTRL DIMM control register"]
    pub ddrctrl_dimmctl: DDRCTRL_DIMMCTL,
    _reserved22: [u8; 12usize],
    #[doc = "0x100 - DDRCTRL SDRAM timing register 0"]
    pub ddrctrl_dramtmg0: DDRCTRL_DRAMTMG0,
    #[doc = "0x104 - DDRCTRL SDRAM timing register 1"]
    pub ddrctrl_dramtmg1: DDRCTRL_DRAMTMG1,
    #[doc = "0x108 - DDRCTRL SDRAM timing register 2"]
    pub ddrctrl_dramtmg2: DDRCTRL_DRAMTMG2,
    #[doc = "0x10c - DDRCTRL SDRAM timing register 3"]
    pub ddrctrl_dramtmg3: DDRCTRL_DRAMTMG3,
    #[doc = "0x110 - DDRCTRL SDRAM timing register 4"]
    pub ddrctrl_dramtmg4: DDRCTRL_DRAMTMG4,
    #[doc = "0x114 - DDRCTRL SDRAM timing register 5"]
    pub ddrctrl_dramtmg5: DDRCTRL_DRAMTMG5,
    #[doc = "0x118 - DDRCTRL SDRAM timing register 6"]
    pub ddrctrl_dramtmg6: DDRCTRL_DRAMTMG6,
    #[doc = "0x11c - DDRCTRL SDRAM timing register 7"]
    pub ddrctrl_dramtmg7: DDRCTRL_DRAMTMG7,
    #[doc = "0x120 - DDRCTRL SDRAM timing register 8"]
    pub ddrctrl_dramtmg8: DDRCTRL_DRAMTMG8,
    _reserved31: [u8; 20usize],
    #[doc = "0x138 - DDRCTRL SDRAM timing register 14"]
    pub ddrctrl_dramtmg14: DDRCTRL_DRAMTMG14,
    #[doc = "0x13c - DDRCTRL SDRAM timing register 15"]
    pub ddrctrl_dramtmg15: DDRCTRL_DRAMTMG15,
    _reserved33: [u8; 64usize],
    #[doc = "0x180 - DDRCTRL ZQ control register 0"]
    pub ddrctrl_zqctl0: DDRCTRL_ZQCTL0,
    #[doc = "0x184 - DDRCTRL ZQ control register 1"]
    pub ddrctrl_zqctl1: DDRCTRL_ZQCTL1,
    #[doc = "0x188 - DDRCTRL ZQ control register 2"]
    pub ddrctrl_zqctl2: DDRCTRL_ZQCTL2,
    #[doc = "0x18c - DDRCTRL ZQ status register"]
    pub ddrctrl_zqstat: DDRCTRL_ZQSTAT,
    #[doc = "0x190 - DDRCTRL DFI timing register 0"]
    pub ddrctrl_dfitmg0: DDRCTRL_DFITMG0,
    #[doc = "0x194 - DDRCTRL DFI timing register 1"]
    pub ddrctrl_dfitmg1: DDRCTRL_DFITMG1,
    #[doc = "0x198 - DDRCTRL low power configuration register 0"]
    pub ddrctrl_dfilpcfg0: DDRCTRL_DFILPCFG0,
    _reserved40: [u8; 4usize],
    #[doc = "0x1a0 - DDRCTRL DFI update register 0"]
    pub ddrctrl_dfiupd0: DDRCTRL_DFIUPD0,
    #[doc = "0x1a4 - DDRCTRL DFI update register 1"]
    pub ddrctrl_dfiupd1: DDRCTRL_DFIUPD1,
    #[doc = "0x1a8 - DDRCTRL DFI update register 2"]
    pub ddrctrl_dfiupd2: DDRCTRL_DFIUPD2,
    _reserved43: [u8; 4usize],
    #[doc = "0x1b0 - DDRCTRL DFI miscellaneous control register"]
    pub ddrctrl_dfimisc: DDRCTRL_DFIMISC,
    _reserved44: [u8; 8usize],
    #[doc = "0x1bc - DDRCTRL DFI status register"]
    pub ddrctrl_dfistat: DDRCTRL_DFISTAT,
    _reserved45: [u8; 4usize],
    #[doc = "0x1c4 - DDRCTRL DFI PHY master register"]
    pub ddrctrl_dfiphymstr: DDRCTRL_DFIPHYMSTR,
    _reserved46: [u8; 60usize],
    #[doc = "0x204 - DDRCTRL address map register 1"]
    pub ddrctrl_addrmap1: DDRCTRL_ADDRMAP1,
    #[doc = "0x208 - DDRCTRL address map register 2"]
    pub ddrctrl_addrmap2: DDRCTRL_ADDRMAP2,
    #[doc = "0x20c - DDRCTRL address map register 3"]
    pub ddrctrl_addrmap3: DDRCTRL_ADDRMAP3,
    #[doc = "0x210 - DDRCTRL address map register 4"]
    pub ddrctrl_addrmap4: DDRCTRL_ADDRMAP4,
    #[doc = "0x214 - DDRCTRL address map register 5"]
    pub ddrctrl_addrmap5: DDRCTRL_ADDRMAP5,
    #[doc = "0x218 - DDRCTRL address register 6"]
    pub ddrctrl_addrmap6: DDRCTRL_ADDRMAP6,
    _reserved52: [u8; 8usize],
    #[doc = "0x224 - DDRCTRL address map register 9"]
    pub ddrctrl_addrmap9: DDRCTRL_ADDRMAP9,
    #[doc = "0x228 - DDRCTRL address map register 10"]
    pub ddrctrl_addrmap10: DDRCTRL_ADDRMAP10,
    #[doc = "0x22c - DDRCTRL address map register 11"]
    pub ddrctrl_addrmap11: DDRCTRL_ADDRMAP11,
    _reserved55: [u8; 16usize],
    #[doc = "0x240 - DDRCTRL ODT configuration register"]
    pub ddrctrl_odtcfg: DDRCTRL_ODTCFG,
    #[doc = "0x244 - DDRCTRL ODT/Rank map register"]
    pub ddrctrl_odtmap: DDRCTRL_ODTMAP,
    _reserved57: [u8; 8usize],
    #[doc = "0x250 - DDRCTRL scheduler control register"]
    pub ddrctrl_sched: DDRCTRL_SCHED,
    #[doc = "0x254 - DDRCTRL scheduler control register 1"]
    pub ddrctrl_sched1: DDRCTRL_SCHED1,
    _reserved59: [u8; 4usize],
    #[doc = "0x25c - DDRCTRL high priority read CAM register 1"]
    pub ddrctrl_perfhpr1: DDRCTRL_PERFHPR1,
    _reserved60: [u8; 4usize],
    #[doc = "0x264 - DDRCTRL low priority read CAM register 1"]
    pub ddrctrl_perflpr1: DDRCTRL_PERFLPR1,
    _reserved61: [u8; 4usize],
    #[doc = "0x26c - DDRCTRL write CAM register 1"]
    pub ddrctrl_perfwr1: DDRCTRL_PERFWR1,
    _reserved62: [u8; 144usize],
    #[doc = "0x300 - DDRCTRL debug register 0"]
    pub ddrctrl_dbg0: DDRCTRL_DBG0,
    #[doc = "0x304 - DDRCTRL debug register 1"]
    pub ddrctrl_dbg1: DDRCTRL_DBG1,
    #[doc = "0x308 - DDRCTRL CAM debug register"]
    pub ddrctrl_dbgcam: DDRCTRL_DBGCAM,
    #[doc = "0x30c - DDRCTRL command debug register"]
    pub ddrctrl_dbgcmd: DDRCTRL_DBGCMD,
    #[doc = "0x310 - DDRCTRL status debug register"]
    pub ddrctrl_dbgstat: DDRCTRL_DBGSTAT,
    _reserved67: [u8; 12usize],
    #[doc = "0x320 - DDRCTRL software register programming control enable"]
    pub ddrctrl_swctl: DDRCTRL_SWCTL,
    #[doc = "0x324 - DDRCTRL software register programming control status"]
    pub ddrctrl_swstat: DDRCTRL_SWSTAT,
    _reserved69: [u8; 68usize],
    #[doc = "0x36c - AXI Poison configuration register common for all AXI ports."]
    pub ddrctrl_poisoncfg: DDRCTRL_POISONCFG,
    #[doc = "0x370 - DDRCTRL AXI Poison status register"]
    pub ddrctrl_poisonstat: DDRCTRL_POISONSTAT,
    _reserved71: [u8; 136usize],
    #[doc = "0x3fc - DDRCTRL port status register"]
    pub ddrctrl_pstat: DDRCTRL_PSTAT,
    #[doc = "0x400 - DDRCTRL port common configuration register"]
    pub ddrctrl_pccfg: DDRCTRL_PCCFG,
    #[doc = "0x404 - DDRCTRL port n configuration read register"]
    pub ddrctrl_pcfgr_0: DDRCTRL_PCFGR_0,
    #[doc = "0x408 - DDRCTRL port n configuration write register"]
    pub ddrctrl_pcfgw_0: DDRCTRL_PCFGW_0,
    _reserved75: [u8; 132usize],
    #[doc = "0x490 - DDRCTRL port n control register"]
    pub ddrctrl_pctrl_0: DDRCTRL_PCTRL_0,
    #[doc = "0x494 - DDRCTRL port n read Q0S configuration register 0"]
    pub ddrctrl_pcfgqos0_0: DDRCTRL_PCFGQOS0_0,
    #[doc = "0x498 - DDRCTRL port n read Q0S configuration register 1"]
    pub ddrctrl_pcfgqos1_0: DDRCTRL_PCFGQOS1_0,
    #[doc = "0x49c - DDRCTRL port n write Q0S configuration register 0"]
    pub ddrctrl_pcfgwqos0_0: DDRCTRL_PCFGWQOS0_0,
    #[doc = "0x4a0 - DDRCTRL port n write Q0S configuration register 1"]
    pub ddrctrl_pcfgwqos1_0: DDRCTRL_PCFGWQOS1_0,
    _reserved80: [u8; 16usize],
    #[doc = "0x4b4 - DDRCTRL port n configuration read register"]
    pub ddrctrl_pcfgr_1: DDRCTRL_PCFGR_1,
    #[doc = "0x4b8 - DDRCTRL port n configuration write register"]
    pub ddrctrl_pcfgw_1: DDRCTRL_PCFGW_1,
    _reserved82: [u8; 132usize],
    #[doc = "0x540 - DDRCTRL port n control register"]
    pub ddrctrl_pctrl_1: DDRCTRL_PCTRL_1,
    #[doc = "0x544 - DDRCTRL port n read Q0S configuration register 0"]
    pub ddrctrl_pcfgqos0_1: DDRCTRL_PCFGQOS0_1,
    #[doc = "0x548 - DDRCTRL port n read Q0S configuration register 1"]
    pub ddrctrl_pcfgqos1_1: DDRCTRL_PCFGQOS1_1,
    #[doc = "0x54c - DDRCTRL port n write Q0S configuration register 0"]
    pub ddrctrl_pcfgwqos0_1: DDRCTRL_PCFGWQOS0_1,
    #[doc = "0x550 - DDRCTRL port n write Q0S configuration register 1"]
    pub ddrctrl_pcfgwqos1_1: DDRCTRL_PCFGWQOS1_1,
}
// DDRCTRL_MSTR bindings: type alias over the generic register wrapper with hidden marker
// type `_DDRCTRL_MSTR`; `Readable`/`Writable` impls enable `read()`/`write()`/`modify()`;
// bit-field reader/writer types live in the `ddrctrl_mstr` submodule.
#[doc = "DDRCTRL master register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_mstr](ddrctrl_mstr) module"]
pub type DDRCTRL_MSTR = crate::Reg<u32, _DDRCTRL_MSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_MSTR;
#[doc = "`read()` method returns [ddrctrl_mstr::R](ddrctrl_mstr::R) reader structure"]
impl crate::Readable for DDRCTRL_MSTR {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_mstr::W](ddrctrl_mstr::W) writer structure"]
impl crate::Writable for DDRCTRL_MSTR {}
#[doc = "DDRCTRL master register 0"]
pub mod ddrctrl_mstr;
// DDRCTRL_STAT bindings: read-only register — only `Readable` is implemented (no
// `Writable`); field types live in the `ddrctrl_stat` submodule.
#[doc = "DDRCTRL operating mode status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_stat](ddrctrl_stat) module"]
pub type DDRCTRL_STAT = crate::Reg<u32, _DDRCTRL_STAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_STAT;
#[doc = "`read()` method returns [ddrctrl_stat::R](ddrctrl_stat::R) reader structure"]
impl crate::Readable for DDRCTRL_STAT {}
#[doc = "DDRCTRL operating mode status register"]
pub mod ddrctrl_stat;
// DDRCTRL_MRCTRL0 bindings: read/write register (per the doc note, sw_init_int, pda_en
// and mpr_en must not be enabled simultaneously); field types in `ddrctrl_mrctrl0`.
#[doc = "Mode Register Read/Write Control Register 0. Note: Do not enable more than one of the following fields simultaneously: sw_init_int pda_en mpr_en\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_mrctrl0](ddrctrl_mrctrl0) module"]
pub type DDRCTRL_MRCTRL0 = crate::Reg<u32, _DDRCTRL_MRCTRL0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_MRCTRL0;
#[doc = "`read()` method returns [ddrctrl_mrctrl0::R](ddrctrl_mrctrl0::R) reader structure"]
impl crate::Readable for DDRCTRL_MRCTRL0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_mrctrl0::W](ddrctrl_mrctrl0::W) writer structure"]
impl crate::Writable for DDRCTRL_MRCTRL0 {}
#[doc = "Mode Register Read/Write Control Register 0. Note: Do not enable more than one of the following fields simultaneously: sw_init_int pda_en mpr_en"]
pub mod ddrctrl_mrctrl0;
// DDRCTRL_MRCTRL1 bindings: read/write mode-register control register 1; field types
// live in the `ddrctrl_mrctrl1` submodule.
#[doc = "DDRCTRL mode register read/write control register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_mrctrl1](ddrctrl_mrctrl1) module"]
pub type DDRCTRL_MRCTRL1 = crate::Reg<u32, _DDRCTRL_MRCTRL1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_MRCTRL1;
#[doc = "`read()` method returns [ddrctrl_mrctrl1::R](ddrctrl_mrctrl1::R) reader structure"]
impl crate::Readable for DDRCTRL_MRCTRL1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_mrctrl1::W](ddrctrl_mrctrl1::W) writer structure"]
impl crate::Writable for DDRCTRL_MRCTRL1 {}
#[doc = "DDRCTRL mode register read/write control register 1"]
pub mod ddrctrl_mrctrl1;
// DDRCTRL_MRSTAT bindings: read-only status register — only `Readable` is implemented;
// field types live in the `ddrctrl_mrstat` submodule.
#[doc = "DDRCTRL mode register read/write status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_mrstat](ddrctrl_mrstat) module"]
pub type DDRCTRL_MRSTAT = crate::Reg<u32, _DDRCTRL_MRSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_MRSTAT;
#[doc = "`read()` method returns [ddrctrl_mrstat::R](ddrctrl_mrstat::R) reader structure"]
impl crate::Readable for DDRCTRL_MRSTAT {}
#[doc = "DDRCTRL mode register read/write status register"]
pub mod ddrctrl_mrstat;
// DDRCTRL_DERATEEN bindings: read/write temperature derate enable register; field types
// live in the `ddrctrl_derateen` submodule.
#[doc = "DDRCTRL temperature derate enable register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_derateen](ddrctrl_derateen) module"]
pub type DDRCTRL_DERATEEN = crate::Reg<u32, _DDRCTRL_DERATEEN>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DERATEEN;
#[doc = "`read()` method returns [ddrctrl_derateen::R](ddrctrl_derateen::R) reader structure"]
impl crate::Readable for DDRCTRL_DERATEEN {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_derateen::W](ddrctrl_derateen::W) writer structure"]
impl crate::Writable for DDRCTRL_DERATEEN {}
#[doc = "DDRCTRL temperature derate enable register"]
pub mod ddrctrl_derateen;
// DDRCTRL_DERATEINT bindings: read/write temperature derate interval register; field
// types live in the `ddrctrl_derateint` submodule.
#[doc = "DDRCTRL temperature derate interval register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_derateint](ddrctrl_derateint) module"]
pub type DDRCTRL_DERATEINT = crate::Reg<u32, _DDRCTRL_DERATEINT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DERATEINT;
#[doc = "`read()` method returns [ddrctrl_derateint::R](ddrctrl_derateint::R) reader structure"]
impl crate::Readable for DDRCTRL_DERATEINT {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_derateint::W](ddrctrl_derateint::W) writer structure"]
impl crate::Writable for DDRCTRL_DERATEINT {}
#[doc = "DDRCTRL temperature derate interval register"]
pub mod ddrctrl_derateint;
// DDRCTRL_PWRCTL bindings: read/write low power control register; field types live in
// the `ddrctrl_pwrctl` submodule.
#[doc = "DDRCTRL low power control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pwrctl](ddrctrl_pwrctl) module"]
pub type DDRCTRL_PWRCTL = crate::Reg<u32, _DDRCTRL_PWRCTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PWRCTL;
#[doc = "`read()` method returns [ddrctrl_pwrctl::R](ddrctrl_pwrctl::R) reader structure"]
impl crate::Readable for DDRCTRL_PWRCTL {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pwrctl::W](ddrctrl_pwrctl::W) writer structure"]
impl crate::Writable for DDRCTRL_PWRCTL {}
#[doc = "DDRCTRL low power control register"]
pub mod ddrctrl_pwrctl;
// DDRCTRL_PWRTMG bindings: read/write low power timing register; field types live in
// the `ddrctrl_pwrtmg` submodule.
#[doc = "DDRCTRL low power timing register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pwrtmg](ddrctrl_pwrtmg) module"]
pub type DDRCTRL_PWRTMG = crate::Reg<u32, _DDRCTRL_PWRTMG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PWRTMG;
#[doc = "`read()` method returns [ddrctrl_pwrtmg::R](ddrctrl_pwrtmg::R) reader structure"]
impl crate::Readable for DDRCTRL_PWRTMG {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pwrtmg::W](ddrctrl_pwrtmg::W) writer structure"]
impl crate::Writable for DDRCTRL_PWRTMG {}
#[doc = "DDRCTRL low power timing register"]
pub mod ddrctrl_pwrtmg;
// DDRCTRL_HWLPCTL bindings: read/write hardware low power control register; field types
// live in the `ddrctrl_hwlpctl` submodule.
#[doc = "DDRCTRL hardware low power control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_hwlpctl](ddrctrl_hwlpctl) module"]
pub type DDRCTRL_HWLPCTL = crate::Reg<u32, _DDRCTRL_HWLPCTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_HWLPCTL;
#[doc = "`read()` method returns [ddrctrl_hwlpctl::R](ddrctrl_hwlpctl::R) reader structure"]
impl crate::Readable for DDRCTRL_HWLPCTL {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_hwlpctl::W](ddrctrl_hwlpctl::W) writer structure"]
impl crate::Writable for DDRCTRL_HWLPCTL {}
#[doc = "DDRCTRL hardware low power control register"]
pub mod ddrctrl_hwlpctl;
// DDRCTRL_RFSHCTL0 bindings: read/write refresh control register 0; field types live in
// the `ddrctrl_rfshctl0` submodule.
#[doc = "DDRCTRL refresh control register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_rfshctl0](ddrctrl_rfshctl0) module"]
pub type DDRCTRL_RFSHCTL0 = crate::Reg<u32, _DDRCTRL_RFSHCTL0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_RFSHCTL0;
#[doc = "`read()` method returns [ddrctrl_rfshctl0::R](ddrctrl_rfshctl0::R) reader structure"]
impl crate::Readable for DDRCTRL_RFSHCTL0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_rfshctl0::W](ddrctrl_rfshctl0::W) writer structure"]
impl crate::Writable for DDRCTRL_RFSHCTL0 {}
#[doc = "DDRCTRL refresh control register 0"]
pub mod ddrctrl_rfshctl0;
// DDRCTRL_RFSHCTL3 bindings: read/write refresh control register 3; field types live in
// the `ddrctrl_rfshctl3` submodule.
#[doc = "DDRCTRL refresh control register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_rfshctl3](ddrctrl_rfshctl3) module"]
pub type DDRCTRL_RFSHCTL3 = crate::Reg<u32, _DDRCTRL_RFSHCTL3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_RFSHCTL3;
#[doc = "`read()` method returns [ddrctrl_rfshctl3::R](ddrctrl_rfshctl3::R) reader structure"]
impl crate::Readable for DDRCTRL_RFSHCTL3 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_rfshctl3::W](ddrctrl_rfshctl3::W) writer structure"]
impl crate::Writable for DDRCTRL_RFSHCTL3 {}
#[doc = "DDRCTRL refresh control register 3"]
pub mod ddrctrl_rfshctl3;
// DDRCTRL_RFSHTMG bindings: read/write refresh timing register; field types live in the
// `ddrctrl_rfshtmg` submodule.
#[doc = "DDRCTRL refresh timing register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_rfshtmg](ddrctrl_rfshtmg) module"]
pub type DDRCTRL_RFSHTMG = crate::Reg<u32, _DDRCTRL_RFSHTMG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_RFSHTMG;
#[doc = "`read()` method returns [ddrctrl_rfshtmg::R](ddrctrl_rfshtmg::R) reader structure"]
impl crate::Readable for DDRCTRL_RFSHTMG {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_rfshtmg::W](ddrctrl_rfshtmg::W) writer structure"]
impl crate::Writable for DDRCTRL_RFSHTMG {}
#[doc = "DDRCTRL refresh timing register"]
pub mod ddrctrl_rfshtmg;
// DDRCTRL_CRCPARCTL0 bindings: read/write CRC parity control register 0; field types
// live in the `ddrctrl_crcparctl0` submodule.
#[doc = "DDRCTRL CRC parity control register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_crcparctl0](ddrctrl_crcparctl0) module"]
pub type DDRCTRL_CRCPARCTL0 = crate::Reg<u32, _DDRCTRL_CRCPARCTL0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_CRCPARCTL0;
#[doc = "`read()` method returns [ddrctrl_crcparctl0::R](ddrctrl_crcparctl0::R) reader structure"]
impl crate::Readable for DDRCTRL_CRCPARCTL0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_crcparctl0::W](ddrctrl_crcparctl0::W) writer structure"]
impl crate::Writable for DDRCTRL_CRCPARCTL0 {}
#[doc = "DDRCTRL CRC parity control register 0"]
pub mod ddrctrl_crcparctl0;
// DDRCTRL_CRCPARSTAT bindings: read-only CRC parity status register — only `Readable`
// is implemented; field types live in the `ddrctrl_crcparstat` submodule.
#[doc = "DDRCTRL CRC parity status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_crcparstat](ddrctrl_crcparstat) module"]
pub type DDRCTRL_CRCPARSTAT = crate::Reg<u32, _DDRCTRL_CRCPARSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_CRCPARSTAT;
#[doc = "`read()` method returns [ddrctrl_crcparstat::R](ddrctrl_crcparstat::R) reader structure"]
impl crate::Readable for DDRCTRL_CRCPARSTAT {}
#[doc = "DDRCTRL CRC parity status register"]
pub mod ddrctrl_crcparstat;
// DDRCTRL_INIT0 bindings: read/write SDRAM initialization register 0; field types live
// in the `ddrctrl_init0` submodule.
#[doc = "DDRCTRL SDRAM initialization register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_init0](ddrctrl_init0) module"]
pub type DDRCTRL_INIT0 = crate::Reg<u32, _DDRCTRL_INIT0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_INIT0;
#[doc = "`read()` method returns [ddrctrl_init0::R](ddrctrl_init0::R) reader structure"]
impl crate::Readable for DDRCTRL_INIT0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_init0::W](ddrctrl_init0::W) writer structure"]
impl crate::Writable for DDRCTRL_INIT0 {}
#[doc = "DDRCTRL SDRAM initialization register 0"]
pub mod ddrctrl_init0;
// DDRCTRL_INIT1 bindings: read/write SDRAM initialization register 1; field types live
// in the `ddrctrl_init1` submodule.
#[doc = "DDRCTRL SDRAM initialization register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_init1](ddrctrl_init1) module"]
pub type DDRCTRL_INIT1 = crate::Reg<u32, _DDRCTRL_INIT1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_INIT1;
#[doc = "`read()` method returns [ddrctrl_init1::R](ddrctrl_init1::R) reader structure"]
impl crate::Readable for DDRCTRL_INIT1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_init1::W](ddrctrl_init1::W) writer structure"]
impl crate::Writable for DDRCTRL_INIT1 {}
#[doc = "DDRCTRL SDRAM initialization register 1"]
pub mod ddrctrl_init1;
// DDRCTRL_INIT2 bindings: read/write SDRAM initialization register 2; field types live
// in the `ddrctrl_init2` submodule.
#[doc = "DDRCTRL SDRAM initialization register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_init2](ddrctrl_init2) module"]
pub type DDRCTRL_INIT2 = crate::Reg<u32, _DDRCTRL_INIT2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_INIT2;
#[doc = "`read()` method returns [ddrctrl_init2::R](ddrctrl_init2::R) reader structure"]
impl crate::Readable for DDRCTRL_INIT2 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_init2::W](ddrctrl_init2::W) writer structure"]
impl crate::Writable for DDRCTRL_INIT2 {}
#[doc = "DDRCTRL SDRAM initialization register 2"]
pub mod ddrctrl_init2;
// DDRCTRL_INIT3 bindings: read/write SDRAM initialization register 3; field types live
// in the `ddrctrl_init3` submodule.
#[doc = "DDRCTRL SDRAM initialization register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_init3](ddrctrl_init3) module"]
pub type DDRCTRL_INIT3 = crate::Reg<u32, _DDRCTRL_INIT3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_INIT3;
#[doc = "`read()` method returns [ddrctrl_init3::R](ddrctrl_init3::R) reader structure"]
impl crate::Readable for DDRCTRL_INIT3 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_init3::W](ddrctrl_init3::W) writer structure"]
impl crate::Writable for DDRCTRL_INIT3 {}
#[doc = "DDRCTRL SDRAM initialization register 3"]
pub mod ddrctrl_init3;
// DDRCTRL_INIT4 bindings: read/write SDRAM initialization register 4; field types live
// in the `ddrctrl_init4` submodule.
#[doc = "DDRCTRL SDRAM initialization register 4\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_init4](ddrctrl_init4) module"]
pub type DDRCTRL_INIT4 = crate::Reg<u32, _DDRCTRL_INIT4>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_INIT4;
#[doc = "`read()` method returns [ddrctrl_init4::R](ddrctrl_init4::R) reader structure"]
impl crate::Readable for DDRCTRL_INIT4 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_init4::W](ddrctrl_init4::W) writer structure"]
impl crate::Writable for DDRCTRL_INIT4 {}
#[doc = "DDRCTRL SDRAM initialization register 4"]
pub mod ddrctrl_init4;
// DDRCTRL_INIT5 bindings: read/write SDRAM initialization register 5; field types live
// in the `ddrctrl_init5` submodule.
#[doc = "DDRCTRL SDRAM initialization register 5\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_init5](ddrctrl_init5) module"]
pub type DDRCTRL_INIT5 = crate::Reg<u32, _DDRCTRL_INIT5>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_INIT5;
#[doc = "`read()` method returns [ddrctrl_init5::R](ddrctrl_init5::R) reader structure"]
impl crate::Readable for DDRCTRL_INIT5 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_init5::W](ddrctrl_init5::W) writer structure"]
impl crate::Writable for DDRCTRL_INIT5 {}
#[doc = "DDRCTRL SDRAM initialization register 5"]
pub mod ddrctrl_init5;
// DDRCTRL_DIMMCTL bindings: read/write DIMM control register; field types live in the
// `ddrctrl_dimmctl` submodule.
#[doc = "DDRCTRL DIMM control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dimmctl](ddrctrl_dimmctl) module"]
pub type DDRCTRL_DIMMCTL = crate::Reg<u32, _DDRCTRL_DIMMCTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DIMMCTL;
#[doc = "`read()` method returns [ddrctrl_dimmctl::R](ddrctrl_dimmctl::R) reader structure"]
impl crate::Readable for DDRCTRL_DIMMCTL {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dimmctl::W](ddrctrl_dimmctl::W) writer structure"]
impl crate::Writable for DDRCTRL_DIMMCTL {}
#[doc = "DDRCTRL DIMM control register"]
pub mod ddrctrl_dimmctl;
// DDRCTRL_DRAMTMG0 bindings: read/write SDRAM timing register 0; field types live in
// the `ddrctrl_dramtmg0` submodule.
#[doc = "DDRCTRL SDRAM timing register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg0](ddrctrl_dramtmg0) module"]
pub type DDRCTRL_DRAMTMG0 = crate::Reg<u32, _DDRCTRL_DRAMTMG0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG0;
#[doc = "`read()` method returns [ddrctrl_dramtmg0::R](ddrctrl_dramtmg0::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg0::W](ddrctrl_dramtmg0::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG0 {}
#[doc = "DDRCTRL SDRAM timing register 0"]
pub mod ddrctrl_dramtmg0;
// DDRCTRL_DRAMTMG1 bindings: read/write SDRAM timing register 1; field types live in
// the `ddrctrl_dramtmg1` submodule.
#[doc = "DDRCTRL SDRAM timing register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg1](ddrctrl_dramtmg1) module"]
pub type DDRCTRL_DRAMTMG1 = crate::Reg<u32, _DDRCTRL_DRAMTMG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG1;
#[doc = "`read()` method returns [ddrctrl_dramtmg1::R](ddrctrl_dramtmg1::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg1::W](ddrctrl_dramtmg1::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG1 {}
#[doc = "DDRCTRL SDRAM timing register 1"]
pub mod ddrctrl_dramtmg1;
// DDRCTRL_DRAMTMG2 bindings: read/write SDRAM timing register 2; field types live in
// the `ddrctrl_dramtmg2` submodule.
#[doc = "DDRCTRL SDRAM timing register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg2](ddrctrl_dramtmg2) module"]
pub type DDRCTRL_DRAMTMG2 = crate::Reg<u32, _DDRCTRL_DRAMTMG2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG2;
#[doc = "`read()` method returns [ddrctrl_dramtmg2::R](ddrctrl_dramtmg2::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG2 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg2::W](ddrctrl_dramtmg2::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG2 {}
#[doc = "DDRCTRL SDRAM timing register 2"]
pub mod ddrctrl_dramtmg2;
// DDRCTRL_DRAMTMG3 bindings: read/write SDRAM timing register 3; field types live in
// the `ddrctrl_dramtmg3` submodule.
#[doc = "DDRCTRL SDRAM timing register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg3](ddrctrl_dramtmg3) module"]
pub type DDRCTRL_DRAMTMG3 = crate::Reg<u32, _DDRCTRL_DRAMTMG3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG3;
#[doc = "`read()` method returns [ddrctrl_dramtmg3::R](ddrctrl_dramtmg3::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG3 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg3::W](ddrctrl_dramtmg3::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG3 {}
#[doc = "DDRCTRL SDRAM timing register 3"]
pub mod ddrctrl_dramtmg3;
// DDRCTRL_DRAMTMG4 bindings: read/write SDRAM timing register 4; field types live in
// the `ddrctrl_dramtmg4` submodule.
#[doc = "DDRCTRL SDRAM timing register 4\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg4](ddrctrl_dramtmg4) module"]
pub type DDRCTRL_DRAMTMG4 = crate::Reg<u32, _DDRCTRL_DRAMTMG4>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG4;
#[doc = "`read()` method returns [ddrctrl_dramtmg4::R](ddrctrl_dramtmg4::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG4 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg4::W](ddrctrl_dramtmg4::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG4 {}
#[doc = "DDRCTRL SDRAM timing register 4"]
pub mod ddrctrl_dramtmg4;
// DDRCTRL_DRAMTMG5 bindings: read/write SDRAM timing register 5; field types live in
// the `ddrctrl_dramtmg5` submodule.
#[doc = "DDRCTRL SDRAM timing register 5\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg5](ddrctrl_dramtmg5) module"]
pub type DDRCTRL_DRAMTMG5 = crate::Reg<u32, _DDRCTRL_DRAMTMG5>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG5;
#[doc = "`read()` method returns [ddrctrl_dramtmg5::R](ddrctrl_dramtmg5::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG5 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg5::W](ddrctrl_dramtmg5::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG5 {}
#[doc = "DDRCTRL SDRAM timing register 5"]
pub mod ddrctrl_dramtmg5;
// DDRCTRL_DRAMTMG6 bindings: read/write SDRAM timing register 6; field types live in
// the `ddrctrl_dramtmg6` submodule.
#[doc = "DDRCTRL SDRAM timing register 6\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg6](ddrctrl_dramtmg6) module"]
pub type DDRCTRL_DRAMTMG6 = crate::Reg<u32, _DDRCTRL_DRAMTMG6>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG6;
#[doc = "`read()` method returns [ddrctrl_dramtmg6::R](ddrctrl_dramtmg6::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG6 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg6::W](ddrctrl_dramtmg6::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG6 {}
#[doc = "DDRCTRL SDRAM timing register 6"]
pub mod ddrctrl_dramtmg6;
// DDRCTRL_DRAMTMG7 bindings: read/write SDRAM timing register 7; field types live in
// the `ddrctrl_dramtmg7` submodule.
#[doc = "DDRCTRL SDRAM timing register 7\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg7](ddrctrl_dramtmg7) module"]
pub type DDRCTRL_DRAMTMG7 = crate::Reg<u32, _DDRCTRL_DRAMTMG7>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG7;
#[doc = "`read()` method returns [ddrctrl_dramtmg7::R](ddrctrl_dramtmg7::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG7 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg7::W](ddrctrl_dramtmg7::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG7 {}
#[doc = "DDRCTRL SDRAM timing register 7"]
pub mod ddrctrl_dramtmg7;
// DDRCTRL_DRAMTMG8 bindings: read/write SDRAM timing register 8; field types live in
// the `ddrctrl_dramtmg8` submodule.
#[doc = "DDRCTRL SDRAM timing register 8\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg8](ddrctrl_dramtmg8) module"]
pub type DDRCTRL_DRAMTMG8 = crate::Reg<u32, _DDRCTRL_DRAMTMG8>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG8;
#[doc = "`read()` method returns [ddrctrl_dramtmg8::R](ddrctrl_dramtmg8::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG8 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg8::W](ddrctrl_dramtmg8::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG8 {}
#[doc = "DDRCTRL SDRAM timing register 8"]
pub mod ddrctrl_dramtmg8;
// DDRCTRL_DRAMTMG14 bindings: read/write SDRAM timing register 14; field types live in
// the `ddrctrl_dramtmg14` submodule.
#[doc = "DDRCTRL SDRAM timing register 14\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg14](ddrctrl_dramtmg14) module"]
pub type DDRCTRL_DRAMTMG14 = crate::Reg<u32, _DDRCTRL_DRAMTMG14>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG14;
#[doc = "`read()` method returns [ddrctrl_dramtmg14::R](ddrctrl_dramtmg14::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG14 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg14::W](ddrctrl_dramtmg14::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG14 {}
#[doc = "DDRCTRL SDRAM timing register 14"]
pub mod ddrctrl_dramtmg14;
// DDRCTRL_DRAMTMG15 bindings: read/write SDRAM timing register 15; field types live in
// the `ddrctrl_dramtmg15` submodule.
#[doc = "DDRCTRL SDRAM timing register 15\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dramtmg15](ddrctrl_dramtmg15) module"]
pub type DDRCTRL_DRAMTMG15 = crate::Reg<u32, _DDRCTRL_DRAMTMG15>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DRAMTMG15;
#[doc = "`read()` method returns [ddrctrl_dramtmg15::R](ddrctrl_dramtmg15::R) reader structure"]
impl crate::Readable for DDRCTRL_DRAMTMG15 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dramtmg15::W](ddrctrl_dramtmg15::W) writer structure"]
impl crate::Writable for DDRCTRL_DRAMTMG15 {}
#[doc = "DDRCTRL SDRAM timing register 15"]
pub mod ddrctrl_dramtmg15;
// DDRCTRL_ZQCTL0 bindings: read/write ZQ control register 0; field types live in the
// `ddrctrl_zqctl0` submodule.
#[doc = "DDRCTRL ZQ control register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_zqctl0](ddrctrl_zqctl0) module"]
pub type DDRCTRL_ZQCTL0 = crate::Reg<u32, _DDRCTRL_ZQCTL0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ZQCTL0;
#[doc = "`read()` method returns [ddrctrl_zqctl0::R](ddrctrl_zqctl0::R) reader structure"]
impl crate::Readable for DDRCTRL_ZQCTL0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_zqctl0::W](ddrctrl_zqctl0::W) writer structure"]
impl crate::Writable for DDRCTRL_ZQCTL0 {}
#[doc = "DDRCTRL ZQ control register 0"]
pub mod ddrctrl_zqctl0;
// DDRCTRL_ZQCTL1 bindings: read/write ZQ control register 1; field types live in the
// `ddrctrl_zqctl1` submodule.
#[doc = "DDRCTRL ZQ control register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_zqctl1](ddrctrl_zqctl1) module"]
pub type DDRCTRL_ZQCTL1 = crate::Reg<u32, _DDRCTRL_ZQCTL1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ZQCTL1;
#[doc = "`read()` method returns [ddrctrl_zqctl1::R](ddrctrl_zqctl1::R) reader structure"]
impl crate::Readable for DDRCTRL_ZQCTL1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_zqctl1::W](ddrctrl_zqctl1::W) writer structure"]
impl crate::Writable for DDRCTRL_ZQCTL1 {}
#[doc = "DDRCTRL ZQ control register 1"]
pub mod ddrctrl_zqctl1;
// DDRCTRL_ZQCTL2 bindings: read/write ZQ control register 2; field types live in the
// `ddrctrl_zqctl2` submodule.
#[doc = "DDRCTRL ZQ control register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_zqctl2](ddrctrl_zqctl2) module"]
pub type DDRCTRL_ZQCTL2 = crate::Reg<u32, _DDRCTRL_ZQCTL2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ZQCTL2;
#[doc = "`read()` method returns [ddrctrl_zqctl2::R](ddrctrl_zqctl2::R) reader structure"]
impl crate::Readable for DDRCTRL_ZQCTL2 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_zqctl2::W](ddrctrl_zqctl2::W) writer structure"]
impl crate::Writable for DDRCTRL_ZQCTL2 {}
#[doc = "DDRCTRL ZQ control register 2"]
pub mod ddrctrl_zqctl2;
// DDRCTRL_ZQSTAT bindings: read-only ZQ status register — only `Readable` is
// implemented; field types live in the `ddrctrl_zqstat` submodule.
#[doc = "DDRCTRL ZQ status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_zqstat](ddrctrl_zqstat) module"]
pub type DDRCTRL_ZQSTAT = crate::Reg<u32, _DDRCTRL_ZQSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ZQSTAT;
#[doc = "`read()` method returns [ddrctrl_zqstat::R](ddrctrl_zqstat::R) reader structure"]
impl crate::Readable for DDRCTRL_ZQSTAT {}
#[doc = "DDRCTRL ZQ status register"]
pub mod ddrctrl_zqstat;
#[doc = "DDRCTRL DFI timing register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfitmg0](ddrctrl_dfitmg0) module"]
pub type DDRCTRL_DFITMG0 = crate::Reg<u32, _DDRCTRL_DFITMG0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFITMG0;
#[doc = "`read()` method returns [ddrctrl_dfitmg0::R](ddrctrl_dfitmg0::R) reader structure"]
impl crate::Readable for DDRCTRL_DFITMG0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfitmg0::W](ddrctrl_dfitmg0::W) writer structure"]
impl crate::Writable for DDRCTRL_DFITMG0 {}
#[doc = "DDRCTRL DFI timing register 0"]
pub mod ddrctrl_dfitmg0;
#[doc = "DDRCTRL DFI timing register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfitmg1](ddrctrl_dfitmg1) module"]
pub type DDRCTRL_DFITMG1 = crate::Reg<u32, _DDRCTRL_DFITMG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFITMG1;
#[doc = "`read()` method returns [ddrctrl_dfitmg1::R](ddrctrl_dfitmg1::R) reader structure"]
impl crate::Readable for DDRCTRL_DFITMG1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfitmg1::W](ddrctrl_dfitmg1::W) writer structure"]
impl crate::Writable for DDRCTRL_DFITMG1 {}
#[doc = "DDRCTRL DFI timing register 1"]
pub mod ddrctrl_dfitmg1;
#[doc = "DDRCTRL low power configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfilpcfg0](ddrctrl_dfilpcfg0) module"]
pub type DDRCTRL_DFILPCFG0 = crate::Reg<u32, _DDRCTRL_DFILPCFG0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFILPCFG0;
#[doc = "`read()` method returns [ddrctrl_dfilpcfg0::R](ddrctrl_dfilpcfg0::R) reader structure"]
impl crate::Readable for DDRCTRL_DFILPCFG0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfilpcfg0::W](ddrctrl_dfilpcfg0::W) writer structure"]
impl crate::Writable for DDRCTRL_DFILPCFG0 {}
#[doc = "DDRCTRL low power configuration register 0"]
pub mod ddrctrl_dfilpcfg0;
#[doc = "DDRCTRL DFI update register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfiupd0](ddrctrl_dfiupd0) module"]
pub type DDRCTRL_DFIUPD0 = crate::Reg<u32, _DDRCTRL_DFIUPD0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFIUPD0;
#[doc = "`read()` method returns [ddrctrl_dfiupd0::R](ddrctrl_dfiupd0::R) reader structure"]
impl crate::Readable for DDRCTRL_DFIUPD0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfiupd0::W](ddrctrl_dfiupd0::W) writer structure"]
impl crate::Writable for DDRCTRL_DFIUPD0 {}
#[doc = "DDRCTRL DFI update register 0"]
pub mod ddrctrl_dfiupd0;
#[doc = "DDRCTRL DFI update register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfiupd1](ddrctrl_dfiupd1) module"]
pub type DDRCTRL_DFIUPD1 = crate::Reg<u32, _DDRCTRL_DFIUPD1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFIUPD1;
#[doc = "`read()` method returns [ddrctrl_dfiupd1::R](ddrctrl_dfiupd1::R) reader structure"]
impl crate::Readable for DDRCTRL_DFIUPD1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfiupd1::W](ddrctrl_dfiupd1::W) writer structure"]
impl crate::Writable for DDRCTRL_DFIUPD1 {}
#[doc = "DDRCTRL DFI update register 1"]
pub mod ddrctrl_dfiupd1;
#[doc = "DDRCTRL DFI update register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfiupd2](ddrctrl_dfiupd2) module"]
pub type DDRCTRL_DFIUPD2 = crate::Reg<u32, _DDRCTRL_DFIUPD2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFIUPD2;
#[doc = "`read()` method returns [ddrctrl_dfiupd2::R](ddrctrl_dfiupd2::R) reader structure"]
impl crate::Readable for DDRCTRL_DFIUPD2 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfiupd2::W](ddrctrl_dfiupd2::W) writer structure"]
impl crate::Writable for DDRCTRL_DFIUPD2 {}
#[doc = "DDRCTRL DFI update register 2"]
pub mod ddrctrl_dfiupd2;
#[doc = "DDRCTRL DFI miscellaneous control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfimisc](ddrctrl_dfimisc) module"]
pub type DDRCTRL_DFIMISC = crate::Reg<u32, _DDRCTRL_DFIMISC>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFIMISC;
#[doc = "`read()` method returns [ddrctrl_dfimisc::R](ddrctrl_dfimisc::R) reader structure"]
impl crate::Readable for DDRCTRL_DFIMISC {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfimisc::W](ddrctrl_dfimisc::W) writer structure"]
impl crate::Writable for DDRCTRL_DFIMISC {}
#[doc = "DDRCTRL DFI miscellaneous control register"]
pub mod ddrctrl_dfimisc;
#[doc = "DDRCTRL DFI status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfistat](ddrctrl_dfistat) module"]
pub type DDRCTRL_DFISTAT = crate::Reg<u32, _DDRCTRL_DFISTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFISTAT;
#[doc = "`read()` method returns [ddrctrl_dfistat::R](ddrctrl_dfistat::R) reader structure"]
impl crate::Readable for DDRCTRL_DFISTAT {}
#[doc = "DDRCTRL DFI status register"]
pub mod ddrctrl_dfistat;
#[doc = "DDRCTRL DFI PHY master register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dfiphymstr](ddrctrl_dfiphymstr) module"]
pub type DDRCTRL_DFIPHYMSTR = crate::Reg<u32, _DDRCTRL_DFIPHYMSTR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DFIPHYMSTR;
#[doc = "`read()` method returns [ddrctrl_dfiphymstr::R](ddrctrl_dfiphymstr::R) reader structure"]
impl crate::Readable for DDRCTRL_DFIPHYMSTR {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dfiphymstr::W](ddrctrl_dfiphymstr::W) writer structure"]
impl crate::Writable for DDRCTRL_DFIPHYMSTR {}
#[doc = "DDRCTRL DFI PHY master register"]
pub mod ddrctrl_dfiphymstr;
#[doc = "DDRCTRL address map register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap1](ddrctrl_addrmap1) module"]
pub type DDRCTRL_ADDRMAP1 = crate::Reg<u32, _DDRCTRL_ADDRMAP1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP1;
#[doc = "`read()` method returns [ddrctrl_addrmap1::R](ddrctrl_addrmap1::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap1::W](ddrctrl_addrmap1::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP1 {}
#[doc = "DDRCTRL address map register 1"]
pub mod ddrctrl_addrmap1;
#[doc = "DDRCTRL address map register 2\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap2](ddrctrl_addrmap2) module"]
pub type DDRCTRL_ADDRMAP2 = crate::Reg<u32, _DDRCTRL_ADDRMAP2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP2;
#[doc = "`read()` method returns [ddrctrl_addrmap2::R](ddrctrl_addrmap2::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP2 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap2::W](ddrctrl_addrmap2::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP2 {}
#[doc = "DDRCTRL address map register 2"]
pub mod ddrctrl_addrmap2;
#[doc = "DDRCTRL address map register 3\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap3](ddrctrl_addrmap3) module"]
pub type DDRCTRL_ADDRMAP3 = crate::Reg<u32, _DDRCTRL_ADDRMAP3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP3;
#[doc = "`read()` method returns [ddrctrl_addrmap3::R](ddrctrl_addrmap3::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP3 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap3::W](ddrctrl_addrmap3::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP3 {}
#[doc = "DDRCTRL address map register 3"]
pub mod ddrctrl_addrmap3;
#[doc = "DDRCTRL address map register 4\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap4](ddrctrl_addrmap4) module"]
pub type DDRCTRL_ADDRMAP4 = crate::Reg<u32, _DDRCTRL_ADDRMAP4>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP4;
#[doc = "`read()` method returns [ddrctrl_addrmap4::R](ddrctrl_addrmap4::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP4 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap4::W](ddrctrl_addrmap4::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP4 {}
#[doc = "DDRCTRL address map register 4"]
pub mod ddrctrl_addrmap4;
#[doc = "DDRCTRL address map register 5\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap5](ddrctrl_addrmap5) module"]
pub type DDRCTRL_ADDRMAP5 = crate::Reg<u32, _DDRCTRL_ADDRMAP5>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP5;
#[doc = "`read()` method returns [ddrctrl_addrmap5::R](ddrctrl_addrmap5::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP5 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap5::W](ddrctrl_addrmap5::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP5 {}
#[doc = "DDRCTRL address map register 5"]
pub mod ddrctrl_addrmap5;
// NOTE(review): the upstream SVD description read "DDRCTRL address
// register 6" (missing "map"); corrected here to match the module name
// `ddrctrl_addrmap6` and the sibling ADDRMAP1-5/9-11 descriptions.
#[doc = "DDRCTRL address map register 6\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap6](ddrctrl_addrmap6) module"]
pub type DDRCTRL_ADDRMAP6 = crate::Reg<u32, _DDRCTRL_ADDRMAP6>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP6;
#[doc = "`read()` method returns [ddrctrl_addrmap6::R](ddrctrl_addrmap6::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP6 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap6::W](ddrctrl_addrmap6::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP6 {}
#[doc = "DDRCTRL address map register 6"]
pub mod ddrctrl_addrmap6;
// svd2rust-generated register bindings (same pattern throughout: a
// `crate::Reg<u32, _MARKER>` alias, hidden marker struct, `Readable` /
// `Writable` impls per the register's access rights, and a `pub mod`
// with the field reader/writer types). Docs-only edits here; regenerating
// from the SVD will overwrite.
//
// Address map registers 9-11.
#[doc = "DDRCTRL address map register 9\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap9](ddrctrl_addrmap9) module"]
pub type DDRCTRL_ADDRMAP9 = crate::Reg<u32, _DDRCTRL_ADDRMAP9>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP9;
#[doc = "`read()` method returns [ddrctrl_addrmap9::R](ddrctrl_addrmap9::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP9 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap9::W](ddrctrl_addrmap9::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP9 {}
#[doc = "DDRCTRL address map register 9"]
pub mod ddrctrl_addrmap9;
#[doc = "DDRCTRL address map register 10\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap10](ddrctrl_addrmap10) module"]
pub type DDRCTRL_ADDRMAP10 = crate::Reg<u32, _DDRCTRL_ADDRMAP10>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP10;
#[doc = "`read()` method returns [ddrctrl_addrmap10::R](ddrctrl_addrmap10::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP10 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap10::W](ddrctrl_addrmap10::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP10 {}
#[doc = "DDRCTRL address map register 10"]
pub mod ddrctrl_addrmap10;
#[doc = "DDRCTRL address map register 11\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_addrmap11](ddrctrl_addrmap11) module"]
pub type DDRCTRL_ADDRMAP11 = crate::Reg<u32, _DDRCTRL_ADDRMAP11>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ADDRMAP11;
#[doc = "`read()` method returns [ddrctrl_addrmap11::R](ddrctrl_addrmap11::R) reader structure"]
impl crate::Readable for DDRCTRL_ADDRMAP11 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_addrmap11::W](ddrctrl_addrmap11::W) writer structure"]
impl crate::Writable for DDRCTRL_ADDRMAP11 {}
#[doc = "DDRCTRL address map register 11"]
pub mod ddrctrl_addrmap11;
// ODT configuration/map registers.
#[doc = "DDRCTRL ODT configuration register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_odtcfg](ddrctrl_odtcfg) module"]
pub type DDRCTRL_ODTCFG = crate::Reg<u32, _DDRCTRL_ODTCFG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ODTCFG;
#[doc = "`read()` method returns [ddrctrl_odtcfg::R](ddrctrl_odtcfg::R) reader structure"]
impl crate::Readable for DDRCTRL_ODTCFG {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_odtcfg::W](ddrctrl_odtcfg::W) writer structure"]
impl crate::Writable for DDRCTRL_ODTCFG {}
#[doc = "DDRCTRL ODT configuration register"]
pub mod ddrctrl_odtcfg;
#[doc = "DDRCTRL ODT/Rank map register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_odtmap](ddrctrl_odtmap) module"]
pub type DDRCTRL_ODTMAP = crate::Reg<u32, _DDRCTRL_ODTMAP>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_ODTMAP;
#[doc = "`read()` method returns [ddrctrl_odtmap::R](ddrctrl_odtmap::R) reader structure"]
impl crate::Readable for DDRCTRL_ODTMAP {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_odtmap::W](ddrctrl_odtmap::W) writer structure"]
impl crate::Writable for DDRCTRL_ODTMAP {}
#[doc = "DDRCTRL ODT/Rank map register"]
pub mod ddrctrl_odtmap;
// Scheduler control registers.
#[doc = "DDRCTRL scheduler control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_sched](ddrctrl_sched) module"]
pub type DDRCTRL_SCHED = crate::Reg<u32, _DDRCTRL_SCHED>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_SCHED;
#[doc = "`read()` method returns [ddrctrl_sched::R](ddrctrl_sched::R) reader structure"]
impl crate::Readable for DDRCTRL_SCHED {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_sched::W](ddrctrl_sched::W) writer structure"]
impl crate::Writable for DDRCTRL_SCHED {}
#[doc = "DDRCTRL scheduler control register"]
pub mod ddrctrl_sched;
#[doc = "DDRCTRL scheduler control register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_sched1](ddrctrl_sched1) module"]
pub type DDRCTRL_SCHED1 = crate::Reg<u32, _DDRCTRL_SCHED1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_SCHED1;
#[doc = "`read()` method returns [ddrctrl_sched1::R](ddrctrl_sched1::R) reader structure"]
impl crate::Readable for DDRCTRL_SCHED1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_sched1::W](ddrctrl_sched1::W) writer structure"]
impl crate::Writable for DDRCTRL_SCHED1 {}
#[doc = "DDRCTRL scheduler control register 1"]
pub mod ddrctrl_sched1;
// Read/write CAM performance registers.
#[doc = "DDRCTRL high priority read CAM register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_perfhpr1](ddrctrl_perfhpr1) module"]
pub type DDRCTRL_PERFHPR1 = crate::Reg<u32, _DDRCTRL_PERFHPR1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PERFHPR1;
#[doc = "`read()` method returns [ddrctrl_perfhpr1::R](ddrctrl_perfhpr1::R) reader structure"]
impl crate::Readable for DDRCTRL_PERFHPR1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_perfhpr1::W](ddrctrl_perfhpr1::W) writer structure"]
impl crate::Writable for DDRCTRL_PERFHPR1 {}
#[doc = "DDRCTRL high priority read CAM register 1"]
pub mod ddrctrl_perfhpr1;
#[doc = "DDRCTRL low priority read CAM register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_perflpr1](ddrctrl_perflpr1) module"]
pub type DDRCTRL_PERFLPR1 = crate::Reg<u32, _DDRCTRL_PERFLPR1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PERFLPR1;
#[doc = "`read()` method returns [ddrctrl_perflpr1::R](ddrctrl_perflpr1::R) reader structure"]
impl crate::Readable for DDRCTRL_PERFLPR1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_perflpr1::W](ddrctrl_perflpr1::W) writer structure"]
impl crate::Writable for DDRCTRL_PERFLPR1 {}
#[doc = "DDRCTRL low priority read CAM register 1"]
pub mod ddrctrl_perflpr1;
#[doc = "DDRCTRL write CAM register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_perfwr1](ddrctrl_perfwr1) module"]
pub type DDRCTRL_PERFWR1 = crate::Reg<u32, _DDRCTRL_PERFWR1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PERFWR1;
#[doc = "`read()` method returns [ddrctrl_perfwr1::R](ddrctrl_perfwr1::R) reader structure"]
impl crate::Readable for DDRCTRL_PERFWR1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_perfwr1::W](ddrctrl_perfwr1::W) writer structure"]
impl crate::Writable for DDRCTRL_PERFWR1 {}
#[doc = "DDRCTRL write CAM register 1"]
pub mod ddrctrl_perfwr1;
// Debug registers (DBGCAM and DBGSTAT are read-only).
#[doc = "DDRCTRL debug register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dbg0](ddrctrl_dbg0) module"]
pub type DDRCTRL_DBG0 = crate::Reg<u32, _DDRCTRL_DBG0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DBG0;
#[doc = "`read()` method returns [ddrctrl_dbg0::R](ddrctrl_dbg0::R) reader structure"]
impl crate::Readable for DDRCTRL_DBG0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dbg0::W](ddrctrl_dbg0::W) writer structure"]
impl crate::Writable for DDRCTRL_DBG0 {}
#[doc = "DDRCTRL debug register 0"]
pub mod ddrctrl_dbg0;
#[doc = "DDRCTRL debug register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dbg1](ddrctrl_dbg1) module"]
pub type DDRCTRL_DBG1 = crate::Reg<u32, _DDRCTRL_DBG1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DBG1;
#[doc = "`read()` method returns [ddrctrl_dbg1::R](ddrctrl_dbg1::R) reader structure"]
impl crate::Readable for DDRCTRL_DBG1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dbg1::W](ddrctrl_dbg1::W) writer structure"]
impl crate::Writable for DDRCTRL_DBG1 {}
#[doc = "DDRCTRL debug register 1"]
pub mod ddrctrl_dbg1;
#[doc = "DDRCTRL CAM debug register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dbgcam](ddrctrl_dbgcam) module"]
pub type DDRCTRL_DBGCAM = crate::Reg<u32, _DDRCTRL_DBGCAM>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DBGCAM;
#[doc = "`read()` method returns [ddrctrl_dbgcam::R](ddrctrl_dbgcam::R) reader structure"]
impl crate::Readable for DDRCTRL_DBGCAM {}
#[doc = "DDRCTRL CAM debug register"]
pub mod ddrctrl_dbgcam;
#[doc = "DDRCTRL command debug register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dbgcmd](ddrctrl_dbgcmd) module"]
pub type DDRCTRL_DBGCMD = crate::Reg<u32, _DDRCTRL_DBGCMD>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DBGCMD;
#[doc = "`read()` method returns [ddrctrl_dbgcmd::R](ddrctrl_dbgcmd::R) reader structure"]
impl crate::Readable for DDRCTRL_DBGCMD {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_dbgcmd::W](ddrctrl_dbgcmd::W) writer structure"]
impl crate::Writable for DDRCTRL_DBGCMD {}
#[doc = "DDRCTRL command debug register"]
pub mod ddrctrl_dbgcmd;
#[doc = "DDRCTRL status debug register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_dbgstat](ddrctrl_dbgstat) module"]
pub type DDRCTRL_DBGSTAT = crate::Reg<u32, _DDRCTRL_DBGSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_DBGSTAT;
#[doc = "`read()` method returns [ddrctrl_dbgstat::R](ddrctrl_dbgstat::R) reader structure"]
impl crate::Readable for DDRCTRL_DBGSTAT {}
#[doc = "DDRCTRL status debug register"]
pub mod ddrctrl_dbgstat;
// Software register programming control enable/status (SWSTAT is read-only).
#[doc = "DDRCTRL software register programming control enable\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_swctl](ddrctrl_swctl) module"]
pub type DDRCTRL_SWCTL = crate::Reg<u32, _DDRCTRL_SWCTL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_SWCTL;
#[doc = "`read()` method returns [ddrctrl_swctl::R](ddrctrl_swctl::R) reader structure"]
impl crate::Readable for DDRCTRL_SWCTL {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_swctl::W](ddrctrl_swctl::W) writer structure"]
impl crate::Writable for DDRCTRL_SWCTL {}
#[doc = "DDRCTRL software register programming control enable"]
pub mod ddrctrl_swctl;
#[doc = "DDRCTRL software register programming control status\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_swstat](ddrctrl_swstat) module"]
pub type DDRCTRL_SWSTAT = crate::Reg<u32, _DDRCTRL_SWSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_SWSTAT;
#[doc = "`read()` method returns [ddrctrl_swstat::R](ddrctrl_swstat::R) reader structure"]
impl crate::Readable for DDRCTRL_SWSTAT {}
#[doc = "DDRCTRL software register programming control status"]
pub mod ddrctrl_swstat;
// AXI poison configuration/status (POISONSTAT is read-only).
#[doc = "AXI Poison configuration register common for all AXI ports.\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_poisoncfg](ddrctrl_poisoncfg) module"]
pub type DDRCTRL_POISONCFG = crate::Reg<u32, _DDRCTRL_POISONCFG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_POISONCFG;
#[doc = "`read()` method returns [ddrctrl_poisoncfg::R](ddrctrl_poisoncfg::R) reader structure"]
impl crate::Readable for DDRCTRL_POISONCFG {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_poisoncfg::W](ddrctrl_poisoncfg::W) writer structure"]
impl crate::Writable for DDRCTRL_POISONCFG {}
#[doc = "AXI Poison configuration register common for all AXI ports."]
pub mod ddrctrl_poisoncfg;
#[doc = "DDRCTRL AXI Poison status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_poisonstat](ddrctrl_poisonstat) module"]
pub type DDRCTRL_POISONSTAT = crate::Reg<u32, _DDRCTRL_POISONSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_POISONSTAT;
#[doc = "`read()` method returns [ddrctrl_poisonstat::R](ddrctrl_poisonstat::R) reader structure"]
impl crate::Readable for DDRCTRL_POISONSTAT {}
#[doc = "DDRCTRL AXI Poison status register"]
pub mod ddrctrl_poisonstat;
// Port status and common configuration (PSTAT is read-only).
#[doc = "DDRCTRL port status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pstat](ddrctrl_pstat) module"]
pub type DDRCTRL_PSTAT = crate::Reg<u32, _DDRCTRL_PSTAT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PSTAT;
#[doc = "`read()` method returns [ddrctrl_pstat::R](ddrctrl_pstat::R) reader structure"]
impl crate::Readable for DDRCTRL_PSTAT {}
#[doc = "DDRCTRL port status register"]
pub mod ddrctrl_pstat;
#[doc = "DDRCTRL port common configuration register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pccfg](ddrctrl_pccfg) module"]
pub type DDRCTRL_PCCFG = crate::Reg<u32, _DDRCTRL_PCCFG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCCFG;
#[doc = "`read()` method returns [ddrctrl_pccfg::R](ddrctrl_pccfg::R) reader structure"]
impl crate::Readable for DDRCTRL_PCCFG {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pccfg::W](ddrctrl_pccfg::W) writer structure"]
impl crate::Writable for DDRCTRL_PCCFG {}
#[doc = "DDRCTRL port common configuration register"]
pub mod ddrctrl_pccfg;
// Per-port (port 0, `_0` suffix) configuration and control registers.
#[doc = "DDRCTRL port n configuration read register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgr_0](ddrctrl_pcfgr_0) module"]
pub type DDRCTRL_PCFGR_0 = crate::Reg<u32, _DDRCTRL_PCFGR_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGR_0;
#[doc = "`read()` method returns [ddrctrl_pcfgr_0::R](ddrctrl_pcfgr_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGR_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgr_0::W](ddrctrl_pcfgr_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGR_0 {}
#[doc = "DDRCTRL port n configuration read register"]
pub mod ddrctrl_pcfgr_0;
#[doc = "DDRCTRL port n configuration write register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgw_0](ddrctrl_pcfgw_0) module"]
pub type DDRCTRL_PCFGW_0 = crate::Reg<u32, _DDRCTRL_PCFGW_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGW_0;
#[doc = "`read()` method returns [ddrctrl_pcfgw_0::R](ddrctrl_pcfgw_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGW_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgw_0::W](ddrctrl_pcfgw_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGW_0 {}
#[doc = "DDRCTRL port n configuration write register"]
pub mod ddrctrl_pcfgw_0;
#[doc = "DDRCTRL port n control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pctrl_0](ddrctrl_pctrl_0) module"]
pub type DDRCTRL_PCTRL_0 = crate::Reg<u32, _DDRCTRL_PCTRL_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCTRL_0;
#[doc = "`read()` method returns [ddrctrl_pctrl_0::R](ddrctrl_pctrl_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCTRL_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pctrl_0::W](ddrctrl_pctrl_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCTRL_0 {}
#[doc = "DDRCTRL port n control register"]
pub mod ddrctrl_pctrl_0;
#[doc = "DDRCTRL port n read Q0S configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgqos0_0](ddrctrl_pcfgqos0_0) module"]
pub type DDRCTRL_PCFGQOS0_0 = crate::Reg<u32, _DDRCTRL_PCFGQOS0_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGQOS0_0;
#[doc = "`read()` method returns [ddrctrl_pcfgqos0_0::R](ddrctrl_pcfgqos0_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGQOS0_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgqos0_0::W](ddrctrl_pcfgqos0_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGQOS0_0 {}
#[doc = "DDRCTRL port n read Q0S configuration register 0"]
pub mod ddrctrl_pcfgqos0_0;
#[doc = "DDRCTRL port n read Q0S configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgqos1_0](ddrctrl_pcfgqos1_0) module"]
pub type DDRCTRL_PCFGQOS1_0 = crate::Reg<u32, _DDRCTRL_PCFGQOS1_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGQOS1_0;
#[doc = "`read()` method returns [ddrctrl_pcfgqos1_0::R](ddrctrl_pcfgqos1_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGQOS1_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgqos1_0::W](ddrctrl_pcfgqos1_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGQOS1_0 {}
#[doc = "DDRCTRL port n read Q0S configuration register 1"]
pub mod ddrctrl_pcfgqos1_0;
#[doc = "DDRCTRL port n write Q0S configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgwqos0_0](ddrctrl_pcfgwqos0_0) module"]
pub type DDRCTRL_PCFGWQOS0_0 = crate::Reg<u32, _DDRCTRL_PCFGWQOS0_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGWQOS0_0;
#[doc = "`read()` method returns [ddrctrl_pcfgwqos0_0::R](ddrctrl_pcfgwqos0_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGWQOS0_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgwqos0_0::W](ddrctrl_pcfgwqos0_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGWQOS0_0 {}
#[doc = "DDRCTRL port n write Q0S configuration register 0"]
pub mod ddrctrl_pcfgwqos0_0;
#[doc = "DDRCTRL port n write Q0S configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgwqos1_0](ddrctrl_pcfgwqos1_0) module"]
pub type DDRCTRL_PCFGWQOS1_0 = crate::Reg<u32, _DDRCTRL_PCFGWQOS1_0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGWQOS1_0;
#[doc = "`read()` method returns [ddrctrl_pcfgwqos1_0::R](ddrctrl_pcfgwqos1_0::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGWQOS1_0 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgwqos1_0::W](ddrctrl_pcfgwqos1_0::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGWQOS1_0 {}
#[doc = "DDRCTRL port n write Q0S configuration register 1"]
pub mod ddrctrl_pcfgwqos1_0;
#[doc = "DDRCTRL port n configuration read register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgr_1](ddrctrl_pcfgr_1) module"]
pub type DDRCTRL_PCFGR_1 = crate::Reg<u32, _DDRCTRL_PCFGR_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGR_1;
#[doc = "`read()` method returns [ddrctrl_pcfgr_1::R](ddrctrl_pcfgr_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGR_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgr_1::W](ddrctrl_pcfgr_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGR_1 {}
#[doc = "DDRCTRL port n configuration read register"]
pub mod ddrctrl_pcfgr_1;
#[doc = "DDRCTRL port n configuration write register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgw_1](ddrctrl_pcfgw_1) module"]
pub type DDRCTRL_PCFGW_1 = crate::Reg<u32, _DDRCTRL_PCFGW_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGW_1;
#[doc = "`read()` method returns [ddrctrl_pcfgw_1::R](ddrctrl_pcfgw_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGW_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgw_1::W](ddrctrl_pcfgw_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGW_1 {}
#[doc = "DDRCTRL port n configuration write register"]
pub mod ddrctrl_pcfgw_1;
#[doc = "DDRCTRL port n control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pctrl_1](ddrctrl_pctrl_1) module"]
pub type DDRCTRL_PCTRL_1 = crate::Reg<u32, _DDRCTRL_PCTRL_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCTRL_1;
#[doc = "`read()` method returns [ddrctrl_pctrl_1::R](ddrctrl_pctrl_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCTRL_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pctrl_1::W](ddrctrl_pctrl_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCTRL_1 {}
#[doc = "DDRCTRL port n control register"]
pub mod ddrctrl_pctrl_1;
#[doc = "DDRCTRL port n read Q0S configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgqos0_1](ddrctrl_pcfgqos0_1) module"]
pub type DDRCTRL_PCFGQOS0_1 = crate::Reg<u32, _DDRCTRL_PCFGQOS0_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGQOS0_1;
#[doc = "`read()` method returns [ddrctrl_pcfgqos0_1::R](ddrctrl_pcfgqos0_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGQOS0_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgqos0_1::W](ddrctrl_pcfgqos0_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGQOS0_1 {}
#[doc = "DDRCTRL port n read Q0S configuration register 0"]
pub mod ddrctrl_pcfgqos0_1;
#[doc = "DDRCTRL port n read Q0S configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgqos1_1](ddrctrl_pcfgqos1_1) module"]
pub type DDRCTRL_PCFGQOS1_1 = crate::Reg<u32, _DDRCTRL_PCFGQOS1_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGQOS1_1;
#[doc = "`read()` method returns [ddrctrl_pcfgqos1_1::R](ddrctrl_pcfgqos1_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGQOS1_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgqos1_1::W](ddrctrl_pcfgqos1_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGQOS1_1 {}
#[doc = "DDRCTRL port n read Q0S configuration register 1"]
pub mod ddrctrl_pcfgqos1_1;
#[doc = "DDRCTRL port n write Q0S configuration register 0\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgwqos0_1](ddrctrl_pcfgwqos0_1) module"]
pub type DDRCTRL_PCFGWQOS0_1 = crate::Reg<u32, _DDRCTRL_PCFGWQOS0_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGWQOS0_1;
#[doc = "`read()` method returns [ddrctrl_pcfgwqos0_1::R](ddrctrl_pcfgwqos0_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGWQOS0_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgwqos0_1::W](ddrctrl_pcfgwqos0_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGWQOS0_1 {}
#[doc = "DDRCTRL port n write Q0S configuration register 0"]
pub mod ddrctrl_pcfgwqos0_1;
#[doc = "DDRCTRL port n write Q0S configuration register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ddrctrl_pcfgwqos1_1](ddrctrl_pcfgwqos1_1) module"]
pub type DDRCTRL_PCFGWQOS1_1 = crate::Reg<u32, _DDRCTRL_PCFGWQOS1_1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DDRCTRL_PCFGWQOS1_1;
#[doc = "`read()` method returns [ddrctrl_pcfgwqos1_1::R](ddrctrl_pcfgwqos1_1::R) reader structure"]
impl crate::Readable for DDRCTRL_PCFGWQOS1_1 {}
#[doc = "`write(|w| ..)` method takes [ddrctrl_pcfgwqos1_1::W](ddrctrl_pcfgwqos1_1::W) writer structure"]
impl crate::Writable for DDRCTRL_PCFGWQOS1_1 {}
#[doc = "DDRCTRL port n write Q0S configuration register 1"]
pub mod ddrctrl_pcfgwqos1_1;
|
#![allow(clippy::let_unit_value)]
mod auth;
mod database;
mod meal_plan;
mod recipe;
mod shopping_list;
use async_trait::async_trait;
use oikos_api::{models::*, server::OikosApi};
/// Umbrella error type for the API server.
///
/// Each variant wraps the error type of one sub-module; the `#[from]`
/// attributes generate the `From` impls that let handlers convert with
/// `err.into()` / `?`.
#[derive(Debug, thiserror::Error)]
pub enum ServerError {
    #[error("recipe error")]
    RecipeError(#[from] recipe::RecipeError),
    #[error("auth error")]
    AuthError(#[from] auth::AuthError),
    #[error("meal plans error")]
    MealPlanError(#[from] meal_plan::MealPlanError),
    #[error("shopping list error")]
    ShoppingListError(#[from] shopping_list::ShoppingListError),
}
/// Stateless handler type implementing the Oikos HTTP API.
#[derive(Clone)]
pub struct Server {}
impl Server {
    /// Builds a new server instance. Currently holds no state, but the
    /// constructor is `async` so future initialization can await without
    /// changing the call sites.
    pub async fn new() -> Self {
        Self {}
    }
}
#[async_trait(?Send)]
impl OikosApi for Server {
    // Every handler below delegates to an inherent `Server` method of the
    // same name — presumably implemented in the sibling modules (`recipe`,
    // `auth`, `meal_plan`, `shopping_list`); those bodies are not visible in
    // this file. Errors are funneled through `ServerError` via `err.into()`.
    type Error = ServerError;
    /// Reports the crate version; the only handler with no delegation.
    async fn get_info(
        &self,
        _parameters: get_info::Parameters,
    ) -> Result<get_info::Success, get_info::Error<Self::Error>> {
        use get_info::*;
        Ok(Success::Status200(Status200 {
            version: Some(oikos_api::VERSION.to_string()),
        }))
    }
    /// Lists all recipes visible to the authorized user.
    async fn get_recipes(
        &self,
        get_recipes::Parameters { authorization }: get_recipes::Parameters,
    ) -> Result<get_recipes::Success, get_recipes::Error<Self::Error>> {
        use get_recipes::*;
        match self.get_recipes(&authorization) {
            Ok(recipes) => Ok(Success::Status200(recipes)),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Creates a new recipe from the request body.
    async fn add_recipe(
        &self,
        add_recipe::Parameters { authorization }: add_recipe::Parameters,
        body: add_recipe::Body,
    ) -> Result<add_recipe::Success, add_recipe::Error<Self::Error>> {
        use add_recipe::*;
        match self.add_recipe(&body, &authorization).await {
            Ok(recipe) => Ok(Success::Status200(recipe)),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Fetches a single recipe; a missing id maps to the `Status404` variant.
    async fn get_recipe_by_id(
        &self,
        get_recipe_by_id::Parameters {
            authorization,
            recipe_id,
        }: get_recipe_by_id::Parameters,
    ) -> Result<get_recipe_by_id::Success, get_recipe_by_id::Error<Self::Error>> {
        use get_recipe_by_id::*;
        match self.get_recipe_by_id(&recipe_id, &authorization) {
            Ok(recipe) => Ok(Success::Status200(recipe)),
            // `NotFound` is surfaced as a 404 with the error's Display text.
            Err(err @ recipe::RecipeError::NotFound(_)) => Err(Error::Status404(err.to_string())),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Replaces an existing recipe; a missing id maps to `Status404`.
    async fn update_recipe_by_id(
        &self,
        update_recipe_by_id::Parameters {
            authorization,
            recipe_id,
        }: update_recipe_by_id::Parameters,
        body: update_recipe_by_id::Body,
    ) -> Result<update_recipe_by_id::Success, update_recipe_by_id::Error<Self::Error>> {
        use update_recipe_by_id::*;
        match self
            .update_recipe_by_id(&recipe_id, &body, &authorization)
            .await
        {
            Ok(recipe) => Ok(Success::Status200(recipe)),
            Err(err @ recipe::RecipeError::NotFound(_)) => Err(Error::Status404(err.to_string())),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Deletes a recipe; success carries no payload. Missing id -> `Status404`.
    async fn delete_recipe_by_id(
        &self,
        delete_recipe_by_id::Parameters {
            authorization,
            recipe_id,
        }: delete_recipe_by_id::Parameters,
        _body: delete_recipe_by_id::Body,
    ) -> Result<delete_recipe_by_id::Success, delete_recipe_by_id::Error<Self::Error>> {
        use delete_recipe_by_id::*;
        match self.delete_recipe_by_id(&recipe_id, &authorization) {
            Ok(_) => Ok(Success::Status200(())),
            Err(err @ recipe::RecipeError::NotFound(_)) => Err(Error::Status404(err.to_string())),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Exchanges an OAuth authorization code for an access token.
    async fn get_oauth_access_token(
        &self,
        _parameters: get_oauth_access_token::Parameters,
        get_oauth_access_token::Body { code }: get_oauth_access_token::Body,
    ) -> Result<get_oauth_access_token::Success, get_oauth_access_token::Error<Self::Error>> {
        use get_oauth_access_token::*;
        match self.get_oauth_access_token(&code).await {
            Ok(access_token) => Ok(Success::Status200(access_token)),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Returns the authorized user's meal plans.
    async fn get_meal_plans(
        &self,
        get_meal_plans::Parameters { authorization }: get_meal_plans::Parameters,
    ) -> Result<get_meal_plans::Success, get_meal_plans::Error<Self::Error>> {
        use get_meal_plans::*;
        match self.get_meal_plans(&authorization).await {
            Ok(meal_plans) => Ok(Success::Status200(meal_plans)),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Replaces the authorized user's meal plans with the request body.
    async fn update_meal_plans(
        &self,
        update_meal_plans::Parameters { authorization }: update_meal_plans::Parameters,
        meal_plans: update_meal_plans::Body,
    ) -> Result<update_meal_plans::Success, update_meal_plans::Error<Self::Error>> {
        use update_meal_plans::*;
        match self.update_meal_plans(meal_plans, &authorization).await {
            Ok(meal_plans) => Ok(Success::Status200(meal_plans)),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
    /// Returns the authorized user's shopping list.
    async fn get_shopping_list(
        &self,
        get_shopping_list::Parameters { authorization }: get_shopping_list::Parameters,
    ) -> Result<get_shopping_list::Success, get_shopping_list::Error<Self::Error>> {
        use get_shopping_list::*;
        match self.get_shopping_list(&authorization).await {
            Ok(shopping_list) => Ok(Success::Status200(shopping_list)),
            Err(err) => Err(Error::Unknown(err.into())),
        }
    }
}
|
struct Solution;

/// Byte value of lowercase 'a'; used to index the 26-slot table below.
const CHAR_A_U8: u8 = b'a';

impl Solution {
    /// LeetCode 763 "Partition Labels".
    ///
    /// Splits `s` (lowercase ASCII letters) into as many contiguous parts as
    /// possible such that each letter appears in at most one part, and
    /// returns the length of each part in order. Returns an empty vector for
    /// an empty string.
    pub fn partition_labels(s: String) -> Vec<i32> {
        // last_pos[c] = index of the final occurrence of letter `c` in `s`.
        // Iterating bytes (not chars) avoids per-char casts; valid inputs are
        // ASCII, matching the original `c as u8` arithmetic.
        let last_pos = s.bytes().enumerate().fold([0usize; 26], |mut acc, (idx, b)| {
            acc[(b - CHAR_A_U8) as usize] = idx;
            acc
        });
        let mut ans = Vec::new();
        let (mut start, mut end) = (0, 0);
        for (idx, b) in s.bytes().enumerate() {
            // Grow the current segment to cover the last occurrence of every
            // letter seen so far.
            end = end.max(last_pos[(b - CHAR_A_U8) as usize]);
            // Reaching the segment's right boundary closes one partition.
            if idx == end {
                ans.push((end - start + 1) as i32);
                start = end + 1;
            }
        }
        ans
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Example 1 from LeetCode 763: the string splits into three partitions
    // of lengths 9, 7 and 8.
    #[test]
    fn test_partition_labels() {
        assert_eq!(
            Solution::partition_labels("ababcbacadefegdehijhklij".to_owned()),
            vec![9, 7, 8]
        );
    }
}
|
use std::sync::Arc;
/// An immutable window into a reference-counted byte buffer.
///
/// Cloning is cheap: the underlying `Vec<u8>` is shared through the `Arc`
/// and only the `(start, len)` window is copied.
#[derive(Clone)]
pub struct SharedVecSlice {
    pub data: Arc<Vec<u8>>,
    pub start: usize,
    pub len: usize,
}

impl SharedVecSlice {
    /// A zero-length slice over a freshly allocated empty buffer.
    pub fn empty() -> SharedVecSlice {
        Self::new(Arc::new(Vec::new()))
    }

    /// Wraps `data`, exposing the entire buffer as the window.
    pub fn new(data: Arc<Vec<u8>>) -> SharedVecSlice {
        let len = data.len();
        SharedVecSlice { data, start: 0, len }
    }

    /// Borrows the current window as a plain byte slice.
    pub fn as_slice(&self) -> &[u8] {
        let lo = self.start;
        let hi = lo + self.len;
        &self.data[lo..hi]
    }

    /// Returns a sub-window covering `[from_offset, to_offset)` relative to
    /// this slice's own start. Offsets are not bounds-checked here; a
    /// misused window panics later inside `as_slice`.
    pub fn slice(&self, from_offset: usize, to_offset: usize) -> SharedVecSlice {
        SharedVecSlice {
            data: Arc::clone(&self.data),
            start: self.start + from_offset,
            len: to_offset - from_offset,
        }
    }
}

impl From<Vec<u8>> for SharedVecSlice {
    fn from(data: Vec<u8>) -> SharedVecSlice {
        SharedVecSlice::new(Arc::new(data))
    }
}
|
#[macro_use]
extern crate quick_error;
use intcode::Intcode;
use std::borrow::Cow;
use std::env;
use std::io;
use std::num::ParseIntError;
// Unified error for `main`: either reading the input file (I/O) or parsing
// an integer can fail. The `from()` clauses generate the `From` impls that
// make `?` work for both error sources.
quick_error! {
    #[derive(Debug)]
    pub enum SuperError {
        IoError(err: io::Error) { from() }
        ParseIntError(err: ParseIntError) { from() }
    }
}
/// Advent of Code 2019 day 2: run the Intcode program from the input file.
fn main() -> Result<(), SuperError> {
    // Program path comes from argv[1], defaulting to "input".
    let path: Cow<'static, str> = match env::args().nth(1) {
        Some(arg) => arg.into(),
        None => "input".into(),
    };
    let input = std::fs::read_to_string(path.as_ref())?;
    // Comma-separated integers; the first parse failure aborts via `?`.
    let memory = input
        .trim()
        .split(',')
        .map(str::parse)
        .collect::<Result<Vec<isize>, ParseIntError>>()?;
    // "before running the program, replace position 1 with the value 12 and replace position 2 with the value 2"
    println!("Part 1: {}", run_program_with(&memory, 12, 2)?);
    // for some reason, position 1 and 2 are nouns and verbs and we need to brute force them until we get 19690720?
    'search: for noun in 0..=99 {
        for verb in 0..=99 {
            if run_program_with(&memory, noun, verb)? == 19690720 {
                println!("Part 2: {}", 100 * noun + verb);
                break 'search;
            }
        }
    }
    Ok(())
}
/// Runs a copy of `memory` with positions 1 and 2 preset to `noun` and
/// `verb`, returning the value left at position 0 after the program halts.
fn run_program_with(memory: &[isize], noun: isize, verb: isize) -> io::Result<isize> {
    let mut program = Intcode::new(memory);
    program.memory[1] = noun;
    program.memory[2] = verb;
    program.run()?;
    Ok(program.memory[0])
}
// Runs `input` to completion and checks the resulting memory image.
// `run()` returning `false` is asserted here; presumably it signals "halted
// without requesting more input" — confirm against the intcode crate.
#[cfg(test)]
fn test_program_helper(input: &[isize], output: &[isize]) {
    let mut program = Intcode::new(input);
    assert_eq!(program.run().unwrap(), false);
    assert_eq!(program.memory, output);
}
// The worked examples from the Advent of Code 2019 day 2 puzzle statement.
#[test]
fn test_program() {
    test_program_helper(&[1,9,10,3,2,3,11,0,99,30,40,50], &[3500,9,10,70,2,3,11,0,99,30,40,50]);
    test_program_helper(&[1,0,0,0,99], &[2,0,0,0,99]);
    test_program_helper(&[2,3,0,3,99], &[2,3,0,6,99]);
    test_program_helper(&[2,4,4,5,99,0], &[2,4,4,5,99,9801]);
    test_program_helper(&[1,1,1,4,99,5,6,0,99], &[30,1,1,4,2,5,6,0,99]);
}
|
use wasm_bindgen::prelude::*;
/// Identifier assigned to each document loaded in the editor.
type DocumentId = u32;
/// Modify the active Document in the editor state store
#[wasm_bindgen]
pub fn set_active_document(document_id: DocumentId) {
    // Stub: panics via `todo!` until the state store is implemented.
    todo!("set_active_document {}", document_id)
}
/// Query the name of a specific document
#[wasm_bindgen]
pub fn get_document_name(document_id: DocumentId) -> String {
    // Stub: panics via `todo!` until the state store is implemented.
    todo!("get_document_name {}", document_id)
}
/// Query the id of the most recently interacted with document
#[wasm_bindgen]
pub fn get_active_document() -> DocumentId {
    // Stub: panics via `todo!` until the state store is implemented.
    todo!("get_active_document")
}
/// Identifier for a UI panel within the editor.
type PanelId = u32;
/// Notify the editor that the mouse hovers above a panel
#[wasm_bindgen]
pub fn panel_hover_enter(panel_id: PanelId) {
    // Stub: panics via `todo!` until hover handling is implemented.
    todo!("panel_hover_enter {}", panel_id)
}
/// Query a list of currently available operations
#[wasm_bindgen]
pub fn get_available_operations() -> Vec<JsValue> {
    // Stub: panics via `todo!`. The commented line below sketches the
    // intended return shape (operation names as JS values).
    todo!("get_available_operations")
    // vec!["example1", "example2"].into_iter().map(JsValue::from).collect()
}
/*
/// Load a new .gdd file into the editor
/// Returns a unique document identifier
#[wasm_bindgen]
pub fn load_document(raw_data: &[u8]) -> DocumentId {
todo!()
}*/
|
/// Small demo of Rust's `println!` formatting styles.
fn main() {
    println!("Hello Rust!");
    let x = 6;
    let y = 9;
    // Positional `{}` placeholders consume the arguments in order.
    println!("x is {}, y is {}", x, y);
    // Named arguments bind placeholders by name.
    println!("x is {valx}, y is {fred}", valx = x, fred = y); // passing values
    // `{:?}` prints the Debug representation of a value.
    println!("Debug {:?}", (3, 4)); // Debug show what is in variable
    // Indexed placeholders select arguments by position.
    println!("y is {1}, x is {0}", x, y); // using index on variables
}
use std::fmt;
/// Abstract domain over booleans: tracks which concrete values (`true`,
/// `false`) are possible. `Default` (both flags `false`) is the bottom
/// element, i.e. the empty set of possible values.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct BoolDomain {
    pub tt: bool,
    pub ff: bool,
}

impl fmt::Display for BoolDomain {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // "bool" = both values possible; bottom renders as nothing at all.
        let text = match (self.tt, self.ff) {
            (true, true) => "bool",
            (true, false) => "tt",
            (false, true) => "ff",
            (false, false) => "",
        };
        f.write_str(text)
    }
}

impl BoolDomain {
    /// True when neither concrete value is possible (the empty set).
    pub fn is_bottom(&self) -> bool {
        !(self.tt || self.ff)
    }

    /// Least upper bound: a value is possible if it is possible in either
    /// operand (set union).
    pub fn sup(&self, other: &Self) -> Self {
        Self { tt: self.tt | other.tt, ff: self.ff | other.ff }
    }

    /// Greatest lower bound: a value is possible only if it is possible in
    /// both operands (set intersection).
    pub fn inf(&self, other: &Self) -> Self {
        Self { tt: self.tt & other.tt, ff: self.ff & other.ff }
    }
}
|
use anyhow::{anyhow, Result};
use byteorder::{ByteOrder, LittleEndian};
use secstr::SecUtf8;
use std::ffi::OsStr;
use std::iter::once;
use std::mem::MaybeUninit;
use std::os::windows::ffi::OsStrExt;
use std::slice;
use std::str;
use winapi::shared::minwindef::FILETIME;
use winapi::um::wincred::{
CredDeleteW, CredFree, CredReadW, CredWriteW, CREDENTIALW, CRED_PERSIST_ENTERPRISE, CRED_TYPE_GENERIC, PCREDENTIALW,
PCREDENTIAL_ATTRIBUTEW,
};
use crate::Store;
/// Secret store backed by the Windows Credential Manager ("wincred") API.
#[derive(Debug)]
pub struct KeychainStore;
impl Store for KeychainStore {
    /// Stores `value` as a generic credential named `"<key>.<namespace>"`,
    /// persisted with `CRED_PERSIST_ENTERPRISE`.
    fn put(
        &self,
        namespace: impl Into<String> + Send,
        key: impl Into<String> + Send,
        value: impl Into<SecUtf8> + Send,
    ) -> Result<()> {
        let ns: String = namespace.into();
        let k: String = key.into();
        // Setting values of credential
        let flags = 0;
        let cred_type = CRED_TYPE_GENERIC;
        let target_name: String = [k.as_str(), ns.as_str()].join(".");
        let mut target_name = to_wstr(&target_name);
        // The same human-readable label is reused for both the Comment and
        // the TargetAlias fields below (the variable name `empty_str` is
        // historical — it is not actually empty).
        let label = format!("secret for etradectl {}@{}", &k, &ns);
        let mut empty_str = to_wstr(&label);
        // Ignored by CredWriteW
        let last_written = FILETIME {
            dwLowDateTime: 0,
            dwHighDateTime: 0,
        };
        // In order to allow editing of the password
        // from within Windows, the password must be
        // transformed into utf16. (but because it's a
        // blob, it then needs to be passed to windows
        // as an array of bytes).
        let blob_u16 = to_wstr_no_null(value.into().unsecure());
        // Two bytes per UTF-16 code unit, little-endian.
        let mut blob = vec![0; blob_u16.len() * 2];
        LittleEndian::write_u16_into(&blob_u16, &mut blob);
        let blob_len = blob.len() as u32;
        let persist = CRED_PERSIST_ENTERPRISE;
        let attribute_count = 0;
        let attributes: PCREDENTIAL_ATTRIBUTEW = std::ptr::null_mut();
        let mut username = to_wstr(&k);
        let mut credential = CREDENTIALW {
            Flags: flags,
            Type: cred_type,
            TargetName: target_name.as_mut_ptr(),
            Comment: empty_str.as_mut_ptr(),
            LastWritten: last_written,
            CredentialBlobSize: blob_len,
            CredentialBlob: blob.as_mut_ptr(),
            Persist: persist,
            AttributeCount: attribute_count,
            Attributes: attributes,
            TargetAlias: empty_str.as_mut_ptr(),
            UserName: username.as_mut_ptr(),
        };
        // raw pointer to credential, is coerced from &mut
        let pcredential: PCREDENTIALW = &mut credential;
        // Call windows API. CredWriteW returns 0 (FALSE) on failure.
        match unsafe { CredWriteW(pcredential, 0) } {
            0 => Err(anyhow!("windows vault error")),
            _ => Ok(()),
        }
    }
    /// Deletes the credential named `"<key>.<namespace>"`.
    fn del(&self, namespace: impl AsRef<str> + Send, key: impl AsRef<str> + Send) -> Result<()> {
        let target_name: String = [key.as_ref(), namespace.as_ref()].join(".");
        let cred_type = CRED_TYPE_GENERIC;
        let target_name = to_wstr(&target_name);
        // CredDeleteW returns 0 (FALSE) on failure.
        match unsafe { CredDeleteW(target_name.as_ptr(), cred_type, 0) } {
            0 => Err(anyhow!("windows vault error")),
            _ => Ok(()),
        }
    }
    /// Reads the credential named `"<key>.<namespace>"`. On success this
    /// always yields `Some(secret)`; lookup and decoding failures are `Err`.
    fn get(&self, namespace: impl AsRef<str> + Send, key: impl AsRef<str> + Send) -> Result<Option<SecUtf8>> {
        // passing uninitialized pcredential.
        // Should be ok; it's freed by a windows api
        // call CredFree.
        let mut pcredential = MaybeUninit::uninit();
        let target_name: String = [key.as_ref(), namespace.as_ref()].join(".");
        let target_name = to_wstr(&target_name);
        let cred_type = CRED_TYPE_GENERIC;
        // Windows api call
        match unsafe { CredReadW(target_name.as_ptr(), cred_type, 0, pcredential.as_mut_ptr()) } {
            0 => Err(anyhow!("windows vault error")),
            _ => {
                // SAFETY-relevant: CredReadW succeeded, so the pointer was
                // initialized by the API and must later be released with CredFree.
                let pcredential = unsafe { pcredential.assume_init() };
                // Dereferencing pointer to credential
                let credential: CREDENTIALW = unsafe { *pcredential };
                // get blob by creating an array from the pointer
                // and the length reported back from the credential
                let blob_pointer: *const u8 = credential.CredentialBlob;
                let blob_len: usize = credential.CredentialBlobSize as usize;
                // blob needs to be transformed from bytes to an
                // array of u16, which will then be transformed into
                // a utf8 string. As noted above, this is to allow
                // editing of the password from within the vault order
                // or other windows programs, which operate in utf16
                let blob: &[u8] = unsafe { slice::from_raw_parts(blob_pointer, blob_len) };
                // NOTE(review): assumes `blob_len` is even (UTF-16 bytes, as
                // written by `put`); `read_u16_into` panics on a length
                // mismatch if another program stored an odd-length blob.
                let mut blob_u16 = vec![0; blob_len / 2];
                LittleEndian::read_u16_into(&blob, &mut blob_u16);
                // Now can get utf8 string from the array
                let password = String::from_utf16(&blob_u16)
                    .map(|pass| Some(pass.to_string().into()))
                    .map_err(|_| anyhow!("windows vault error"));
                // Free the credential (done after the blob has been decoded).
                unsafe {
                    CredFree(pcredential as *mut _);
                }
                password
            }
        }
    }
}
/// Converts a UTF-8 string into a NUL-terminated UTF-16 buffer, the form
/// expected by wide-character Windows APIs. `str::encode_utf16` yields the
/// same code units as `OsStr::encode_wide` does for valid UTF-8 input.
fn to_wstr(s: &str) -> Vec<u16> {
    s.encode_utf16().chain(once(0)).collect()
}
/// Converts a UTF-8 string into UTF-16 code units without a trailing NUL,
/// used for credential blobs whose length is passed explicitly.
fn to_wstr_no_null(s: &str) -> Vec<u16> {
    s.encode_utf16().collect()
}
|
// NOTE(review): this section appears to be svd2rust-generated (EXTI
// peripheral); if so, regenerate from the SVD rather than editing by hand —
// TODO confirm provenance.
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Interrupt mask register (EXTI_IMR)"]
    pub imr: IMR,
    #[doc = "0x04 - Event mask register (EXTI_EMR)"]
    pub emr: EMR,
    #[doc = "0x08 - Rising Trigger selection register (EXTI_RTSR)"]
    pub rtsr: RTSR,
    #[doc = "0x0c - Falling Trigger selection register (EXTI_FTSR)"]
    pub ftsr: FTSR,
    #[doc = "0x10 - Software interrupt event register (EXTI_SWIER)"]
    pub swier: SWIER,
    #[doc = "0x14 - Pending register (EXTI_PR)"]
    pub pr: PR,
}
#[doc = "IMR (rw) register accessor: Interrupt mask register (EXTI_IMR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`imr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`imr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`imr`]
module"]
pub type IMR = crate::Reg<imr::IMR_SPEC>;
#[doc = "Interrupt mask register (EXTI_IMR)"]
pub mod imr;
#[doc = "EMR (rw) register accessor: Event mask register (EXTI_EMR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`emr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`emr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`emr`]
module"]
pub type EMR = crate::Reg<emr::EMR_SPEC>;
#[doc = "Event mask register (EXTI_EMR)"]
pub mod emr;
#[doc = "RTSR (rw) register accessor: Rising Trigger selection register (EXTI_RTSR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rtsr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rtsr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`rtsr`]
module"]
pub type RTSR = crate::Reg<rtsr::RTSR_SPEC>;
#[doc = "Rising Trigger selection register (EXTI_RTSR)"]
pub mod rtsr;
#[doc = "FTSR (rw) register accessor: Falling Trigger selection register (EXTI_FTSR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ftsr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ftsr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`ftsr`]
module"]
pub type FTSR = crate::Reg<ftsr::FTSR_SPEC>;
#[doc = "Falling Trigger selection register (EXTI_FTSR)"]
pub mod ftsr;
#[doc = "SWIER (rw) register accessor: Software interrupt event register (EXTI_SWIER)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`swier::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`swier::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`swier`]
module"]
pub type SWIER = crate::Reg<swier::SWIER_SPEC>;
#[doc = "Software interrupt event register (EXTI_SWIER)"]
pub mod swier;
#[doc = "PR (rw) register accessor: Pending register (EXTI_PR)\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`pr::R`]. You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`pr::W`]. You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [`pr`]
module"]
pub type PR = crate::Reg<pr::PR_SPEC>;
#[doc = "Pending register (EXTI_PR)"]
pub mod pr;
|
// Copyright 2019-2020 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
use libp2p::core::multiaddr::{Multiaddr, Protocol};
use rand::seq::SliceRandom;
use std::collections::HashMap;
use sc_network::PeerId;
use sp_authority_discovery::AuthorityId;
/// The maximum number of authority connections initialized through the authority discovery module.
///
/// In other words the maximum size of the `authority` peerset priority group.
const MAX_NUM_AUTHORITY_CONN: usize = 10;

/// Cache for [`AuthorityId`] -> [`Vec<Multiaddr>`] and [`PeerId`] -> [`AuthorityId`] mappings.
pub(super) struct AddrCache {
    // Addresses are kept sorted on insertion, so lookups return a canonical ordering.
    authority_id_to_addresses: HashMap<AuthorityId, Vec<Multiaddr>>,
    // Reverse index: `PeerId` embedded in a stored multiaddr back to its authority.
    peer_id_to_authority_id: HashMap<PeerId, AuthorityId>,
}
impl AddrCache {
pub fn new() -> Self {
AddrCache {
authority_id_to_addresses: HashMap::new(),
peer_id_to_authority_id: HashMap::new(),
}
}
/// Inserts the given [`AuthorityId`] and [`Vec<Multiaddr>`] pair for future lookups by
/// [`AuthorityId`] or [`PeerId`].
pub fn insert(&mut self, authority_id: AuthorityId, mut addresses: Vec<Multiaddr>) {
if addresses.is_empty() {
return
}
// Insert into `self.peer_id_to_authority_id`.
let peer_ids =
addresses.iter().map(|a| peer_id_from_multiaddr(a)).filter_map(|peer_id| peer_id);
for peer_id in peer_ids {
self.peer_id_to_authority_id.insert(peer_id, authority_id.clone());
}
// Insert into `self.authority_id_to_addresses`.
addresses.sort_unstable_by(|a, b| a.as_ref().cmp(b.as_ref()));
self.authority_id_to_addresses.insert(authority_id, addresses);
}
/// Returns the number of authority IDs in the cache.
pub fn num_ids(&self) -> usize {
self.authority_id_to_addresses.len()
}
/// Returns the addresses for the given [`AuthorityId`].
pub fn get_addresses_by_authority_id(
&self,
authority_id: &AuthorityId,
) -> Option<&Vec<Multiaddr>> {
self.authority_id_to_addresses.get(&authority_id)
}
/// Returns the [`AuthorityId`] for the given [`PeerId`].
pub fn get_authority_id_by_peer_id(&self, peer_id: &PeerId) -> Option<&AuthorityId> {
self.peer_id_to_authority_id.get(peer_id)
}
/// Returns a single address for a random subset (maximum of [`MAX_NUM_AUTHORITY_CONN`]) of all
/// known authorities.
pub fn get_random_subset(&self) -> Vec<Multiaddr> {
let mut rng = rand::thread_rng();
let mut addresses = self
.authority_id_to_addresses
.iter()
.filter_map(|(_authority_id, addresses)| {
debug_assert!(!addresses.is_empty());
addresses.choose(&mut rng)
})
.collect::<Vec<&Multiaddr>>();
addresses.sort_unstable_by(|a, b| a.as_ref().cmp(b.as_ref()));
addresses.dedup();
addresses.choose_multiple(&mut rng, MAX_NUM_AUTHORITY_CONN).map(|a| (**a).clone()).collect()
}
/// Removes all [`PeerId`]s and [`Multiaddr`]s from the cache that are not related to the given
/// [`AuthorityId`]s.
pub fn retain_ids(&mut self, authority_ids: &Vec<AuthorityId>) {
// The below logic could be replaced by `BtreeMap::drain_filter` once it stabilized.
let authority_ids_to_remove = self
.authority_id_to_addresses
.iter()
.filter(|(id, _addresses)| !authority_ids.contains(id))
.map(|entry| entry.0)
.cloned()
.collect::<Vec<AuthorityId>>();
for authority_id_to_remove in authority_ids_to_remove {
// Remove other entries from `self.authority_id_to_addresses`.
let addresses = self.authority_id_to_addresses.remove(&authority_id_to_remove);
// Remove other entries from `self.peer_id_to_authority_id`.
let peer_ids = addresses
.iter()
.flatten()
.map(|a| peer_id_from_multiaddr(a))
.filter_map(|peer_id| peer_id);
for peer_id in peer_ids {
if let Some(id) = self.peer_id_to_authority_id.remove(&peer_id) {
debug_assert_eq!(authority_id_to_remove, id);
}
}
}
}
}
/// Extracts the [`PeerId`] from a trailing `/p2p/...` component of `addr`, if present.
fn peer_id_from_multiaddr(addr: &Multiaddr) -> Option<PeerId> {
    match addr.iter().last() {
        Some(Protocol::P2p(multihash)) => PeerId::from_multihash(multihash).ok(),
        _ => None,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use libp2p::multihash;
    use quickcheck::{Arbitrary, Gen, QuickCheck, TestResult};
    use rand::Rng;
    use sp_authority_discovery::{AuthorityId, AuthorityPair};
    use sp_core::crypto::Pair;

    /// Quickcheck wrapper producing a random authority keypair's public id.
    #[derive(Clone, Debug)]
    struct TestAuthorityId(AuthorityId);

    impl Arbitrary for TestAuthorityId {
        fn arbitrary<G: Gen>(g: &mut G) -> Self {
            let seed: [u8; 32] = g.gen();
            TestAuthorityId(AuthorityPair::from_seed_slice(&seed).unwrap().public())
        }
    }

    /// Quickcheck wrapper producing a multiaddr with a random trailing `/p2p/` peer id.
    #[derive(Clone, Debug)]
    struct TestMultiaddr(Multiaddr);

    impl Arbitrary for TestMultiaddr {
        fn arbitrary<G: Gen>(g: &mut G) -> Self {
            let seed: [u8; 32] = g.gen();
            let peer_id =
                PeerId::from_multihash(multihash::wrap(multihash::Code::Sha2_256, &seed)).unwrap();
            let multiaddr = "/ip6/2001:db8:0:0:0:0:0:2/tcp/30333"
                .parse::<Multiaddr>()
                .unwrap()
                .with(Protocol::P2p(peer_id.into()));
            TestMultiaddr(multiaddr)
        }
    }

    #[test]
    fn retains_only_entries_of_provided_authority_ids() {
        fn property(
            first: (TestAuthorityId, TestMultiaddr),
            second: (TestAuthorityId, TestMultiaddr),
            third: (TestAuthorityId, TestMultiaddr),
        ) -> TestResult {
            let first: (AuthorityId, Multiaddr) = ((first.0).0, (first.1).0);
            let second: (AuthorityId, Multiaddr) = ((second.0).0, (second.1).0);
            let third: (AuthorityId, Multiaddr) = ((third.0).0, (third.1).0);

            let mut cache = AddrCache::new();
            cache.insert(first.0.clone(), vec![first.1.clone()]);
            cache.insert(second.0.clone(), vec![second.1.clone()]);
            cache.insert(third.0.clone(), vec![third.1.clone()]);

            let subset = cache.get_random_subset();
            assert!(
                subset.contains(&first.1) &&
                subset.contains(&second.1) &&
                subset.contains(&third.1),
                "Expect initial subset to contain all authorities.",
            );
            assert_eq!(
                Some(&vec![third.1.clone()]),
                cache.get_addresses_by_authority_id(&third.0),
                "Expect `get_addresses_by_authority_id` to return addresses of third authority."
            );
            assert_eq!(
                Some(&third.0),
                cache.get_authority_id_by_peer_id(&peer_id_from_multiaddr(&third.1).unwrap()),
                "Expect `get_authority_id_by_peer_id` to return `AuthorityId` of third authority."
            );

            cache.retain_ids(&vec![first.0, second.0]);

            let subset = cache.get_random_subset();
            // Message fixed to match the `||` assertion (the old text claimed "both").
            assert!(
                subset.contains(&first.1) || subset.contains(&second.1),
                "Expected an address of the first or second authority."
            );
            assert!(!subset.contains(&third.1), "Did not expect address from third authority");
            // Message fixed: the assertion expects `None`, but the old text said
            // "to not return `None`" — the exact opposite of what is being checked.
            assert_eq!(
                None,
                cache.get_addresses_by_authority_id(&third.0),
                "Expect `get_addresses_by_authority_id` to return `None` for third authority."
            );
            assert_eq!(
                None,
                cache.get_authority_id_by_peer_id(&peer_id_from_multiaddr(&third.1).unwrap()),
                "Expect `get_authority_id_by_peer_id` to return `None` for third authority."
            );

            TestResult::passed()
        }

        QuickCheck::new().max_tests(10).quickcheck(property as fn(_, _, _) -> TestResult)
    }
}
|
use crate::*;
use std::ops::Deref;
use std::ops::{Index, IndexMut, Range};
const ARG_ARRAY_SIZE: usize = 8;
#[derive(Debug, Clone)]
/// Call arguments for a method invocation: positional values plus an optional
/// block and an optional keyword-argument value.
pub struct Args {
    // Block passed to the call, if any.
    pub block: Option<MethodRef>,
    // Keyword arguments collected into a single value, if any.
    pub kw_arg: Option<Value>,
    // Positional arguments (inline array for small counts, heap Vec otherwise).
    elems: ArgsArray,
}
impl Args {
    /// Creates an argument list pre-sized for `len` positional values.
    pub fn new(len: usize) -> Self {
        Self {
            block: None,
            kw_arg: None,
            elems: ArgsArray::new(len),
        }
    }

    /// Appends a positional argument.
    pub fn push(&mut self, val: Value) {
        self.elems.push(val);
    }

    /// Creates an empty argument list.
    pub fn new0() -> Self {
        Self {
            block: None,
            kw_arg: None,
            elems: ArgsArray::new0(),
        }
    }

    /// Creates an argument list holding a single value.
    pub fn new1(arg: Value) -> Self {
        Self {
            block: None,
            kw_arg: None,
            elems: ArgsArray::new1(arg),
        }
    }

    /// Creates an argument list holding two values.
    pub fn new2(arg0: Value, arg1: Value) -> Self {
        Self {
            block: None,
            kw_arg: None,
            elems: ArgsArray::new2(arg0, arg1),
        }
    }

    /// Creates an argument list holding three values and an optional block.
    pub fn new3(
        block: impl Into<Option<MethodRef>>,
        arg0: Value,
        arg1: Value,
        arg2: Value,
    ) -> Self {
        Self {
            block: block.into(),
            kw_arg: None,
            elems: ArgsArray::new3(arg0, arg1, arg2),
        }
    }

    /// Number of positional arguments.
    pub fn len(&self) -> usize {
        self.elems.len()
    }

    /// Consumes `self` and returns the positional arguments as a `Vec`.
    pub fn into_vec(self) -> Vec<Value> {
        match self.elems {
            ArgsArray::Vec(v) => v,
            ArgsArray::Array { ary, len } => ary[0..len].to_vec(),
        }
    }
}
// Indexing and slice access on `Args` simply delegate to the inner `ArgsArray`.
impl Index<usize> for Args {
    type Output = Value;
    fn index(&self, index: usize) -> &Self::Output {
        &self.elems[index]
    }
}
impl Index<Range<usize>> for Args {
    type Output = [Value];
    fn index(&self, range: Range<usize>) -> &Self::Output {
        &self.elems[range]
    }
}
impl IndexMut<usize> for Args {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        &mut self.elems[index]
    }
}
// Lets `Args` be used anywhere a `&[Value]` of the positional arguments is expected.
impl Deref for Args {
    type Target = [Value];
    fn deref(&self) -> &Self::Target {
        self.elems.deref()
    }
}
#[derive(Debug, Clone)]
/// Small-size-optimized storage for positional arguments: up to
/// `ARG_ARRAY_SIZE` values live inline; larger lists spill to a heap `Vec`.
enum ArgsArray {
    Array {
        // Number of initialized slots in `ary`.
        len: usize,
        ary: [Value; ARG_ARRAY_SIZE],
    },
    Vec(Vec<Value>),
}
impl ArgsArray {
    /// Creates storage for `len` arguments: inline array when `len` fits,
    /// otherwise a heap `Vec` pre-filled with `len` uninitialized values.
    fn new(len: usize) -> Self {
        if len <= ARG_ARRAY_SIZE {
            ArgsArray::Array {
                len,
                ary: [Value::uninitialized(); ARG_ARRAY_SIZE],
            }
        } else {
            ArgsArray::Vec(vec![Value::uninitialized(); len])
        }
    }

    /// Appends `val`, spilling the inline array to a heap `Vec` when it is full.
    fn push(&mut self, val: Value) {
        match self {
            ArgsArray::Vec(ref mut v) => v.push(val),
            ArgsArray::Array {
                ref mut len,
                ref mut ary,
            } => {
                if *len == ARG_ARRAY_SIZE {
                    // Inline storage is full: move the elements to the heap and append.
                    // Plain assignment replaces `std::mem::replace(self, ..)`, whose
                    // `#[must_use]` return value was silently discarded.
                    let mut v = ary.to_vec();
                    v.push(val);
                    *self = ArgsArray::Vec(v);
                } else {
                    ary[*len] = val;
                    *len += 1;
                }
            }
        }
    }

    /// Empty inline storage.
    fn new0() -> Self {
        ArgsArray::Array {
            len: 0,
            ary: [Value::uninitialized(); ARG_ARRAY_SIZE],
        }
    }

    /// Inline storage with one value.
    fn new1(arg: Value) -> Self {
        let mut ary = [Value::uninitialized(); ARG_ARRAY_SIZE];
        ary[0] = arg;
        ArgsArray::Array { len: 1, ary }
    }

    /// Inline storage with two values.
    fn new2(arg0: Value, arg1: Value) -> Self {
        let mut ary = [Value::uninitialized(); ARG_ARRAY_SIZE];
        ary[0] = arg0;
        ary[1] = arg1;
        ArgsArray::Array { len: 2, ary }
    }

    /// Inline storage with three values.
    fn new3(arg0: Value, arg1: Value, arg2: Value) -> Self {
        let mut ary = [Value::uninitialized(); ARG_ARRAY_SIZE];
        ary[0] = arg0;
        ary[1] = arg1;
        ary[2] = arg2;
        ArgsArray::Array { len: 3, ary }
    }

    /// Number of stored arguments.
    fn len(&self) -> usize {
        match self {
            ArgsArray::Array { len, .. } => *len,
            ArgsArray::Vec(v) => v.len(),
        }
    }
}
// Element and range indexing over whichever representation is active.
impl Index<usize> for ArgsArray {
    type Output = Value;
    fn index(&self, index: usize) -> &Self::Output {
        match self {
            // NOTE: inline variant indexes the raw array, not just the first `len`
            // slots — out-of-range indices up to ARG_ARRAY_SIZE return uninitialized
            // values rather than panicking.
            ArgsArray::Array { ary, .. } => &ary[index],
            ArgsArray::Vec(v) => &v[index],
        }
    }
}
impl Index<Range<usize>> for ArgsArray {
    type Output = [Value];
    fn index(&self, range: Range<usize>) -> &Self::Output {
        match self {
            ArgsArray::Array { ary, .. } => &ary[range],
            ArgsArray::Vec(v) => &v[range],
        }
    }
}
impl IndexMut<usize> for ArgsArray {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        match self {
            ArgsArray::Array { ary, .. } => &mut ary[index],
            ArgsArray::Vec(v) => &mut v[index],
        }
    }
}
// Borrows only the initialized prefix (`0..len`) of the inline array.
impl Deref for ArgsArray {
    type Target = [Value];
    fn deref(&self) -> &Self::Target {
        match self {
            ArgsArray::Array { len, ary } => &ary[0..*len],
            ArgsArray::Vec(v) => &v,
        }
    }
}
|
//! STM32L0x1 Clocks
//!
//! This module contains types representing the various clocks on the STM32L0x1, both internal and
//! external. Each clock's type implements a `configure` method (not a trait impl) that configures
//! the clock in the RCC peripheral.
use crate::power;
use crate::rcc;
use crate::time::Hertz;
// Sealed-trait pattern: only the clock types listed here may implement `ClkSrc`,
// so downstream crates cannot add new implementors.
mod private {
    pub trait Sealed {}
    impl Sealed for super::LowSpeedInternalRC {}
    impl Sealed for super::MediumSpeedInternalRC {}
    impl Sealed for super::HighSpeedInternal16RC {}
    impl Sealed for super::LowSpeedExternalOSC {}
}
/// Types of clocks that have a frequency.
///
/// Sealed (see `private::Sealed`): only the clock types in this module implement it.
pub trait ClkSrc: private::Sealed {
    /// Returns the frequency of the clock, if the clock is enabled, else `None`.
    fn freq(&self) -> Option<Hertz>;
}

/// Sources for the SYSCLK
pub enum SysClkSource {
    /// Medium-speed internal RC
    MSI,
    /// High-speed 16MHz internal RC (optionally 4 MHz)
    HSI16,
    // Not yet supported as SYSCLK sources:
    //HSE,
    //PLLCLK,
}
/// Low-speed internal RC
pub struct LowSpeedInternalRC {
    /// Should the LSI be powered on and released
    enable: bool,
}

impl LowSpeedInternalRC {
    /// Instantiate the LSI, disabled by default.
    pub(crate) fn new() -> Self {
        LowSpeedInternalRC { enable: false }
    }

    /// Request that the LSI be enabled
    pub fn enable(&mut self) {
        self.enable = true
    }

    /// Request that the LSI be disabled
    pub fn disable(&mut self) {
        self.enable = false
    }

    /// Apply the requested on/off state to RCC_CSR, busy-waiting until the
    /// register reads back the new state, and return the resulting frequency.
    ///
    /// NOTE(review): both waits poll the `LSION` enable bit rather than a ready
    /// flag (the MSI/HSI16 code below polls `msirdy`/`hsi16rdyf`) — confirm
    /// polling `LSION` instead of `LSIRDY` is intentional.
    pub fn configure(&self, csr: &mut rcc::CSR) -> Option<Hertz> {
        if self.enable {
            csr.inner().modify(|_, w| w.lsion().set_bit());
            while csr.inner().read().lsion().bit_is_clear() {}
        } else {
            csr.inner().modify(|_, w| w.lsion().clear_bit());
            while csr.inner().read().lsion().bit_is_set() {}
        }
        self.freq()
    }
}

impl ClkSrc for LowSpeedInternalRC {
    /// Nominal 37 kHz when enabled, else `None`.
    fn freq(&self) -> Option<Hertz> {
        if self.enable {
            Some(Hertz(37_000))
        } else {
            None
        }
    }
}
/// Onboard medium-speed internal RC clock source
pub struct MediumSpeedInternalRC {
    /// Request that the MSI RC be enabled/disabled
    enable: bool,
    /// The requested MSI RC frequency
    freq: MsiFreq,
}
impl MediumSpeedInternalRC {
    /// Create a new MSI RC instance with the given initial state and range.
    pub(crate) fn new(enable: bool, freq: MsiFreq) -> Self {
        MediumSpeedInternalRC { enable, freq }
    }

    /// Request that the MSI be enabled
    pub fn enable(&mut self) {
        self.enable = true;
    }

    /// Request that the MSI be disabled
    pub fn disable(&mut self) {
        self.enable = false
    }

    /// Set the desired MSI frequency range
    pub fn set_freq(&mut self, f: MsiFreq) {
        self.freq = f;
    }

    /// Convert the freq range to MSIRANGE bits (RM0377 7.3.2)
    pub fn bits(&self) -> u8 {
        self.freq as u8
    }

    /// Apply the requested range and on/off state to the RCC, busy-waiting on
    /// `MSIRDY` until the clock reaches the new state; returns the frequency.
    pub(crate) fn configure(&self, icscr: &mut rcc::ICSCR, cr: &mut rcc::CR) -> Option<Hertz> {
        if self.enable {
            // Range must be programmed before (or while) the clock is enabled.
            icscr.inner().modify(|_, w| w.msirange().bits(self.bits()));
            cr.inner().modify(|_, w| w.msion().set_bit());
            while cr.inner().read().msirdy().bit_is_clear() {}
        } else {
            cr.inner().modify(|_, w| w.msion().clear_bit());
            while cr.inner().read().msirdy().bit_is_set() {}
        }
        self.freq()
    }
}
impl ClkSrc for MediumSpeedInternalRC {
    /// Frequency for the configured MSI range, or `None` when the MSI is disabled.
    fn freq(&self) -> Option<Hertz> {
        if !self.enable {
            return None;
        }
        let hz = match self.freq {
            MsiFreq::Hz_65_536 => 65_536,
            MsiFreq::Hz_131_072 => 131_072,
            MsiFreq::Hz_262_144 => 262_144,
            MsiFreq::Hz_524_288 => 524_288,
            MsiFreq::Hz_1_048_000 => 1_048_000,
            MsiFreq::Hz_2_097_000 => 2_097_000,
            MsiFreq::Hz_4_194_000 => 4_194_000,
        };
        Some(Hertz(hz))
    }
}
#[repr(u8)]
#[derive(Copy, Clone)]
#[allow(non_camel_case_types)]
/// Available MSI RC frequency ranges.
///
/// Discriminants are the MSIRANGE register bit patterns (see `bits()` above).
pub enum MsiFreq {
    /// 65.536 kHz
    Hz_65_536 = 0b000,
    /// 131.072 kHz
    Hz_131_072 = 0b001,
    /// 262.144 kHz
    Hz_262_144 = 0b010,
    /// 524.288 kHz
    Hz_524_288 = 0b011,
    /// 1.048 MHz
    Hz_1_048_000 = 0b100,
    /// 2.097 MHz
    Hz_2_097_000 = 0b101,
    /// 4.194 MHz
    Hz_4_194_000 = 0b110,
}
/// Onboard high-speed internal RC clock source
pub struct HighSpeedInternal16RC {
    /// Request that the HSI16 be enabled/disabled
    enable: bool,
    /// Should the HSI16 clock be prescaled by 4
    div4: bool,
}

impl HighSpeedInternal16RC {
    /// Instantiate a new HSI16, disabled and undivided by default.
    pub(crate) fn new() -> Self {
        HighSpeedInternal16RC {
            enable: false,
            div4: false,
        }
    }

    /// Request that the HSI16 RC be enabled
    pub fn enable(&mut self) {
        self.enable = true;
    }

    /// Request that the HSI16 RC be disabled
    pub fn disable(&mut self) {
        self.enable = false
    }

    /// Return whether the HSI16 clock will be divided by 4
    pub fn is_div4(&self) -> bool {
        self.div4
    }

    /// Request that the HSI16 clock be divided by 4
    pub fn div4(&mut self) {
        self.div4 = true;
    }

    /// Request that the HSI16 clock not be divided
    pub fn no_div(&mut self) {
        self.div4 = false;
    }

    /// Applies the selection options to the configuration registers and turns the
    /// clock on, busy-waiting on the ready/divider flags at each step.
    pub(crate) fn configure(&self, icscr: &mut rcc::ICSCR, cr: &mut rcc::CR) -> Option<Hertz> {
        if self.enable {
            icscr.inner().modify(|_, w| w.hsi16trim().bits(0x10)); // 16 is the default trim value
            cr.inner().modify(|_, w| w.hsi16on().set_bit());
            while cr.inner().read().hsi16rdyf().bit_is_clear() {}
            // Apply (or clear) the /4 prescaler only after the clock is ready.
            if self.div4 {
                cr.inner().modify(|_, w| w.hsi16diven().set_bit());
                while cr.inner().read().hsi16divf().bit_is_clear() {}
            } else {
                cr.inner().modify(|_, w| w.hsi16diven().clear_bit());
                while cr.inner().read().hsi16divf().bit_is_set() {}
            }
        } else {
            cr.inner().modify(|_, w| w.hsi16on().clear_bit());
            while cr.inner().read().hsi16rdyf().bit_is_set() {}
        }
        self.freq()
    }
}
impl ClkSrc for HighSpeedInternal16RC {
    /// 16 MHz when enabled (4 MHz with the /4 prescaler), `None` when disabled.
    fn freq(&self) -> Option<Hertz> {
        match (self.enable, self.div4) {
            (false, _) => None,
            (true, true) => Some(Hertz(4_000_000)),
            (true, false) => Some(Hertz(16_000_000)),
        }
    }
}
#[derive(Default)]
/// Optional external low-speed 32 kHz oscillator
pub struct LowSpeedExternalOSC {
    /// Indicate that the LSE should be turned on
    enable: bool,
}

impl LowSpeedExternalOSC {
    /// Create a new LSE, enabled by default (unlike `Default`, which yields `enable: false`).
    pub fn new() -> Self {
        LowSpeedExternalOSC { enable: true }
    }

    /// Indicate that the LSE should be turned on
    pub fn enable(&mut self) {
        self.enable = true;
    }

    /// Indicate that the LSE should not be turned on
    pub fn disable(&mut self) {
        self.enable = false;
    }

    /// Apply the requested on/off state, busy-waiting until RCC_CSR reads it back.
    ///
    /// LSEON lives in the RTC domain, so the write is wrapped in `dbp_context`
    /// (disable backup-domain write protection for its duration).
    pub(crate) fn configure<VDD, VCORE, RTC>(
        &self,
        csr: &mut rcc::CSR,
        pwr: &mut power::Power<VDD, VCORE, RTC>,
    ) -> Option<Hertz>
    where
        VCORE: power::Vos,
    {
        pwr.dbp_context(|| {
            if self.enable {
                csr.inner().modify(|_, w| w.lseon().set_bit());
                while csr.inner().read().lseon().bit_is_clear() {}
            } else {
                csr.inner().modify(|_, w| w.lseon().clear_bit());
                while csr.inner().read().lseon().bit_is_set() {}
            }
        });
        self.freq()
    }
}

impl ClkSrc for LowSpeedExternalOSC {
    /// 32.768 kHz when enabled, else `None`.
    fn freq(&self) -> Option<Hertz> {
        if self.enable {
            Some(Hertz(32_768))
        } else {
            None
        }
    }
}
/// Available clocks with which a USART can be driven.
pub enum USARTClkSource {
    /// U(S)ART-specific peripheral clock (PCLK1, PCLK2)
    PCLK,
    /// Core system clock
    SYSCLK,
    /// High-speed 16 MHz RC
    HSI16,
    /// Low-speed external osc
    LSE,
}

/// Available clock sources that can drive the low-power timer.
pub enum LPTimerClkSource {
    /// Low-speed external 32kHz oscillator
    Lse,
    /// High-speed internal 16 MHz RC
    Hsi16,
    /// SYSCLK clock
    Sysclk,
    /// PCLK
    Pclk,
}
|
/// Demonstrates move vs. copy vs. borrow semantics.
pub fn run() {
    let _arr1 = [1, 2, 3];
    // Fixed-size arrays of `Copy` elements are themselves `Copy`:
    // this assignment copies, and `_arr1` remains usable.
    let _arr2 = _arr1;
    let vec1 = vec![1, 2, 3];
    // `Vec` is not `Copy`; borrowing instead of moving keeps `vec1` usable below.
    let vec2 = &vec1;
    println!("Values: {:?}", (&vec1, vec2))
}
|
use irc::client::Client;
use irc::client::data::user::AccessLevel;
use irc::proto::{Command, Message};
use std::str;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::collections::HashMap;
use std::sync::Arc;
use tokio::process;
use async_recursion::async_recursion;
use once_cell::sync::Lazy;
use regex::Regex;
use anyhow::{anyhow, bail, ensure, Result};
use parking_lot::Mutex;
use lazy_static::lazy_static;
use directories::ProjectDirs;
use super::config::{Rtd, HighlightMode};
lazy_static! {
    // Memoizes descriptor canonicalization so repeat lookups avoid refetching pages.
    // Pre-seeded from the `channel-user-map` config file, which maps channel ids to
    // the legacy `/user/` folders already present in storage.
    static ref CANONICALIZATION_CACHE: Arc<Mutex<HashMap<YoutubeDescriptor, CanonicalizedYoutubeDescriptor>>> = {
        // Capacity sized for the known map file (~230k entries) to avoid rehashing at startup.
        let mut map = HashMap::with_capacity(230000);
        // Load in channel-user-map because we have a lot of existing /user/ folders,
        // from before it became impossible to reliably find the /user/ for a channel.
        let dirs = ProjectDirs::from("org", "", "youtube-irc-bot").unwrap();
        let channel_user_map_path = dirs.config_dir().join("channel-user-map");
        // NOTE(review): these `unwrap`s abort on first cache use if the map file is
        // missing or malformed — confirm that should be fatal rather than degraded.
        let file = File::open(channel_user_map_path).unwrap();
        let reader = BufReader::new(file);
        for line in reader.lines() {
            let line = line.unwrap();
            // Each line is `<channel-id>\t<user-folder>`.
            let (channel, user) = line.split_once('\t').unwrap();
            let canonicalized = CanonicalizedYoutubeDescriptor {
                kind: FetchType::User,
                id: String::from(user),
                channel_id: Some(String::from(channel)),
                folder: String::from(user),
            };
            map.insert(YoutubeDescriptor::Channel(channel.into()), canonicalized);
        }
        Arc::new(Mutex::new(map))
    };
}
fn canonicalization_cache_entries_count() -> usize {
let cache = Arc::clone(&CANONICALIZATION_CACHE);
let cache = cache.lock();
cache.len()
}
/// Fetches `url` through the external `get-youtube-page` helper and returns the page body.
async fn contents_for_url(url: &str) -> Result<String> {
    let output = process::Command::new("get-youtube-page").arg(url).output().await?;
    Ok(str::from_utf8(&output.stdout)?.to_string())
}
/// Extracts the `UC…` channel id from a YouTube page, trying several known markup shapes.
fn extract_channel_id(page_contents: &str, url: &str) -> Result<String> {
    static ITEMPROP_CHANNEL_ID: &Lazy<Regex> = lazy_regex!(r#"<meta itemprop="channelId" content="(UC[-_0-9a-zA-Z]{22})">"#);
    static META_OG_URL: &Lazy<Regex> = lazy_regex!(r#"<meta property="og:url" content="https://www.youtube.com/channel/(UC[-_0-9a-zA-Z]{22})">"#);
    static ITEMPROP_URL: &Lazy<Regex> = lazy_regex!(r#"<link itemprop="url" href="https://www.youtube.com/channel/(UC[-_0-9a-zA-Z]{22})">"#);
    for regexp in [ITEMPROP_CHANNEL_ID, META_OG_URL, ITEMPROP_URL] {
        // The old guard `captures.len() >= 1` was always true (group 0 is the whole
        // match); look up capture group 1 directly instead of unwrapping after it.
        if let Some(id) = regexp.captures(page_contents).and_then(|c| c.get(1)) {
            return Ok(String::from(id.as_str()));
        }
    }
    bail!("Could not get channel identifier for {}", url)
}
#[derive(Debug, PartialEq, Eq, Clone)]
/// What kind of YouTube resource a canonicalized descriptor points at.
pub enum FetchType {
    User,
    Channel,
    Playlist,
    Video,
    Folder,
}

#[derive(Debug, PartialEq, Eq, Clone)]
/// A descriptor resolved to its storage identity (see `YoutubeDescriptor::canonicalize`).
pub struct CanonicalizedYoutubeDescriptor {
    // Resource id: user name, `UC…` channel id, playlist id, or video id per `kind`.
    id: String,
    // `UC…` channel id when known (always for Channel/Video; from the map for User).
    channel_id: Option<String>,
    // Storage folder the download is archived under.
    folder: String,
    kind: FetchType,
}
impl CanonicalizedYoutubeDescriptor {
    /// URL from which this descriptor's content is fetched.
    pub fn to_url(&self) -> String {
        match self.kind {
            FetchType::Video => format!("https://www.youtube.com/watch?v={}", self.id),
            FetchType::Playlist => format!("https://www.youtube.com/playlist?list={}", self.id),
            FetchType::Channel => format!("https://www.youtube.com/channel/{}/videos", self.id),
            FetchType::User => format!("https://www.youtube.com/user/{}/videos", self.id),
            FetchType::Folder => unimplemented!(),
        }
    }

    /// Storage folder this descriptor downloads into.
    pub fn folder(&self) -> String {
        self.folder.clone()
    }

    /// The `UC…` channel id, when one is known for this descriptor.
    pub fn channel_id(&self) -> Option<String> {
        self.channel_id.clone()
    }

    /// Downloader-session identifier: `folder-videoid` for single videos,
    /// the bare folder name for everything else.
    pub fn task_name(&self) -> String {
        match self.kind {
            FetchType::Video => format!("{}-{}", &self.folder, self.id),
            _ => self.folder.clone(),
        }
    }
}
/// Normalizes the many YouTube URL spellings to the canonical
/// `https://www.youtube.com/` form.
///
/// Rewrites are applied in order: the nocookie host first, then the scheme,
/// then the remaining host aliases (which all assume `https://`).
fn fix_youtube_url(url: &str) -> String {
    const REWRITES: &[(&str, &str)] = &[
        ("youtube-nocookie.com/", "youtube.com/"),
        ("http://", "https://"),
        ("https://music.youtube.com/", "https://www.youtube.com/"),
        ("https://m.youtube.com/", "https://www.youtube.com/"),
        ("https://youtube.com/", "https://www.youtube.com/"),
        ("https://youtu.be/", "https://www.youtube.com/"),
    ];
    let mut fixed = url.to_string();
    for (from, to) in REWRITES {
        fixed = fixed.replace(from, to);
    }
    fixed
}
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
/// A YouTube resource as parsed from a URL, before canonicalization.
pub enum YoutubeDescriptor {
    /// Legacy `/user/<name>` page.
    User(String),
    /// `/channel/UC…` page.
    Channel(String),
    /// `/c/<custom-name>` page.
    Custom(String),
    /// Bare `youtube.com/<handle>` path component.
    Short(String),
    /// `playlist?list=…` id.
    Playlist(String),
    /// 11-character video id.
    Video(String),
}
impl YoutubeDescriptor {
    /// Parses a (possibly non-canonical) YouTube URL into a descriptor.
    ///
    /// The regexes are tried in a fixed order; WATCH_RE runs late because its
    /// optional `watch…v=` prefix would otherwise match many non-video paths,
    /// and SHORT_RE is the last resort for bare `youtube.com/<handle>` URLs.
    pub fn from_url(url: &str) -> Result<YoutubeDescriptor> {
        // Allow e.g. https://youtu.be/jmPZztKIFf4 and https://invidio.us/jmPZztKIFf4
        static WATCH_RE: &Lazy<Regex> = lazy_regex!(r#"/(?:watch.*[\?&]v=)?([-_A-Za-z0-9]{11})([#\&\?].*)?\z"#);
        static EMBED_RE: &Lazy<Regex> = lazy_regex!(r#"/embed/([-_A-Za-z0-9]{11})([#\&\?].*)?\z"#);
        static PLAYLIST_RE: &Lazy<Regex> = lazy_regex!(r#"/playlist.*[\?&]list=([A-Z]{2}[-_A-Za-z0-9]{16,128})([#\&].*)?\z"#);
        static CHANNEL_RE: &Lazy<Regex> = lazy_regex!(r#"/channel/(UC[-_A-Za-z0-9]{22})([/#\?].*)?\z"#);
        static USER_RE: &Lazy<Regex> = lazy_regex!(r#"/user/([A-Za-z0-9]{1,20})([/#\&\?].*)?\z"#);
        static CUSTOM_RE: &Lazy<Regex> = lazy_regex!(r#"/c/([\S+]*?)([/#\&\?].*)?\z"#);
        static SHORT_RE: &Lazy<Regex> = lazy_regex!(r#"\Ahttps://www\.youtube\.com/([^/]+)"#);
        let url = fix_youtube_url(url);
        if let Some(matches) = EMBED_RE.captures(&url) {
            return Ok(YoutubeDescriptor::Video(matches.get(1).unwrap().as_str().to_string()));
        }
        if let Some(matches) = PLAYLIST_RE.captures(&url) {
            return Ok(YoutubeDescriptor::Playlist(matches.get(1).unwrap().as_str().to_string()));
        }
        if let Some(matches) = CHANNEL_RE.captures(&url) {
            return Ok(YoutubeDescriptor::Channel(matches.get(1).unwrap().as_str().to_string()));
        }
        if let Some(matches) = USER_RE.captures(&url) {
            return Ok(YoutubeDescriptor::User(matches.get(1).unwrap().as_str().to_string()));
        }
        if let Some(matches) = CUSTOM_RE.captures(&url) {
            return Ok(YoutubeDescriptor::Custom(matches.get(1).unwrap().as_str().to_string()));
        }
        if let Some(matches) = WATCH_RE.captures(&url) {
            return Ok(YoutubeDescriptor::Video(matches.get(1).unwrap().as_str().to_string()));
        }
        if let Some(matches) = SHORT_RE.captures(&url) {
            let first = matches.get(1).unwrap().as_str();
            // Reserved path prefixes are not handles; anything else is treated as one.
            match first {
                "channel" | "user" | "c" | "playlist" | "watch" => {},
                _ => return Ok(YoutubeDescriptor::Short(first.to_string())),
            }
        }
        bail!("Unsupported URL: {}", url)
    }

    /// Canonical URL for this descriptor.
    pub fn to_url(&self) -> String {
        match self {
            YoutubeDescriptor::User(id) => format!("https://www.youtube.com/user/{id}/videos"),
            YoutubeDescriptor::Channel(id) => format!("https://www.youtube.com/channel/{id}/videos"),
            YoutubeDescriptor::Custom(id) => format!("https://www.youtube.com/c/{id}/videos"),
            YoutubeDescriptor::Short(id) => format!("https://www.youtube.com/{id}/videos"),
            YoutubeDescriptor::Playlist(id) => format!("https://www.youtube.com/playlist?list={id}"),
            YoutubeDescriptor::Video(id) => format!("https://www.youtube.com/watch?v={id}"),
        }
    }

    /// Turn Channel into User if possible, because that's how our data storage works.
    /// Turn User into properly-cased User.
    ///
    /// Results are memoized in `CANONICALIZATION_CACHE`; fetching a page is only
    /// needed for videos and for user/custom/short handles. The mutex guard is
    /// confined to inner scopes so it is never held across an `.await`.
    #[async_recursion(?Send)]
    pub async fn canonicalize(&self) -> Result<CanonicalizedYoutubeDescriptor> {
        {
            let cache = Arc::clone(&CANONICALIZATION_CACHE);
            let cache = cache.lock();
            if let Some(canonicalized) = cache.get(self) {
                return Ok(canonicalized.clone());
            }
        }
        let canonicalized = match self {
            YoutubeDescriptor::Video(id) => {
                // Fetch the watch page to learn the owning channel, then recurse
                // (bounded: the Channel arm never fetches) to find the folder.
                let contents = contents_for_url(&self.to_url()).await?;
                let channel_id = extract_channel_id(&contents, &self.to_url())?;
                let folder = YoutubeDescriptor::Channel(channel_id.clone()).canonicalize().await?.folder();
                CanonicalizedYoutubeDescriptor { kind: FetchType::Video, id: id.clone(), channel_id: Some(channel_id), folder }
            }
            YoutubeDescriptor::Playlist(id) => {
                CanonicalizedYoutubeDescriptor { kind: FetchType::Playlist, id: id.clone(), channel_id: None, folder: id.clone() }
            }
            YoutubeDescriptor::Channel(channel_id) => {
                // If we have a channel id, we assume it's correct and don't hit the /channel/ page
                // because it may be geoblocked.
                let cache = Arc::clone(&CANONICALIZATION_CACHE);
                let cache = cache.lock();
                if let Some(canonicalized) = cache.get(&YoutubeDescriptor::Channel(channel_id.clone())) {
                    return Ok(canonicalized.clone());
                }
                CanonicalizedYoutubeDescriptor { kind: FetchType::Channel, id: channel_id.clone(), channel_id: Some(channel_id.clone()), folder: channel_id.clone() }
            }
            YoutubeDescriptor::User(_) | YoutubeDescriptor::Custom(_) | YoutubeDescriptor::Short(_) => {
                // Resolve the handle to a channel id, then prefer any cached
                // (e.g. legacy /user/ folder) entry for that channel.
                let contents = contents_for_url(&self.to_url()).await?;
                let channel_id = extract_channel_id(&contents, &self.to_url())?;
                let cache = Arc::clone(&CANONICALIZATION_CACHE);
                let cache = cache.lock();
                if let Some(canonicalized) = cache.get(&YoutubeDescriptor::Channel(channel_id.clone())) {
                    return Ok(canonicalized.clone());
                }
                CanonicalizedYoutubeDescriptor { kind: FetchType::Channel, id: channel_id.clone(), channel_id: Some(channel_id.clone()), folder: channel_id }
            }
        };
        // Memoize under the original (pre-canonicalization) descriptor.
        let cache = Arc::clone(&CANONICALIZATION_CACHE);
        let mut cache = cache.lock();
        cache.insert(self.clone(), canonicalized.clone());
        Ok(canonicalized)
    }
}
/// Kicks off an archive job for `descriptor`, unless it is already running or
/// the task/starter limits forbid it. Returns a human-readable status line.
async fn parallel_archive(original_url: &str, descriptor: &CanonicalizedYoutubeDescriptor) -> Result<String> {
    let folder = descriptor.folder();
    let task_name = descriptor.task_name();
    // Refuse duplicate work: one downloader session per task name.
    let sessions = get_downloader_sessions().await?;
    if let Some(_session) = sessions.iter().find(|session| session.identifier == task_name) {
        return Ok(format!("Not grabbing {} -> {} because it is already running in task {task_name}", &original_url, &folder));
    }
    match descriptor.kind {
        FetchType::Video => {
            // Single videos count against the task limit; limit 0 disables submissions.
            let limit = get_tasks_limit().await?;
            if limit == 0 {
                return Ok(format!("Can't archive {} because submissions are currently disabled", &original_url));
            }
            let count = count_tasks().await?;
            if count >= limit {
                return Ok(format!("Can't archive {} because {count}/{limit} tasks are running", &original_url));
            }
            let output = process::Command::new("grab-youtube-video")
                .arg(&folder).arg(&descriptor.to_url())
                .output().await?;
            // Validate that the helper produced UTF-8; its output is otherwise discarded.
            let _ = str::from_utf8(&output.stdout)?;
        },
        FetchType::Channel | FetchType::User | FetchType::Folder | FetchType::Playlist => {
            // Whole-folder grabs count against the separate task-starter limit.
            let limit = get_task_starters_limit().await?;
            if limit == 0 {
                return Ok(format!("Can't archive {} -> {} because submissions are currently disabled", &original_url, &folder));
            }
            let count = count_task_starters().await?;
            if count >= limit {
                return Ok(format!("Can't archive {} -> {} because {count}/{limit} task starters are running", &original_url, &folder));
            }
            let output = process::Command::new("grab-youtube-channel")
                .arg(&folder)
                .arg("999999")
                .env("GET_NEW_VARIANT", "parallel-get-new")
                .output().await?;
            let _ = str::from_utf8(&output.stdout)?;
        }
    }
    Ok(format!("Grabbing {} -> {}; check {} later", &original_url, &folder, logs_url(&folder)))
}
fn assert_valid_task_name(task: &str) -> Result<()> {
static TASK_NAME_RE: &Lazy<Regex> = lazy_regex!(r"\A[-_A-Za-z0-9]+\z");
ensure!(TASK_NAME_RE.is_match(task), "Invalid task name: {}", task);
Ok(())
}
/// Aborts the named downloader task via the `abort-youtube-task` helper.
async fn abort(task: &str) -> Result<String> {
    assert_valid_task_name(task)?;
    process::Command::new("abort-youtube-task")
        .arg(task)
        .output().await?;
    Ok(format!("Aborted {}", task))
}

/// Removes `folder` from the download lists via `remove-folder-from-lists`.
async fn remove_folder_from_lists(folder: &str) -> Result<String> {
    assert_valid_task_name(folder)?;
    process::Command::new("remove-folder-from-lists")
        .arg(folder)
        .output().await?;
    Ok(format!("Removed {} from lists", folder))
}
/// Web URL where the download logs for `folder` can be inspected.
fn logs_url(folder: &str) -> String {
    let mut url = String::from("https://ya.borg.xyz/logs/dl/");
    url.push_str(folder);
    url.push('/');
    url
}
/// Reports how many videos are stashed for `folder` (plus dead captures and,
/// when a channel id is known, YouTube's own count) as a one-line summary.
async fn check_folder(folder: &str, channel_id: Option<&str>) -> Result<String> {
    assert_valid_task_name(folder)?;
    let videos_fut = async {
        get_file_listing(folder).await
            .map_err(|_| anyhow!("Internal error listing files for {}", folder))
    };
    let n_dead_fut = dead_capture_count(folder);
    let n_youtube_fut = async {
        // Best effort: a failed YouTube count just leaves this prefix empty.
        let mut n_youtube_text = String::new();
        if let Some(channel_id) = channel_id {
            let n_youtube = get_video_count_at_youtube(channel_id).await;
            if let Ok(count) = n_youtube {
                let word = if count == 1 { "video" } else { "videos" };
                n_youtube_text = format!("YouTube has {count}+ {word}; ");
            }
        }
        Ok(n_youtube_text)
    };
    // Run all three lookups concurrently; fail fast on the first hard error.
    let (videos, n_dead, n_youtube_text) = futures::try_join!(videos_fut, n_dead_fut, n_youtube_fut)?;
    let word = if videos.len() == 1 { "video" } else { "videos" };
    let latest_videos = videos.iter().take(4).collect::<Vec<_>>();
    Ok(format!("{n_youtube_text}stash has {} {word} ({n_dead} dead) in folder {} ({}); latest {latest_videos:?}", videos.len(), &folder, logs_url(folder)))
}
/// Counts dead captures recorded for `folder` via the `dead-capture-count` helper.
async fn dead_capture_count(folder: &str) -> Result<u32> {
    let output = process::Command::new("dead-capture-count").arg(folder).output().await?;
    let stdout = str::from_utf8(&output.stdout)?;
    Ok(stdout.trim().parse()?)
}
/// Summarizes what the stash holds for `descriptor`: per-video capture and
/// comment counts for videos, otherwise a folder summary via `check_folder`.
async fn check_stash(descriptor: &YoutubeDescriptor) -> Result<String> {
    if let YoutubeDescriptor::Video(video_id) = descriptor {
        let output = process::Command::new("tubekit").arg("capture").arg("count").arg("--").arg(video_id).output().await?;
        let count: u32 = str::from_utf8(&output.stdout)?.trim().parse()?;
        let word = if count == 1 { "capture" } else { "captures" };
        let comments_s = if count > 0 {
            // One comment count per capture, newline-separated; the last line is
            // the most recent capture and drives the singular/plural word choice.
            let output = process::Command::new("get-capture-comment-counts").arg(video_id).output().await?;
            let output_s = str::from_utf8(&output.stdout)?.trim().to_string();
            let counts: Vec<&str> = output_s.split('\n').collect();
            if let Some(&last_count) = counts.last() {
                let word = if last_count == "1" { "comment" } else { "comments" };
                format!(" ({} {word})", counts.join(", "))
            } else {
                " (? comments)".to_string()
            }
        } else {
            String::from("")
        };
        Ok(format!("stash has {count} {word}{comments_s} of {}", descriptor.to_url()))
    } else {
        // Non-video descriptors are reported at folder granularity.
        let canonicalized = descriptor.canonicalize().await?;
        let folder = canonicalized.folder();
        check_folder(&folder, canonicalized.channel_id.as_deref()).await
    }
}
/// Lists the video files stored for `folder` via `get-video-files-for-folder`,
/// one file per output line.
async fn get_file_listing(folder: &str) -> Result<Vec<String>> {
    let output = process::Command::new("get-video-files-for-folder").arg(folder).output().await?;
    let listing = str::from_utf8(&output.stdout)?
        .lines()
        .map(|line| line.to_string())
        .collect();
    Ok(listing)
}
#[derive(Debug)]
/// A currently running downloader session, as reported by the session lister.
struct DownloaderSession {
    // Task name; matched against `CanonicalizedYoutubeDescriptor::task_name()`.
    identifier: String,
    #[allow(dead_code)]
    start_time: u64,
}
/// Kills every running YouTube script via `stop-all-youtube-scripts`.
async fn stop_scripts() -> Result<String> {
    let _output = process::Command::new("stop-all-youtube-scripts").output().await?;
    Ok(String::from("Stopped all scripts"))
}
/// Reads the configured maximum number of concurrent YouTube tasks.
async fn get_tasks_limit() -> Result<usize> {
    let output = process::Command::new("get-max-youtube-tasks").output().await?;
    Ok(str::from_utf8(&output.stdout)?.trim().parse()?)
}
/// Reads the configured maximum number of concurrent YouTube task starters.
async fn get_task_starters_limit() -> Result<usize> {
    let output = process::Command::new("get-max-youtube-task-starters").output().await?;
    Ok(str::from_utf8(&output.stdout)?.trim().parse()?)
}
/// Counts the YouTube tasks currently running.
async fn count_tasks() -> Result<usize> {
    let output = process::Command::new("count-youtube-tasks").output().await?;
    Ok(str::from_utf8(&output.stdout)?.trim().parse()?)
}
/// Counts the YouTube task starters currently running.
async fn count_task_starters() -> Result<usize> {
    let output = process::Command::new("count-youtube-task-starters").output().await?;
    Ok(str::from_utf8(&output.stdout)?.trim().parse()?)
}
/// Builds the one-line "!status" summary of running tasks vs. their limits.
async fn get_status() -> Result<String> {
    let tasks = count_tasks().await?;
    let max_tasks = get_tasks_limit().await?;
    let starters = count_task_starters().await?;
    let max_starters = get_task_starters_limit().await?;
    Ok(format!("{tasks}/{max_tasks} tasks, {starters}/{max_starters} task starters running"))
}
/// Queries (via helper script) how many videos `channel_id` has on YouTube.
async fn get_video_count_at_youtube(channel_id: &str) -> Result<u32> {
    let output = process::Command::new("get-video-count-at-youtube").arg(channel_id).output().await?;
    Ok(str::from_utf8(&output.stdout)?.trim().parse()?)
}
/// PMs the web-UI credentials to `user` and returns the public channel reply.
///
/// A failure to deliver the PM is propagated as an error instead of the
/// previous `unwrap()`, which would have crashed the bot on a send failure.
fn send_password(user: &str, client: &Client, rtd: &Rtd) -> Result<String> {
    let response = format!("Username: {} Password: {}", rtd.conf.web.username, rtd.conf.web.password);
    client.send_privmsg(user, response)?;
    Ok("I PMed it to you.".into())
}
/// Returns the "!help" usage line listing every supported command.
fn get_help() -> Result<String> {
    let entries = [
        "!help",
        "!status",
        "!password",
        "!s <URL or folder>",
        "!a <URL>",
        "!sa <URL>",
        "!abort <task>",
        "!delist <task>",
        "!stopscripts",
        "!proxycc",
        "!proxy <host>",
        "!reproxy <host>",
        "If you need help with something, just ask and keep your client connected for a long time",
    ];
    Ok(format!("Usage: {}", entries.join(" | ")))
}
/// Lists the country codes available for YouTube proxies.
async fn get_proxy_country_codes() -> Result<String> {
    let output = process::Command::new("get-youtube-proxy-countries").output().await?;
    let countries = str::from_utf8(&output.stdout)?;
    Ok(format!("Countries: {}", countries))
}
/// Reports the proxy currently configured on `host`.
async fn get_proxy(host: &str) -> Result<String> {
    let output = process::Command::new("get-youtube-proxy").arg(host).output().await?;
    let proxy = str::from_utf8(&output.stdout)?;
    Ok(format!("Proxy on {}: {}", host, proxy))
}
/// Kicks off proxy rotation on `host`; the helper's output is discarded.
async fn rotate_proxy(host: &str) -> Result<String> {
    process::Command::new("rotate-youtube-proxy").arg(host).output().await?;
    Ok(format!("Started proxy rotation on {host}"))
}
/// Enumerates downloader tmux sessions (those named "YouTube-*").
///
/// Asks tmux for "<creation unix time> <session name>" per line. Lines that
/// do not match that shape are skipped instead of panicking — the previous
/// version called `unwrap()` on both the split and the parse, so a single
/// malformed line from tmux would have crashed the bot.
async fn get_downloader_sessions() -> Result<Vec<DownloaderSession>> {
    let output = process::Command::new("tmux")
        .arg("list-sessions")
        .arg("-F")
        .arg("#{session_created} #S")
        .output().await?;
    let stdout_utf8 = str::from_utf8(&output.stdout)?;
    let sessions = stdout_utf8.lines()
        .filter_map(|line| {
            let (created, session_name) = line.split_once(' ')?;
            let start_time = created.parse::<u64>().ok()?;
            // Only sessions our downloader created are of interest.
            let identifier = session_name.strip_prefix("YouTube-")?.to_string();
            Some(DownloaderSession { identifier, start_time })
        }).collect();
    Ok(sessions)
}
// Styled/homoglyph alphabets for nick highlighting. Each set is index-aligned
// with ALPHA_REGULAR: the character at position i in ALPHA_REGULAR maps to the
// character at position i in the chosen set (see replace_matching_characters).
// NOTE(review): ALPHA_CYRILLIC mixes Cyrillic/Greek lookalikes with plain
// Latin letters where no convincing homoglyph exists — presumably intentional.
const ALPHA_REGULAR: &str = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
const ALPHA_CYRILLIC: &str = "АВСDЕFGHІЈКLМNОРQRSТUVWХYZаbсdеfghіјklmnорqrstuvwхyz";
const ALPHA_FRAKTUR: &str = "𝔄𝔅ℭ𝔇𝔈𝔉𝔊ℌℑ𝔍𝔎𝔏𝔐𝔑𝔒𝔓𝔔ℜ𝔖𝔗𝔘𝔙𝔚𝔛𝔜ℨ𝔞𝔟𝔠𝔡𝔢𝔣𝔤𝔥𝔦𝔧𝔨𝔩𝔪𝔫𝔬𝔭𝔮𝔯𝔰𝔱𝔲𝔳𝔴𝔵𝔶𝔷";
const ALPHA_FRAKTUR_BOLD: &str = "𝕬𝕭𝕮𝕯𝕰𝕱𝕲𝕳𝕴𝕵𝕶𝕷𝕸𝕹𝕺𝕻𝕼𝕽𝕾𝕿𝖀𝖁𝖂𝖃𝖄𝖅𝖆𝖇𝖈𝖉𝖊𝖋𝖌𝖍𝖎𝖏𝖐𝖑𝖒𝖓𝖔𝖕𝖖𝖗𝖘𝖙𝖚𝖛𝖜𝖝𝖞𝖟";
const ALPHA_SCRIPT: &str = "𝒜ℬ𝒞𝒟ℰℱ𝒢ℋℐ𝒥𝒦ℒℳ𝒩𝒪𝒫𝒬ℛ𝒮𝒯𝒰𝒱𝒲𝒳𝒴𝒵𝒶𝒷𝒸𝒹ℯ𝒻ℊ𝒽𝒾𝒿𝓀𝓁𝓂𝓃ℴ𝓅𝓆𝓇𝓈𝓉𝓊𝓋𝓌𝓍𝓎𝓏";
const ALPHA_BOLD: &str = "𝐀𝐁𝐂𝐃𝐄𝐅𝐆𝐇𝐈𝐉𝐊𝐋𝐌𝐍𝐎𝐏𝐐𝐑𝐒𝐓𝐔𝐕𝐖𝐗𝐘𝐙𝐚𝐛𝐜𝐝𝐞𝐟𝐠𝐡𝐢𝐣𝐤𝐥𝐦𝐧𝐨𝐩𝐪𝐫𝐬𝐭𝐮𝐯𝐰𝐱𝐲𝐳";
const ALPHA_ITALIC: &str = "𝐴𝐵𝐶𝐷𝐸𝐹𝐺𝐻𝐼𝐽𝐾𝐿𝑀𝑁𝑂𝑃𝑄𝑅𝑆𝑇𝑈𝑉𝑊𝑋𝑌𝑍𝑎𝑏𝑐𝑑𝑒𝑓𝑔ℎ𝑖𝑗𝑘𝑙𝑚𝑛𝑜𝑝𝑞𝑟𝑠𝑡𝑢𝑣𝑤𝑥𝑦𝑧";
const ALPHA_BOLD_ITALIC: &str = "𝑨𝑩𝑪𝑫𝑬𝑭𝑮𝑯𝑰𝑱𝑲𝑳𝑴𝑵𝑶𝑷𝑸𝑹𝑺𝑻𝑼𝑽𝑾𝑿𝒀𝒁𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛";
/// Maps each character of `input_str` through the positional correspondence
/// between `from_set` and `to_set`; characters absent from `from_set` pass
/// through unchanged. The sets are zipped, so extra chars in the longer set
/// are ignored.
fn replace_matching_characters(input_str: &str, from_set: &str, to_set: &str) -> String {
    let translation: HashMap<char, char> = from_set.chars().zip(to_set.chars()).collect();
    let mut result = String::with_capacity(input_str.len());
    for ch in input_str.chars() {
        result.push(*translation.get(&ch).unwrap_or(&ch));
    }
    result
}
/// Renders `user`'s nick in their configured highlight style, or verbatim when
/// no style is configured. Styled variants substitute homoglyph/math-styled
/// Unicode letters so the nick no longer triggers the user's IRC highlight.
fn highlight_for_user(user: &str, rtd: &Rtd) -> String {
    let user_highlights = &rtd.conf.user_highlights;
    if let Some(mode) = user_highlights.get(user) {
        // Exhaustive on HighlightMode so a new mode is a compile error here.
        match mode {
            HighlightMode::Normal => user.to_string(),
            HighlightMode::Cyrillic => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_CYRILLIC),
            HighlightMode::Fraktur => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_FRAKTUR),
            HighlightMode::FrakturBold => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_FRAKTUR_BOLD),
            HighlightMode::Script => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_SCRIPT),
            HighlightMode::Bold => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_BOLD),
            HighlightMode::Italic => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_ITALIC),
            HighlightMode::BoldItalic => replace_matching_characters(user, ALPHA_REGULAR, ALPHA_BOLD_ITALIC),
        }
    } else {
        user.to_string()
    }
}
/// Sends a "<nick>: <text>" reply to `channel`, styling the nick per the
/// user's highlight config and prefixing "error:" for failures.
fn send_reply(client: &Client, channel: &str, user: &str, result: Result<String>, rtd: &Rtd) {
    let user = highlight_for_user(user, rtd);
    let line = match result {
        Ok(reply) => format!("{user}: {reply}"),
        Err(err) => format!("{user}: error: {err}"),
    };
    client.send_privmsg(channel, line).unwrap();
}
/// Extracts the command argument (URL, folder, or task name): the second
/// space-separated token of `msg`.
///
/// Rejects an empty token (e.g. "!s  x" splits to an empty second field) and
/// over-long arguments; previously an empty argument flowed onward and
/// produced a confusing downstream error.
fn extract_url(msg: &str) -> Result<&str> {
    let url = msg.split(' ').take(2).last().unwrap();
    ensure!(!url.is_empty(), "missing argument");
    ensure!(url.len() <= 600, "URL too long");
    Ok(url)
}
/// Internal diagnostics for "!debug": reports the canonicalization cache size.
fn debug() -> Result<String> {
    Ok(format!(
        "Entries in canonicalization cache: {}",
        canonicalization_cache_entries_count()
    ))
}
fn check_archiving_allowed(rtd: &Rtd) -> Result<()> {
if !rtd.conf.archiving.allow_archiving {
bail!("archiving is currently suspended, see /topic for more information");
}
Ok(())
}
/// Parses one IRC `message` and executes the matching bot command.
///
/// Returns one `Result<String>` per reply line to send back; an unrecognized
/// message yields an empty Vec (no reply). The outer `Result` fails when
/// `check_authorization` rejects the caller (privileged commands call it
/// before doing anything) or when argument parsing fails.
pub async fn dispatch_message(message: &str, user: &str, client: &Client, rtd: &Rtd, check_authorization: impl Fn() -> Result<()>) -> Result<Vec<Result<String>>> {
    let message = message.trim_end();
    Ok(match message {
        "!help" => {
            vec![get_help()]
        },
        "!status" => {
            vec![get_status().await]
        },
        "!debug" => {
            check_authorization()?;
            vec![debug()]
        },
        "!password" => {
            vec![send_password(user, client, rtd)]
        },
        "!stopscripts" => {
            check_authorization()?;
            vec![stop_scripts().await]
        },
        "!proxycc" => {
            vec![get_proxy_country_codes().await]
        },
        msg if msg.starts_with("!proxy ") => {
            check_authorization()?;
            // Take only the first argument after the command word.
            let mut args: Vec<_> = msg.split(' ').skip(1).take(1).collect();
            let host = args.pop().ok_or_else(|| anyhow!("need a host argument"))?;
            vec![get_proxy(host).await]
        },
        msg if msg.starts_with("!reproxy ") => {
            check_authorization()?;
            let mut args: Vec<_> = msg.split(' ').skip(1).take(1).collect();
            let host = args.pop().ok_or_else(|| anyhow!("need a host argument"))?;
            vec![rotate_proxy(host).await]
        },
        // "!s" inspects the stash; accepts either a URL or a bare folder name.
        msg if msg.starts_with("!s ") => {
            let url_or_folder = extract_url(msg)?;
            if url_or_folder.starts_with("https://") || url_or_folder.starts_with("http://") {
                let descriptor = YoutubeDescriptor::from_url(url_or_folder)?;
                vec![check_stash(&descriptor).await]
            } else {
                vec![check_folder(url_or_folder, None).await]
            }
        },
        // "!a" archives; "!sa" reports the stash first, then archives.
        msg if msg.starts_with("!a ") => {
            check_authorization()?;
            check_archiving_allowed(rtd)?;
            let url = extract_url(msg)?;
            let canonicalized = YoutubeDescriptor::from_url(url)?.canonicalize().await?;
            vec![parallel_archive(url, &canonicalized).await]
        },
        msg if msg.starts_with("!sa ") => {
            check_authorization()?;
            check_archiving_allowed(rtd)?;
            let url = extract_url(msg)?;
            let descriptor = YoutubeDescriptor::from_url(url)?;
            let canonicalized = descriptor.canonicalize().await?;
            vec![
                check_stash(&descriptor).await,
                parallel_archive(url, &canonicalized).await
            ]
        },
        msg if msg.starts_with("!abort ") => {
            check_authorization()?;
            let task = extract_url(msg)?;
            vec![abort(task).await]
        },
        msg if msg.starts_with("!delist ") => {
            check_authorization()?;
            let mut args: Vec<_> = msg.split(' ').skip(1).take(1).collect();
            let folder = args.pop().ok_or_else(|| anyhow!("need a folder argument"))?;
            vec![remove_folder_from_lists(folder).await]
        },
        // Anything else is not a command; stay silent.
        _other => vec![],
    })
}
/// Reply sent to users lacking voice (+v) who attempt a privileged command.
const NO_VOICE_MESSAGE: &str =
    "need voice (+v) or higher authorization, please ping someone if urgent, or describe what you are archiving";
/// Looks up `nick`'s highest access level in `channel`.
/// Returns `None` when the user list is unavailable or the nick is not present.
fn access(client: &Client, channel: &str, nick: &str) -> Option<AccessLevel> {
    let users = client.list_users(channel)?;
    let user = users.into_iter().find(|u| u.get_nickname() == nick)?;
    Some(user.highest_access_level())
}
/// Entry point for every incoming IRC message: filters to PRIVMSG, enforces
/// voice-based authorization for privileged commands, dispatches the command,
/// and sends all resulting replies back to the channel.
pub async fn handle_message(client: &Client, message: &Message, rtd: &Rtd) {
    // print the message if debug flag is set
    if rtd.args.flag_debug {
        eprintln!("{:?}", message.command)
    }
    // match on message type
    let (_target, msg) = match message.command {
        Command::PRIVMSG(ref target, ref msg) => (target, msg),
        _ => return,
    };
    // NOTE(review): PRIVMSGs are expected to carry a source nick; this unwrap
    // panics on a server message without one — confirm that cannot happen here.
    let nick = message.source_nickname().unwrap();
    let channel = match message.response_target() {
        Some(channel) => channel,
        _ => return,
    };
    let check_authorization = || {
        // Don't require voice for nothere because ii isn't very good
        // at polling its state and messaging NickServ
        if nick == "nothere" {
            return Ok(());
        }
        // Treat an unknown level as plain Member (i.e. unauthorized).
        let level = access(client, channel, nick).unwrap_or(AccessLevel::Member);
        if level == AccessLevel::Member {
            send_reply(client, channel, nick, Ok(NO_VOICE_MESSAGE.into()), rtd);
            bail!("Not authorized");
        }
        Ok(())
    };
    let replies = dispatch_message(msg, nick, client, rtd, check_authorization).await;
    match replies {
        // A dispatch-level error (bad args, unauthorized) becomes one reply.
        Err(err) => {
            send_reply(client, channel, nick, Err(err), rtd);
        },
        Ok(replies) => {
            for reply in replies.into_iter() {
                send_reply(client, channel, nick, reply, rtd);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // URL parsing: each supported YouTube/Invidious URL shape must map to the
    // right descriptor variant, with trailing query/fragment junk ignored.
    #[test]
    fn test_descriptor() {
        for trailing_crud in ["", "?", "?stuff", "#", "#stuff"] {
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/channel/UChBBWt5H8uZW1LSOh_aPt2Q{trailing_crud}")).unwrap(),
                YoutubeDescriptor::Channel("UChBBWt5H8uZW1LSOh_aPt2Q".to_string()));
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/channel/UChBBWt5H8uZW1LSOh_aPt2Q/videos{trailing_crud}")).unwrap(),
                YoutubeDescriptor::Channel("UChBBWt5H8uZW1LSOh_aPt2Q".to_string()));
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/user/jblow888{trailing_crud}")).unwrap(),
                YoutubeDescriptor::User("jblow888".to_string()));
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/user/jblow888/videos{trailing_crud}")).unwrap(),
                YoutubeDescriptor::User("jblow888".to_string()));
        }
        // Query-style URLs: crud separated with '&' instead of '?'.
        for trailing_crud in ["", "&", "&stuff", "#", "#stuff"] {
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/playlist?list=PL5AC656794EE191C1{trailing_crud}")).unwrap(),
                YoutubeDescriptor::Playlist("PL5AC656794EE191C1".to_string()));
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/playlist?list=PL78L-9twndz8fMRU3NpiWSmB5IucqWuTF{trailing_crud}")).unwrap(),
                YoutubeDescriptor::Playlist("PL78L-9twndz8fMRU3NpiWSmB5IucqWuTF".to_string()));
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://www.youtube.com/watch?v=YdSdvIRkkDY{trailing_crud}")).unwrap(),
                YoutubeDescriptor::Video("YdSdvIRkkDY".to_string()));
            assert_eq!(
                YoutubeDescriptor::from_url(&format!("https://invidious.kavin.rocks/watch?v=YdSdvIRkkDY{trailing_crud}")).unwrap(),
                YoutubeDescriptor::Video("YdSdvIRkkDY".to_string()));
        }
        // Channel ids with the wrong length must be rejected.
        for bad_url in [
            "https://www.youtube.com/channel/UChBBWt5H8uZW1LSOh_aPt2",
            "https://www.youtube.com/channel/UChBBWt5H8uZW1LSOh_aPt2Qa",
        ] {
            let result = YoutubeDescriptor::from_url(bad_url);
            assert!(result.expect_err("expected an error").to_string().starts_with("Unsupported URL: "));
        }
    }
    // Highlight substitution: mapped chars are replaced, unmapped pass through.
    #[test]
    fn test_replace_matching_characters() {
        assert_eq!(replace_matching_characters("io", ALPHA_REGULAR, ALPHA_CYRILLIC), "іо");
        assert_eq!(replace_matching_characters("user", ALPHA_REGULAR, ALPHA_FRAKTUR), "𝔲𝔰𝔢𝔯");
        assert_eq!(replace_matching_characters("User", ALPHA_REGULAR, ALPHA_FRAKTUR), "𝔘𝔰𝔢𝔯");
        assert_eq!(replace_matching_characters("_User0", ALPHA_REGULAR, ALPHA_FRAKTUR), "_𝔘𝔰𝔢𝔯0");
    }
}
|
/// CreateGPGKeyOption options create user GPG key
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct CreateGpgKeyOption {
    /// An armored GPG key to add
    pub armored_public_key: String,
}
impl CreateGpgKeyOption {
    /// Create a builder for this object.
    // The marker type parameter starts as MissingArmoredPublicKey; setting the
    // required field flips it to ArmoredPublicKeyExists (typestate pattern),
    // which is what gates `Into<CreateGpgKeyOption>` and `Sendable`.
    #[inline]
    pub fn builder() -> CreateGpgKeyOptionBuilder<crate::generics::MissingArmoredPublicKey> {
        CreateGpgKeyOptionBuilder {
            body: Default::default(),
            _armored_public_key: core::marker::PhantomData,
        }
    }
    /// Builder for the `POST /user/gpg_keys` operation, same typestate scheme
    /// as [`builder`](#method.builder).
    #[inline]
    pub fn user_current_post_gpg_key() -> CreateGpgKeyOptionPostBuilder<crate::generics::MissingArmoredPublicKey> {
        CreateGpgKeyOptionPostBuilder {
            body: Default::default(),
            _armored_public_key: core::marker::PhantomData,
        }
    }
}
/// Unwraps a fully-populated builder into the request body.
///
/// Implemented as `From` rather than the original hand-written `Into`
/// (clippy::from_over_into): the blanket impl still provides `Into`, so all
/// existing `.into()` call sites keep working.
impl From<CreateGpgKeyOptionBuilder<crate::generics::ArmoredPublicKeyExists>> for CreateGpgKeyOption {
    fn from(builder: CreateGpgKeyOptionBuilder<crate::generics::ArmoredPublicKeyExists>) -> Self {
        builder.body
    }
}
impl From<CreateGpgKeyOptionPostBuilder<crate::generics::ArmoredPublicKeyExists>> for CreateGpgKeyOption {
    fn from(builder: CreateGpgKeyOptionPostBuilder<crate::generics::ArmoredPublicKeyExists>) -> Self {
        builder.body
    }
}
/// Builder for [`CreateGpgKeyOption`](./struct.CreateGpgKeyOption.html) object.
#[derive(Debug, Clone)]
pub struct CreateGpgKeyOptionBuilder<ArmoredPublicKey> {
    body: self::CreateGpgKeyOption,
    // Typestate marker: MissingArmoredPublicKey vs. ArmoredPublicKeyExists.
    _armored_public_key: core::marker::PhantomData<ArmoredPublicKey>,
}
impl<ArmoredPublicKey> CreateGpgKeyOptionBuilder<ArmoredPublicKey> {
    /// An armored GPG key to add
    #[inline]
    pub fn armored_public_key(mut self, value: impl Into<String>) -> CreateGpgKeyOptionBuilder<crate::generics::ArmoredPublicKeyExists> {
        self.body.armored_public_key = value.into();
        // SAFETY: the source and target types differ only in the PhantomData
        // marker parameter, so they have identical layout. Standard pattern in
        // this generated client for flipping the typestate.
        unsafe { std::mem::transmute(self) }
    }
}
/// Builder created by [`CreateGpgKeyOption::user_current_post_gpg_key`](./struct.CreateGpgKeyOption.html#method.user_current_post_gpg_key) method for a `POST` operation associated with `CreateGpgKeyOption`.
#[derive(Debug, Clone)]
pub struct CreateGpgKeyOptionPostBuilder<ArmoredPublicKey> {
    body: self::CreateGpgKeyOption,
    _armored_public_key: core::marker::PhantomData<ArmoredPublicKey>,
}
impl<ArmoredPublicKey> CreateGpgKeyOptionPostBuilder<ArmoredPublicKey> {
    /// An armored GPG key to add
    #[inline]
    pub fn armored_public_key(mut self, value: impl Into<String>) -> CreateGpgKeyOptionPostBuilder<crate::generics::ArmoredPublicKeyExists> {
        self.body.armored_public_key = value.into();
        // SAFETY: same PhantomData-only transmute as above.
        unsafe { std::mem::transmute(self) }
    }
}
// Only the completed typestate (ArmoredPublicKeyExists) is sendable, so an
// unset required field is a compile error rather than a runtime API error.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for CreateGpgKeyOptionPostBuilder<crate::generics::ArmoredPublicKeyExists> {
    type Output = crate::gpg_key::GpgKey;
    const METHOD: http::Method = http::Method::POST;
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        "/user/gpg_keys".into()
    }
    // Serialize the body as JSON onto the outgoing request.
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        Ok(req
        .json(&self.body))
    }
}
// Typed accessors for response headers of the GPG-key POST operation.
impl crate::client::ResponseWrapper<crate::gpg_key::GpgKey, CreateGpgKeyOptionPostBuilder<crate::generics::ArmoredPublicKeyExists>> {
    /// The `message` response header, if present and parseable.
    #[inline]
    pub fn message(&self) -> Option<String> {
        self.headers.get("message").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
    /// The `url` response header, if present and parseable.
    #[inline]
    pub fn url(&self) -> Option<String> {
        self.headers.get("url").and_then(|v| String::from_utf8_lossy(v.as_ref()).parse().ok())
    }
}
|
use futures::{Async, Poll, Stream};
use std::clone::Clone;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::MutexGuard;
use crate::structs::EveError;
/// Fans a single futures-0.1 `Stream` out to multiple readers: every item
/// pulled from `input` is cloned into one replay buffer per output port.
pub struct StreamCopy<T, S: Stream<Item=T, Error=EveError>> {
    input: S,
    // One pending-item queue per registered output port, indexed by port id.
    buffers: Vec<Vec<T>>,
    // Next port id to hand out (== number of ports created so far).
    idx: usize,
}
/// Shared, lockable handle to a `StreamCopy` (cloned into every output port).
pub struct StreamCopyMutex<T, S: Stream<Item=T, Error=EveError>>(Arc<Mutex<StreamCopy<T, S>>>);
/// One reader of the fanned-out stream; `id` selects its buffer in the source.
pub struct StreamCopyOutPort<T, S: Stream<Item=T, Error=EveError>> {
    id: usize,
    source: StreamCopyMutex<T, S>,
}
impl<T: Clone, S: Stream<Item=T, Error=EveError>> StreamCopy<T, S> {
    /// Polls on behalf of port `id`: serve from that port's buffer first,
    /// otherwise pull one item from the shared input, clone it into every
    /// port's buffer, then recurse (the recursion then hits the buffered
    /// path, so it is at most one level deep per yielded item).
    fn poll(&mut self, id: usize) -> Poll<Option<T>, EveError> {
        let buffered = self.buffered_poll(id);
        match buffered {
            Some(buffered) => {
                Ok(Async::Ready(Some(buffered)))
            }
            None => {
                let result = self.input.poll();
                match result {
                    Ok(value) => {
                        match value {
                            Async::Ready(ready) => {
                                match ready {
                                    Some(event) => {
                                        // Fan the new event out to every port
                                        // (including the caller's own buffer).
                                        for buffer in &mut self.buffers {
                                            buffer.push(event.clone())
                                        }
                                        self.poll(id)
                                    }
                                    // Upstream finished; propagate end-of-stream.
                                    None => Ok(Async::Ready(None))
                                }
                            }
                            Async::NotReady => Ok(Async::NotReady)
                        }
                    }
                    Err(e) => Err(e)
                }
            }
        }
    }
    /// Pops the oldest buffered item for port `id`, if any.
    /// NOTE(review): `remove(0)` is O(n) per pop; a VecDeque would make this
    /// O(1), but that would change the struct definition.
    fn buffered_poll(&mut self, id: usize) -> Option<T> {
        let buffer = &mut self.buffers[id];
        if buffer.len() > 0 {
            Some(buffer.remove(0))
        } else {
            None
        }
    }
}
// Each output port is itself a Stream; polling it locks the shared copier
// and polls on behalf of this port's id.
impl<T: Clone, S: Stream<Item=T, Error=EveError>> Stream for StreamCopyOutPort<T, S> {
    type Item = T;
    type Error = EveError;
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        self.source.poll_locked(self.id)
    }
}
impl<T, S: Stream<Item=T, Error=EveError>> Clone for StreamCopyMutex<T, S> {
fn clone(&self) -> StreamCopyMutex<T, S> {
StreamCopyMutex(self.0.clone())
}
}
impl<T: Clone, S: Stream<Item=T, Error=EveError>> StreamCopyMutex<T, S> {
    /// Wraps `input` in a shared copier with no output ports registered yet.
    pub fn new(input: S) -> StreamCopyMutex<T, S> {
        StreamCopyMutex(
            Arc::new(Mutex::new(StreamCopy {
                input,
                buffers: vec!(),
                idx: 0,
            })))
    }
    /// Locks the inner copier. Panics if the mutex was poisoned.
    pub fn lock(&self) -> MutexGuard<StreamCopy<T, S>> {
        self.0.lock().unwrap()
    }
    /// Registers a new output port: allocates its replay buffer and assigns
    /// it the next port id under the lock.
    pub fn create_output_locked(&self) -> StreamCopyOutPort<T, S> {
        let mut inner = self.lock();
        let val = StreamCopyOutPort {
            source: (*self).clone(),
            id: inner.idx,
        };
        inner.buffers.push(vec!());
        inner.idx += 1;
        val
    }
    /// Polls the shared stream on behalf of port `id`, holding the lock for
    /// the duration of the poll.
    pub fn poll_locked(&self, id: usize) -> Poll<Option<T>, EveError> {
        let mut inner = self.lock();
        inner.poll(id)
    }
} |
/// An enum to represent all characters in the BlockElements block.
// Variants are declared in code-point order (U+2580 ..= U+259E), which the
// Into<u32>/TryFrom<u32> impls and the Iterator impl below rely on.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
pub enum BlockElements {
    /// \u{2580}: '▀'
    UpperHalfBlock,
    /// \u{2581}: '▁'
    LowerOneEighthBlock,
    /// \u{2582}: '▂'
    LowerOneQuarterBlock,
    /// \u{2583}: '▃'
    LowerThreeEighthsBlock,
    /// \u{2584}: '▄'
    LowerHalfBlock,
    /// \u{2585}: '▅'
    LowerFiveEighthsBlock,
    /// \u{2586}: '▆'
    LowerThreeQuartersBlock,
    /// \u{2587}: '▇'
    LowerSevenEighthsBlock,
    /// \u{2588}: '█'
    FullBlock,
    /// \u{2589}: '▉'
    LeftSevenEighthsBlock,
    /// \u{258a}: '▊'
    LeftThreeQuartersBlock,
    /// \u{258b}: '▋'
    LeftFiveEighthsBlock,
    /// \u{258c}: '▌'
    LeftHalfBlock,
    /// \u{258d}: '▍'
    LeftThreeEighthsBlock,
    /// \u{258e}: '▎'
    LeftOneQuarterBlock,
    /// \u{258f}: '▏'
    LeftOneEighthBlock,
    /// \u{2590}: '▐'
    RightHalfBlock,
    /// \u{2591}: '░'
    LightShade,
    /// \u{2592}: '▒'
    MediumShade,
    /// \u{2593}: '▓'
    DarkShade,
    /// \u{2594}: '▔'
    UpperOneEighthBlock,
    /// \u{2595}: '▕'
    RightOneEighthBlock,
    /// \u{2596}: '▖'
    QuadrantLowerLeft,
    /// \u{2597}: '▗'
    QuadrantLowerRight,
    /// \u{2598}: '▘'
    QuadrantUpperLeft,
    /// \u{2599}: '▙'
    QuadrantUpperLeftAndLowerLeftAndLowerRight,
    /// \u{259a}: '▚'
    QuadrantUpperLeftAndLowerRight,
    /// \u{259b}: '▛'
    QuadrantUpperLeftAndUpperRightAndLowerLeft,
    /// \u{259c}: '▜'
    QuadrantUpperLeftAndUpperRightAndLowerRight,
    /// \u{259d}: '▝'
    QuadrantUpperRight,
    /// \u{259e}: '▞'
    QuadrantUpperRightAndLowerLeft,
}
// Variant -> character; exhaustive, so a new variant is a compile error here.
impl Into<char> for BlockElements {
    fn into(self) -> char {
        match self {
            BlockElements::UpperHalfBlock => '▀',
            BlockElements::LowerOneEighthBlock => '▁',
            BlockElements::LowerOneQuarterBlock => '▂',
            BlockElements::LowerThreeEighthsBlock => '▃',
            BlockElements::LowerHalfBlock => '▄',
            BlockElements::LowerFiveEighthsBlock => '▅',
            BlockElements::LowerThreeQuartersBlock => '▆',
            BlockElements::LowerSevenEighthsBlock => '▇',
            BlockElements::FullBlock => '█',
            BlockElements::LeftSevenEighthsBlock => '▉',
            BlockElements::LeftThreeQuartersBlock => '▊',
            BlockElements::LeftFiveEighthsBlock => '▋',
            BlockElements::LeftHalfBlock => '▌',
            BlockElements::LeftThreeEighthsBlock => '▍',
            BlockElements::LeftOneQuarterBlock => '▎',
            BlockElements::LeftOneEighthBlock => '▏',
            BlockElements::RightHalfBlock => '▐',
            BlockElements::LightShade => '░',
            BlockElements::MediumShade => '▒',
            BlockElements::DarkShade => '▓',
            BlockElements::UpperOneEighthBlock => '▔',
            BlockElements::RightOneEighthBlock => '▕',
            BlockElements::QuadrantLowerLeft => '▖',
            BlockElements::QuadrantLowerRight => '▗',
            BlockElements::QuadrantUpperLeft => '▘',
            BlockElements::QuadrantUpperLeftAndLowerLeftAndLowerRight => '▙',
            BlockElements::QuadrantUpperLeftAndLowerRight => '▚',
            BlockElements::QuadrantUpperLeftAndUpperRightAndLowerLeft => '▛',
            BlockElements::QuadrantUpperLeftAndUpperRightAndLowerRight => '▜',
            BlockElements::QuadrantUpperRight => '▝',
            BlockElements::QuadrantUpperRightAndLowerLeft => '▞',
        }
    }
}
// Character -> variant; any char outside the BlockElements block yields Err(()).
impl std::convert::TryFrom<char> for BlockElements {
    type Error = ();
    fn try_from(c: char) -> Result<Self, Self::Error> {
        match c {
            '▀' => Ok(BlockElements::UpperHalfBlock),
            '▁' => Ok(BlockElements::LowerOneEighthBlock),
            '▂' => Ok(BlockElements::LowerOneQuarterBlock),
            '▃' => Ok(BlockElements::LowerThreeEighthsBlock),
            '▄' => Ok(BlockElements::LowerHalfBlock),
            '▅' => Ok(BlockElements::LowerFiveEighthsBlock),
            '▆' => Ok(BlockElements::LowerThreeQuartersBlock),
            '▇' => Ok(BlockElements::LowerSevenEighthsBlock),
            '█' => Ok(BlockElements::FullBlock),
            '▉' => Ok(BlockElements::LeftSevenEighthsBlock),
            '▊' => Ok(BlockElements::LeftThreeQuartersBlock),
            '▋' => Ok(BlockElements::LeftFiveEighthsBlock),
            '▌' => Ok(BlockElements::LeftHalfBlock),
            '▍' => Ok(BlockElements::LeftThreeEighthsBlock),
            '▎' => Ok(BlockElements::LeftOneQuarterBlock),
            '▏' => Ok(BlockElements::LeftOneEighthBlock),
            '▐' => Ok(BlockElements::RightHalfBlock),
            '░' => Ok(BlockElements::LightShade),
            '▒' => Ok(BlockElements::MediumShade),
            '▓' => Ok(BlockElements::DarkShade),
            '▔' => Ok(BlockElements::UpperOneEighthBlock),
            '▕' => Ok(BlockElements::RightOneEighthBlock),
            '▖' => Ok(BlockElements::QuadrantLowerLeft),
            '▗' => Ok(BlockElements::QuadrantLowerRight),
            '▘' => Ok(BlockElements::QuadrantUpperLeft),
            '▙' => Ok(BlockElements::QuadrantUpperLeftAndLowerLeftAndLowerRight),
            '▚' => Ok(BlockElements::QuadrantUpperLeftAndLowerRight),
            '▛' => Ok(BlockElements::QuadrantUpperLeftAndUpperRightAndLowerLeft),
            '▜' => Ok(BlockElements::QuadrantUpperLeftAndUpperRightAndLowerRight),
            '▝' => Ok(BlockElements::QuadrantUpperRight),
            '▞' => Ok(BlockElements::QuadrantUpperRightAndLowerLeft),
            _ => Err(()),
        }
    }
}
/// Variant -> Unicode code point (e.g. UpperHalfBlock -> 0x2580).
///
/// A `char` cast to `u32` yields exactly its Unicode scalar value, so the
/// previous round trip through `escape_unicode()`, string replacement, and
/// `from_str_radix(..).unwrap()` is replaced by a direct, infallible cast.
impl Into<u32> for BlockElements {
    fn into(self) -> u32 {
        let c: char = self.into();
        c as u32
    }
}
// Code point -> variant: first validate the u32 as a char, then reuse the
// char-based conversion. Any failure collapses to Err(()).
impl std::convert::TryFrom<u32> for BlockElements {
    type Error = ();
    fn try_from(u: u32) -> Result<Self, Self::Error> {
        char::try_from(u).map_err(|_| ()).and_then(Self::try_from)
    }
}
/// Iterates through the block's characters in code-point order, starting at
/// the successor of the current value and ending after U+259E.
///
/// Fix: the original computed the successor but never assigned it back to
/// `*self`, so `next()` returned the same variant forever and any `for` loop
/// over a `BlockElements` value never terminated. The iterator must advance
/// its own state on every call.
impl Iterator for BlockElements {
    type Item = Self;
    fn next(&mut self) -> Option<Self> {
        use std::convert::TryFrom;
        let index: u32 = (*self).into();
        let successor = Self::try_from(index + 1).ok()?;
        *self = successor;
        Some(successor)
    }
}
impl BlockElements {
    /// The character with the lowest index in this unicode block
    pub fn new() -> Self {
        BlockElements::UpperHalfBlock
    }
    /// The character's name, in sentence case
    // NOTE(review): the "BlockElements" type prefix from the Debug output is
    // included in the string before sentence-casing — confirm this matches
    // the intended display name format.
    pub fn name(&self) -> String {
        let s = std::format!("BlockElements{:#?}", self);
        string_morph::to_sentence_case(&s)
    }
}
|
use std::os::raw::c_char;
use std::slice;
/// Reinterprets a `&[c_char]` as `&[u8]` without copying.
pub fn c_char_to_unsigned(slice: &[c_char]) -> &[u8] {
    let ptr = slice.as_ptr().cast::<u8>();
    let len = slice.len();
    // SAFETY: `ptr`/`len` come from a valid slice, and `c_char` (i8 or u8
    // depending on target) has the same size and alignment as `u8`, so the
    // new slice covers exactly the same memory for the same lifetime.
    unsafe { slice::from_raw_parts(ptr, len) }
}
/// Reinterprets a `&[u8]` as `&[c_char]` without copying (inverse of
/// [`c_char_to_unsigned`]).
pub fn unsigned_to_c_char(slice: &[u8]) -> &[c_char] {
    let ptr = slice.as_ptr().cast::<c_char>();
    let len = slice.len();
    // SAFETY: same-size, same-alignment reinterpretation of a valid slice;
    // see c_char_to_unsigned.
    unsafe { slice::from_raw_parts(ptr, len) }
}
|
use rayon::iter::plumbing::{
bridge_producer_consumer, Folder, Producer, ProducerCallback, Reducer, UnindexedConsumer,
};
use rayon::prelude::*;
/// Parallel-iterator adaptor that consumes `base` in successive blocks whose
/// lengths come from the `sizes` iterator (each block is bridged to the
/// consumer independently).
pub struct ByBlocks<I, S> {
    pub(super) sizes: S,
    pub(super) base: I,
}
/// Producer callback that walks the producer block by block, splitting the
/// consumer for each block and reducing the partial results together.
struct BlocksCallback<S, C> {
    sizes: S,
    consumer: C,
    // Number of items remaining in the underlying producer.
    len: usize,
}
impl<T, S, C> ProducerCallback<T> for BlocksCallback<S, C>
where
    C: UnindexedConsumer<T>,
    S: Iterator<Item = usize>,
{
    type Output = C::Result;
    fn callback<P: Producer<Item = T>>(mut self, mut producer: P) -> Self::Output {
        let mut remaining_len = self.len;
        let mut consumer = self.consumer;
        // Obtain the reduction identity by splitting off a zero-length
        // consumer and completing its (empty) folder.
        // TODO: is it really the way to get to identity ?
        let (left_consumer, right_consumer, _) = consumer.split_at(0);
        let mut res = left_consumer.into_folder().complete();
        consumer = right_consumer;
        // Peel one block per iteration until the producer is exhausted or the
        // consumer signals early termination (e.g. find/any short-circuit).
        while remaining_len > 0 && !consumer.full() {
            // A depleted `sizes` iterator means "one final unbounded block".
            let size = self.sizes.next().unwrap_or(std::usize::MAX);
            let real_size = remaining_len.min(size);
            remaining_len -= real_size;
            // Split the current block off the front of the producer...
            let (left, right) = producer.split_at(real_size);
            producer = right;
            // ...and a matching consumer off the front of the consumer chain.
            // TODO: should we care about this reducer ?
            // TODO: why on earth do we care about left and right consumers ?
            let (left_consumer, right_consumer, _) = consumer.split_at(real_size);
            consumer = right_consumer;
            // Fold this block's result into the running reduction.
            res = consumer.to_reducer().reduce(
                res,
                bridge_producer_consumer(real_size, left, left_consumer),
            );
        }
        res
    }
}
impl<I, S> ParallelIterator for ByBlocks<I, S>
where
    I: IndexedParallelIterator,
    S: Iterator<Item = usize> + Send,
{
    type Item = I::Item;
    // Drive the base iterator through BlocksCallback, which chops the work
    // into the configured block sizes before handing it to `consumer`.
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        let len = self.base.len();
        let callback = BlocksCallback {
            consumer,
            sizes: self.sizes,
            len,
        };
        self.base.with_producer(callback)
    }
}
|
use serde::Deserialize;
use serde::Serialize;
use std::collections::HashMap;
/// Transaction asset payload, one variant per transaction type.
///
/// Uses serde's default externally-tagged representation: the JSON object key
/// is the variant name, lowercased by `rename_all` unless overridden by a
/// per-variant `rename`. Field names are camelCased individually via
/// `serde(rename = ...)` attributes — do not change them casually, they are
/// part of the wire format.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum Asset {
    /// No asset attached (plain transfer).
    None,
    Signature {
        #[serde(rename = "publicKey")]
        public_key: String,
    },
    Delegate {
        username: String,
    },
    Votes(Vec<String>),
    #[serde(rename = "multiSignature")]
    MultiSignatureRegistration {
        #[serde(rename = "publicKeys")]
        public_keys: Vec<String>,
        min: u8,
    },
    Ipfs(String),
    Payments(Vec<Payment>),
    Lock {
        #[serde(rename = "secretHash")]
        secret_hash: String,
        expiration: Expiration,
    },
    Claim {
        #[serde(rename = "lockTransactionId")]
        lock_transaction_id: String,
        #[serde(rename = "unlockSecret")]
        unlock_secret: String,
    },
    #[serde(rename = "refund")]
    Refund {
        #[serde(rename = "lockTransactionId")]
        lock_transaction_id: String,
    },
    #[serde(rename = "businessRegistration")]
    BusinessRegistration {
        name: String,
        website: String,
    },
    #[serde(rename = "businessUpdate")]
    BusinessUpdate {
        name: String,
        website: String,
    },
    #[serde(rename = "bridgechainRegistration")]
    BridgeChainRegistration {
        name: String,
        #[serde(rename = "seedNodes")]
        seed_nodes: Vec<String>,
        #[serde(rename = "genesisHash")]
        genesis_hash: String,
        #[serde(rename = "bridgechainRepository")]
        bridgechain_repository: String,
        ports: HashMap<String, u32>,
    },
    #[serde(rename = "bridgechainUpdate")]
    BridgechainUpdate {
        #[serde(rename = "bridgechainId")]
        bridgechain_id: String,
        #[serde(rename = "seedNodes")]
        seed_nodes: Vec<String>,
        ports: HashMap<String, u32>,
    },
    #[serde(rename = "bridgechainResignation")]
    BridgechainResignation {
        #[serde(rename = "bridgechainId")]
        bridgechain_id: String,
    },
}
/// HTLC lock expiration: a type discriminant plus its value.
// NOTE(review): semantics of `expiration_type` values (epoch vs. block height)
// are not visible here — confirm against the consuming protocol code.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Expiration {
    #[serde(rename = "type")]
    pub expiration_type: u64,
    pub value: u64,
}
/// A single recipient entry of a multi-payment asset.
// `amount` stays a string to preserve arbitrary-precision values on the wire.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Payment {
    pub amount: String,
    pub recipient_id: String,
}
impl Asset {
    /// Returns `true` when this is the `Asset::None` placeholder variant.
    // `matches!` replaces the manual match-with-catch-all (clippy's
    // match_like_matches_macro); behavior is identical.
    pub fn is_none(&self) -> bool {
        matches!(self, Asset::None)
    }
}
// Manual impl: `#[derive(Default)]` with `#[default]` on a variant requires a
// newer toolchain than this code targets.
impl Default for Asset {
    fn default() -> Self {
        Asset::None
    }
}
|
use crate::geometry::{Rect, Size};
use num_traits::{AsPrimitive, Float, NumCast, PrimInt};
use std::ops;
/// Defines a position in 2D cartesian coordinates.
// repr(C) pins the field order to x-then-y.
// NOTE(review): presumably relied on for FFI or byte-level casts elsewhere.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)]
#[repr(C)]
pub struct Point<T> {
    /// Distance from the left edge.
    pub x: T,
    /// Distance from the top edge.
    pub y: T,
}
impl<T> Point<T> {
    /// Creates a point from its two coordinates.
    #[inline]
    pub const fn new(x: T, y: T) -> Self {
        Point { x, y }
    }
    /// Returns a copy with `x` replaced.
    #[inline]
    pub fn with_x(self, x: T) -> Self {
        Point { x, y: self.y }
    }
    /// Returns a copy with `y` replaced.
    #[inline]
    pub fn with_y(self, y: T) -> Self {
        Point { x: self.x, y }
    }
    /// Translates by `(dx, dy)`.
    #[inline]
    pub fn offset(self, dx: T, dy: T) -> Self
    where
        T: ops::Add<Output = T>,
    {
        Point {
            x: self.x + dx,
            y: self.y + dy,
        }
    }
    /// Whether the point lies inside `rect` (edges inclusive).
    /// Coordinates that do not fit in `i32` are treated as outside.
    #[inline]
    pub fn inside(self, rect: Rect) -> bool
    where
        T: NumCast,
    {
        if let Some(p) = self.cast_checked::<i32>() {
            p.x >= rect.x() && p.x <= rect.end_x() && p.y >= rect.y() && p.y <= rect.end_y()
        } else {
            false
        }
    }
    /// Applies `f` to both coordinates.
    #[inline]
    pub fn map<F, R>(self, mut f: F) -> Point<R>
    where
        F: FnMut(T) -> R,
    {
        Point {
            x: f(self.x),
            y: f(self.y),
        }
    }
    /// Combines two points coordinate-wise with `f`.
    #[inline]
    pub fn map2<F, U, R>(self, other: Point<U>, mut f: F) -> Point<R>
    where
        F: FnMut(T, U) -> R,
    {
        Point {
            x: f(self.x, other.x),
            y: f(self.y, other.y),
        }
    }
    /// Mutates both coordinates in place with `f`.
    #[inline]
    pub fn map_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T),
    {
        f(&mut self.x);
        f(&mut self.y);
    }
    /// Mutates both coordinates in place, pairing each with `other`'s.
    #[inline]
    pub fn map2_mut<F, U>(&mut self, other: Point<U>, mut f: F)
    where
        F: FnMut(&mut T, U),
    {
        f(&mut self.x, other.x);
        f(&mut self.y, other.y);
    }
    /// Casts both coordinates with `as` semantics (may truncate or wrap).
    #[inline]
    pub fn cast<R>(self) -> Point<R>
    where
        T: AsPrimitive<R>,
        R: Copy + 'static,
    {
        Point {
            x: self.x.as_(),
            y: self.y.as_(),
        }
    }
    /// Casts both coordinates, returning `None` if either does not fit in `R`.
    #[inline]
    pub fn cast_checked<R>(self) -> Option<Point<R>>
    where
        T: NumCast,
        R: NumCast,
    {
        Some(Point {
            x: num_traits::cast(self.x)?,
            y: num_traits::cast(self.y)?,
        })
    }
}
impl<T: PrimInt + AsPrimitive<u32>> Point<T> {
    /// Converts to a `Size`, clamping negative coordinates to zero before the
    /// unsigned cast.
    #[inline]
    pub fn as_size(self) -> Size {
        Size {
            w: self.x.max(T::zero()).as_(),
            h: self.y.max(T::zero()).as_(),
        }
    }
}
impl<T: Float> Point<T> {
    /// Creates a new point from radial coordinates.
    // `angle` is in radians (it is fed to sin/cos directly).
    #[inline]
    pub fn new_radial(radius: T, angle: T) -> Self {
        Point {
            x: radius * angle.cos(),
            y: radius * angle.sin(),
        }
    }
    /// Calculates the distance between two points.
    // hypot avoids intermediate overflow/underflow vs. sqrt(dx²+dy²).
    #[inline]
    pub fn distance_to(self, other: Self) -> T {
        let d = other - self;
        d.x.hypot(d.y)
    }
    /// Rotate this point around the origin.
    #[inline]
    pub fn rotate_origin(self, angle: T) -> Self {
        let cos = angle.cos();
        let sin = angle.sin();
        Point {
            x: self.x * cos - self.y * sin,
            y: self.x * sin + self.y * cos,
        }
    }
    /// Rotate this point around another point.
    #[inline]
    pub fn rotate(self, center: Self, angle: T) -> Self {
        (self - center).rotate_origin(angle) + center
    }
    /// Interpolate between two points.
    // a = 0 yields self, a = 1 yields other; values outside [0, 1] extrapolate.
    #[inline]
    pub fn interpolate(self, other: Self, a: T) -> Self {
        self * (T::one() - a) + other * a
    }
}
// Conversions from/to arrays and tuples. The From<[S; 2]> / From<(S, S)>
// impls accept any element type castable to T via `as` semantics.
impl<T: Copy + 'static, S: AsPrimitive<T>> From<[S; 2]> for Point<T> {
    #[inline]
    fn from([x, y]: [S; 2]) -> Self {
        Self { x: x.as_(), y: y.as_() }
    }
}
impl<T: Copy + 'static, S: AsPrimitive<T>> From<(S, S)> for Point<T> {
    #[inline]
    fn from((x, y): (S, S)) -> Self {
        Self { x: x.as_(), y: y.as_() }
    }
}
impl<T> From<Point<T>> for [T; 2] {
    #[inline]
    fn from(p: Point<T>) -> Self {
        [p.x, p.y]
    }
}
impl<T> From<Point<T>> for (T, T) {
    #[inline]
    fn from(p: Point<T>) -> Self {
        (p.x, p.y)
    }
}
// Unit converts to the default point (the origin for numeric T).
impl<T: Default> From<()> for Point<T> {
    #[inline]
    fn from(_: ()) -> Self {
        Default::default()
    }
}
impl<T> ops::Add for Point<T>
where
T: ops::Add<Output = T>,
{
type Output = Self;
#[inline]
fn add(self, rhs: Self) -> Self::Output {
self.map2(rhs, ops::Add::add)
}
}
impl<T> ops::Sub for Point<T>
where
T: ops::Sub<Output = T>,
{
type Output = Self;
#[inline]
fn sub(self, rhs: Self) -> Self::Output {
self.map2(rhs, ops::Sub::sub)
}
}
impl<T> ops::Mul<T> for Point<T>
where
T: ops::Mul<Output = T> + Copy,
{
type Output = Self;
#[inline]
fn mul(self, rhs: T) -> Self::Output {
self.map(|a| a * rhs)
}
}
impl<T> ops::Div<T> for Point<T>
where
T: ops::Div<Output = T> + Copy,
{
type Output = Self;
#[inline]
fn div(self, rhs: T) -> Self::Output {
self.map(|a| a / rhs)
}
}
impl<T> ops::Div for Point<T>
where
T: ops::Div<Output = T> + Copy,
{
type Output = Self;
#[inline]
fn div(self, rhs: Self) -> Self::Output {
self.map2(rhs, ops::Div::div)
}
}
impl<T> ops::Rem<T> for Point<T>
where
T: ops::Rem<Output = T> + Copy,
{
type Output = Self;
#[inline]
fn rem(self, rhs: T) -> Self::Output {
self.map(|a| a % rhs)
}
}
impl<T> ops::AddAssign for Point<T>
where
T: ops::AddAssign,
{
#[inline]
fn add_assign(&mut self, rhs: Self) {
self.map2_mut(rhs, std::ops::AddAssign::add_assign)
}
}
impl<T> ops::SubAssign for Point<T>
where
T: ops::SubAssign,
{
#[inline]
fn sub_assign(&mut self, rhs: Self) {
self.map2_mut(rhs, std::ops::SubAssign::sub_assign)
}
}
/// In-place scalar multiplication (`*=`) of both components.
impl<T> ops::MulAssign<T> for Point<T>
where
    T: ops::MulAssign + Copy,
{
    #[inline]
    fn mul_assign(&mut self, rhs: T) {
        self.map_mut(|a| *a *= rhs);
    }
}
/// In-place scalar division (`/=`) of both components.
impl<T> ops::DivAssign<T> for Point<T>
where
    T: ops::DivAssign + Copy,
{
    #[inline]
    fn div_assign(&mut self, rhs: T) {
        self.map_mut(|a| *a /= rhs);
    }
}
/// In-place scalar remainder (`%=`) of both components.
impl<T> ops::RemAssign<T> for Point<T>
where
    T: ops::RemAssign + Copy,
{
    #[inline]
    fn rem_assign(&mut self, rhs: T) {
        self.map_mut(|a| *a %= rhs);
    }
}
/// Component-wise negation of a point.
impl<T> ops::Neg for Point<T>
where
    T: ops::Neg<Output = T>,
{
    type Output = Self;
    #[inline]
    fn neg(self) -> Self::Output {
        Self { x: -self.x, y: -self.y }
    }
}
|
use std::marker::PhantomData;
/// A typed key: implementors name the value type stored/retrieved under them.
pub trait Key {
    /// The type of value associated with this key.
    type Value;
}
/// Zero-sized marker key whose associated value is an MPI rank; the type
/// parameter `K` tags the key without being stored (`PhantomData`).
pub struct RankKey<K>(PhantomData<K>);
impl<K> Key for RankKey<K> {
    type Value = mpi::topology::Rank;
}
|
pub mod window;
pub mod renderer;
pub mod widgets; |
fn main() {
    // Parse a decimal string into an i32 via FromStr and echo the result.
    let value = "5".parse::<i32>().unwrap();
    println!("parsed {:?}", value);
}
|
/// Project Euler problem 9: find the Pythagorean triplet (a, b, c) with
/// a + b + c = 1000 and return the product a * b * c, or 0 if none exists.
pub fn problem_009() -> u64 {
    let total: u64 = 1000;
    // Enumerate candidate pairs (a, b) with b >= a, derive c from the sum
    // constraint, and return the product of the first Pythagorean triple found.
    (0..total)
        .flat_map(|a| (a..(total - 1) / 2).map(move |b| (a, b)))
        .map(|(a, b)| (a, b, total - a - b))
        .find(|&(a, b, c)| a * a + b * b == c * c)
        .map_or(0, |(a, b, c)| a * b * c)
}
#[cfg(test)]
mod test {
    use super::*;
    // NOTE(review): `test::Bencher` comes from the unstable `test` crate, so this
    // module presumably requires a nightly toolchain with `#![feature(test)]` at
    // the crate root — TODO confirm.
    use test::Bencher;
    /// Verifies the known answer to Project Euler problem 9 (31875000).
    #[test]
    fn test_problem_009() {
        let ans: u64 = problem_009();
        println!("Answer to Problem 9: {}", ans);
        assert!(ans == 31875000)
    }
    /// Benchmarks the brute-force triplet search.
    #[bench]
    fn bench_problem_009(b: &mut Bencher) {
        b.iter(|| problem_009());
    }
}
|
// A tuple is a fixed-size grouping of values that may have different types.
/// Demonstrates tuples: builds a `(&str, i32, &str)` describing a city and
/// prints a sentence using its components.
pub fn tuple_fn() {
    let city: (&str, i32, &str) = ("Belgrade", 2000000, "Serbia");
    // Destructure instead of indexing with .0/.1/.2 for readable names.
    let (name, population, country) = city;
    println!("I live in {0} it is the capital of {2}, and {0} has {1} people living", name, population, country);
}
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
// NOTE(review): machine-generated serializers for AWS Lambda request shapes.
// The doc comments below are navigational only and will be lost on regeneration;
// the code itself is intentionally left byte-identical.
/// Serializes `AddLayerVersionPermissionInput` into `object`.
pub fn serialize_structure_add_layer_version_permission_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::AddLayerVersionPermissionInput,
) {
    if let Some(var_1) = &input.statement_id {
        object.key("StatementId").string(var_1);
    }
    if let Some(var_2) = &input.action {
        object.key("Action").string(var_2);
    }
    if let Some(var_3) = &input.principal {
        object.key("Principal").string(var_3);
    }
    if let Some(var_4) = &input.organization_id {
        object.key("OrganizationId").string(var_4);
    }
}
/// Serializes `AddPermissionInput` into `object`.
pub fn serialize_structure_add_permission_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::AddPermissionInput,
) {
    if let Some(var_5) = &input.statement_id {
        object.key("StatementId").string(var_5);
    }
    if let Some(var_6) = &input.action {
        object.key("Action").string(var_6);
    }
    if let Some(var_7) = &input.principal {
        object.key("Principal").string(var_7);
    }
    if let Some(var_8) = &input.source_arn {
        object.key("SourceArn").string(var_8);
    }
    if let Some(var_9) = &input.source_account {
        object.key("SourceAccount").string(var_9);
    }
    if let Some(var_10) = &input.event_source_token {
        object.key("EventSourceToken").string(var_10);
    }
    if let Some(var_11) = &input.revision_id {
        object.key("RevisionId").string(var_11);
    }
}
/// Serializes `CreateAliasInput` into `object`.
pub fn serialize_structure_create_alias_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateAliasInput,
) {
    if let Some(var_12) = &input.name {
        object.key("Name").string(var_12);
    }
    if let Some(var_13) = &input.function_version {
        object.key("FunctionVersion").string(var_13);
    }
    if let Some(var_14) = &input.description {
        object.key("Description").string(var_14);
    }
    if let Some(var_15) = &input.routing_config {
        let mut object_16 = object.key("RoutingConfig").start_object();
        crate::json_ser::serialize_structure_alias_routing_configuration(&mut object_16, var_15);
        object_16.finish();
    }
}
/// Serializes `CreateCodeSigningConfigInput` into `object`.
pub fn serialize_structure_create_code_signing_config_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateCodeSigningConfigInput,
) {
    if let Some(var_17) = &input.description {
        object.key("Description").string(var_17);
    }
    if let Some(var_18) = &input.allowed_publishers {
        let mut object_19 = object.key("AllowedPublishers").start_object();
        crate::json_ser::serialize_structure_allowed_publishers(&mut object_19, var_18);
        object_19.finish();
    }
    if let Some(var_20) = &input.code_signing_policies {
        let mut object_21 = object.key("CodeSigningPolicies").start_object();
        crate::json_ser::serialize_structure_code_signing_policies(&mut object_21, var_20);
        object_21.finish();
    }
}
/// Serializes `CreateEventSourceMappingInput` into `object`.
pub fn serialize_structure_create_event_source_mapping_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateEventSourceMappingInput,
) {
    if let Some(var_22) = &input.event_source_arn {
        object.key("EventSourceArn").string(var_22);
    }
    if let Some(var_23) = &input.function_name {
        object.key("FunctionName").string(var_23);
    }
    if let Some(var_24) = &input.enabled {
        object.key("Enabled").boolean(*var_24);
    }
    if let Some(var_25) = &input.batch_size {
        object.key("BatchSize").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_25).into()),
        );
    }
    if let Some(var_26) = &input.maximum_batching_window_in_seconds {
        object.key("MaximumBatchingWindowInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_26).into()),
        );
    }
    if let Some(var_27) = &input.parallelization_factor {
        object.key("ParallelizationFactor").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_27).into()),
        );
    }
    if let Some(var_28) = &input.starting_position {
        object.key("StartingPosition").string(var_28.as_str());
    }
    if let Some(var_29) = &input.starting_position_timestamp {
        object
            .key("StartingPositionTimestamp")
            .instant(var_29, smithy_types::instant::Format::EpochSeconds);
    }
    if let Some(var_30) = &input.destination_config {
        let mut object_31 = object.key("DestinationConfig").start_object();
        crate::json_ser::serialize_structure_destination_config(&mut object_31, var_30);
        object_31.finish();
    }
    if let Some(var_32) = &input.maximum_record_age_in_seconds {
        object.key("MaximumRecordAgeInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_32).into()),
        );
    }
    if let Some(var_33) = &input.bisect_batch_on_function_error {
        object.key("BisectBatchOnFunctionError").boolean(*var_33);
    }
    if let Some(var_34) = &input.maximum_retry_attempts {
        object.key("MaximumRetryAttempts").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_34).into()),
        );
    }
    if let Some(var_35) = &input.tumbling_window_in_seconds {
        object.key("TumblingWindowInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_35).into()),
        );
    }
    if let Some(var_36) = &input.topics {
        let mut array_37 = object.key("Topics").start_array();
        for item_38 in var_36 {
            array_37.value().string(item_38);
        }
        array_37.finish();
    }
    if let Some(var_39) = &input.queues {
        let mut array_40 = object.key("Queues").start_array();
        for item_41 in var_39 {
            array_40.value().string(item_41);
        }
        array_40.finish();
    }
    if let Some(var_42) = &input.source_access_configurations {
        let mut array_43 = object.key("SourceAccessConfigurations").start_array();
        for item_44 in var_42 {
            let mut object_45 = array_43.value().start_object();
            crate::json_ser::serialize_structure_source_access_configuration(
                &mut object_45,
                item_44,
            );
            object_45.finish();
        }
        array_43.finish();
    }
    if let Some(var_46) = &input.self_managed_event_source {
        let mut object_47 = object.key("SelfManagedEventSource").start_object();
        crate::json_ser::serialize_structure_self_managed_event_source(&mut object_47, var_46);
        object_47.finish();
    }
    if let Some(var_48) = &input.function_response_types {
        let mut array_49 = object.key("FunctionResponseTypes").start_array();
        for item_50 in var_48 {
            array_49.value().string(item_50.as_str());
        }
        array_49.finish();
    }
}
/// Serializes `CreateFunctionInput` into `object`.
pub fn serialize_structure_create_function_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::CreateFunctionInput,
) {
    if let Some(var_51) = &input.function_name {
        object.key("FunctionName").string(var_51);
    }
    if let Some(var_52) = &input.runtime {
        object.key("Runtime").string(var_52.as_str());
    }
    if let Some(var_53) = &input.role {
        object.key("Role").string(var_53);
    }
    if let Some(var_54) = &input.handler {
        object.key("Handler").string(var_54);
    }
    if let Some(var_55) = &input.code {
        let mut object_56 = object.key("Code").start_object();
        crate::json_ser::serialize_structure_function_code(&mut object_56, var_55);
        object_56.finish();
    }
    if let Some(var_57) = &input.description {
        object.key("Description").string(var_57);
    }
    if let Some(var_58) = &input.timeout {
        object.key("Timeout").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_58).into()),
        );
    }
    if let Some(var_59) = &input.memory_size {
        object.key("MemorySize").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_59).into()),
        );
    }
    object.key("Publish").boolean(input.publish);
    if let Some(var_60) = &input.vpc_config {
        let mut object_61 = object.key("VpcConfig").start_object();
        crate::json_ser::serialize_structure_vpc_config(&mut object_61, var_60);
        object_61.finish();
    }
    if let Some(var_62) = &input.package_type {
        object.key("PackageType").string(var_62.as_str());
    }
    if let Some(var_63) = &input.dead_letter_config {
        let mut object_64 = object.key("DeadLetterConfig").start_object();
        crate::json_ser::serialize_structure_dead_letter_config(&mut object_64, var_63);
        object_64.finish();
    }
    if let Some(var_65) = &input.environment {
        let mut object_66 = object.key("Environment").start_object();
        crate::json_ser::serialize_structure_environment(&mut object_66, var_65);
        object_66.finish();
    }
    if let Some(var_67) = &input.kms_key_arn {
        object.key("KMSKeyArn").string(var_67);
    }
    if let Some(var_68) = &input.tracing_config {
        let mut object_69 = object.key("TracingConfig").start_object();
        crate::json_ser::serialize_structure_tracing_config(&mut object_69, var_68);
        object_69.finish();
    }
    if let Some(var_70) = &input.tags {
        let mut object_71 = object.key("Tags").start_object();
        for (key_72, value_73) in var_70 {
            object_71.key(key_72).string(value_73);
        }
        object_71.finish();
    }
    if let Some(var_74) = &input.layers {
        let mut array_75 = object.key("Layers").start_array();
        for item_76 in var_74 {
            array_75.value().string(item_76);
        }
        array_75.finish();
    }
    if let Some(var_77) = &input.file_system_configs {
        let mut array_78 = object.key("FileSystemConfigs").start_array();
        for item_79 in var_77 {
            let mut object_80 = array_78.value().start_object();
            crate::json_ser::serialize_structure_file_system_config(&mut object_80, item_79);
            object_80.finish();
        }
        array_78.finish();
    }
    if let Some(var_81) = &input.image_config {
        let mut object_82 = object.key("ImageConfig").start_object();
        crate::json_ser::serialize_structure_image_config(&mut object_82, var_81);
        object_82.finish();
    }
    if let Some(var_83) = &input.code_signing_config_arn {
        object.key("CodeSigningConfigArn").string(var_83);
    }
}
/// Serializes `PublishLayerVersionInput` into `object`.
pub fn serialize_structure_publish_layer_version_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PublishLayerVersionInput,
) {
    if let Some(var_84) = &input.description {
        object.key("Description").string(var_84);
    }
    if let Some(var_85) = &input.content {
        let mut object_86 = object.key("Content").start_object();
        crate::json_ser::serialize_structure_layer_version_content_input(&mut object_86, var_85);
        object_86.finish();
    }
    if let Some(var_87) = &input.compatible_runtimes {
        let mut array_88 = object.key("CompatibleRuntimes").start_array();
        for item_89 in var_87 {
            array_88.value().string(item_89.as_str());
        }
        array_88.finish();
    }
    if let Some(var_90) = &input.license_info {
        object.key("LicenseInfo").string(var_90);
    }
}
/// Serializes `PublishVersionInput` into `object`.
pub fn serialize_structure_publish_version_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PublishVersionInput,
) {
    if let Some(var_91) = &input.code_sha256 {
        object.key("CodeSha256").string(var_91);
    }
    if let Some(var_92) = &input.description {
        object.key("Description").string(var_92);
    }
    if let Some(var_93) = &input.revision_id {
        object.key("RevisionId").string(var_93);
    }
}
/// Serializes `PutFunctionCodeSigningConfigInput` into `object`.
pub fn serialize_structure_put_function_code_signing_config_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PutFunctionCodeSigningConfigInput,
) {
    if let Some(var_94) = &input.code_signing_config_arn {
        object.key("CodeSigningConfigArn").string(var_94);
    }
}
/// Serializes `PutFunctionConcurrencyInput` into `object`.
pub fn serialize_structure_put_function_concurrency_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PutFunctionConcurrencyInput,
) {
    if let Some(var_95) = &input.reserved_concurrent_executions {
        object.key("ReservedConcurrentExecutions").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_95).into()),
        );
    }
}
/// Serializes `PutFunctionEventInvokeConfigInput` into `object`.
pub fn serialize_structure_put_function_event_invoke_config_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PutFunctionEventInvokeConfigInput,
) {
    if let Some(var_96) = &input.maximum_retry_attempts {
        object.key("MaximumRetryAttempts").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_96).into()),
        );
    }
    if let Some(var_97) = &input.maximum_event_age_in_seconds {
        object.key("MaximumEventAgeInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_97).into()),
        );
    }
    if let Some(var_98) = &input.destination_config {
        let mut object_99 = object.key("DestinationConfig").start_object();
        crate::json_ser::serialize_structure_destination_config(&mut object_99, var_98);
        object_99.finish();
    }
}
/// Serializes `PutProvisionedConcurrencyConfigInput` into `object`.
pub fn serialize_structure_put_provisioned_concurrency_config_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::PutProvisionedConcurrencyConfigInput,
) {
    if let Some(var_100) = &input.provisioned_concurrent_executions {
        object.key("ProvisionedConcurrentExecutions").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_100).into()),
        );
    }
}
/// Serializes `TagResourceInput` into `object`.
pub fn serialize_structure_tag_resource_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::TagResourceInput,
) {
    if let Some(var_101) = &input.tags {
        let mut object_102 = object.key("Tags").start_object();
        for (key_103, value_104) in var_101 {
            object_102.key(key_103).string(value_104);
        }
        object_102.finish();
    }
}
/// Serializes `UpdateAliasInput` into `object`.
pub fn serialize_structure_update_alias_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateAliasInput,
) {
    if let Some(var_105) = &input.function_version {
        object.key("FunctionVersion").string(var_105);
    }
    if let Some(var_106) = &input.description {
        object.key("Description").string(var_106);
    }
    if let Some(var_107) = &input.routing_config {
        let mut object_108 = object.key("RoutingConfig").start_object();
        crate::json_ser::serialize_structure_alias_routing_configuration(&mut object_108, var_107);
        object_108.finish();
    }
    if let Some(var_109) = &input.revision_id {
        object.key("RevisionId").string(var_109);
    }
}
/// Serializes `UpdateCodeSigningConfigInput` into `object`.
pub fn serialize_structure_update_code_signing_config_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateCodeSigningConfigInput,
) {
    if let Some(var_110) = &input.description {
        object.key("Description").string(var_110);
    }
    if let Some(var_111) = &input.allowed_publishers {
        let mut object_112 = object.key("AllowedPublishers").start_object();
        crate::json_ser::serialize_structure_allowed_publishers(&mut object_112, var_111);
        object_112.finish();
    }
    if let Some(var_113) = &input.code_signing_policies {
        let mut object_114 = object.key("CodeSigningPolicies").start_object();
        crate::json_ser::serialize_structure_code_signing_policies(&mut object_114, var_113);
        object_114.finish();
    }
}
/// Serializes `UpdateEventSourceMappingInput` into `object`.
pub fn serialize_structure_update_event_source_mapping_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateEventSourceMappingInput,
) {
    if let Some(var_115) = &input.function_name {
        object.key("FunctionName").string(var_115);
    }
    if let Some(var_116) = &input.enabled {
        object.key("Enabled").boolean(*var_116);
    }
    if let Some(var_117) = &input.batch_size {
        object.key("BatchSize").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_117).into()),
        );
    }
    if let Some(var_118) = &input.maximum_batching_window_in_seconds {
        object.key("MaximumBatchingWindowInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_118).into()),
        );
    }
    if let Some(var_119) = &input.destination_config {
        let mut object_120 = object.key("DestinationConfig").start_object();
        crate::json_ser::serialize_structure_destination_config(&mut object_120, var_119);
        object_120.finish();
    }
    if let Some(var_121) = &input.maximum_record_age_in_seconds {
        object.key("MaximumRecordAgeInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_121).into()),
        );
    }
    if let Some(var_122) = &input.bisect_batch_on_function_error {
        object.key("BisectBatchOnFunctionError").boolean(*var_122);
    }
    if let Some(var_123) = &input.maximum_retry_attempts {
        object.key("MaximumRetryAttempts").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_123).into()),
        );
    }
    if let Some(var_124) = &input.parallelization_factor {
        object.key("ParallelizationFactor").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_124).into()),
        );
    }
    if let Some(var_125) = &input.source_access_configurations {
        let mut array_126 = object.key("SourceAccessConfigurations").start_array();
        for item_127 in var_125 {
            let mut object_128 = array_126.value().start_object();
            crate::json_ser::serialize_structure_source_access_configuration(
                &mut object_128,
                item_127,
            );
            object_128.finish();
        }
        array_126.finish();
    }
    if let Some(var_129) = &input.tumbling_window_in_seconds {
        object.key("TumblingWindowInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_129).into()),
        );
    }
    if let Some(var_130) = &input.function_response_types {
        let mut array_131 = object.key("FunctionResponseTypes").start_array();
        for item_132 in var_130 {
            array_131.value().string(item_132.as_str());
        }
        array_131.finish();
    }
}
/// Serializes `UpdateFunctionCodeInput` into `object`.
pub fn serialize_structure_update_function_code_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateFunctionCodeInput,
) {
    if let Some(var_133) = &input.zip_file {
        object
            .key("ZipFile")
            .string_unchecked(&smithy_http::base64::encode(var_133));
    }
    if let Some(var_134) = &input.s3_bucket {
        object.key("S3Bucket").string(var_134);
    }
    if let Some(var_135) = &input.s3_key {
        object.key("S3Key").string(var_135);
    }
    if let Some(var_136) = &input.s3_object_version {
        object.key("S3ObjectVersion").string(var_136);
    }
    if let Some(var_137) = &input.image_uri {
        object.key("ImageUri").string(var_137);
    }
    object.key("Publish").boolean(input.publish);
    object.key("DryRun").boolean(input.dry_run);
    if let Some(var_138) = &input.revision_id {
        object.key("RevisionId").string(var_138);
    }
}
/// Serializes `UpdateFunctionConfigurationInput` into `object`.
pub fn serialize_structure_update_function_configuration_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateFunctionConfigurationInput,
) {
    if let Some(var_139) = &input.role {
        object.key("Role").string(var_139);
    }
    if let Some(var_140) = &input.handler {
        object.key("Handler").string(var_140);
    }
    if let Some(var_141) = &input.description {
        object.key("Description").string(var_141);
    }
    if let Some(var_142) = &input.timeout {
        object.key("Timeout").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_142).into()),
        );
    }
    if let Some(var_143) = &input.memory_size {
        object.key("MemorySize").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_143).into()),
        );
    }
    if let Some(var_144) = &input.vpc_config {
        let mut object_145 = object.key("VpcConfig").start_object();
        crate::json_ser::serialize_structure_vpc_config(&mut object_145, var_144);
        object_145.finish();
    }
    if let Some(var_146) = &input.environment {
        let mut object_147 = object.key("Environment").start_object();
        crate::json_ser::serialize_structure_environment(&mut object_147, var_146);
        object_147.finish();
    }
    if let Some(var_148) = &input.runtime {
        object.key("Runtime").string(var_148.as_str());
    }
    if let Some(var_149) = &input.dead_letter_config {
        let mut object_150 = object.key("DeadLetterConfig").start_object();
        crate::json_ser::serialize_structure_dead_letter_config(&mut object_150, var_149);
        object_150.finish();
    }
    if let Some(var_151) = &input.kms_key_arn {
        object.key("KMSKeyArn").string(var_151);
    }
    if let Some(var_152) = &input.tracing_config {
        let mut object_153 = object.key("TracingConfig").start_object();
        crate::json_ser::serialize_structure_tracing_config(&mut object_153, var_152);
        object_153.finish();
    }
    if let Some(var_154) = &input.revision_id {
        object.key("RevisionId").string(var_154);
    }
    if let Some(var_155) = &input.layers {
        let mut array_156 = object.key("Layers").start_array();
        for item_157 in var_155 {
            array_156.value().string(item_157);
        }
        array_156.finish();
    }
    if let Some(var_158) = &input.file_system_configs {
        let mut array_159 = object.key("FileSystemConfigs").start_array();
        for item_160 in var_158 {
            let mut object_161 = array_159.value().start_object();
            crate::json_ser::serialize_structure_file_system_config(&mut object_161, item_160);
            object_161.finish();
        }
        array_159.finish();
    }
    if let Some(var_162) = &input.image_config {
        let mut object_163 = object.key("ImageConfig").start_object();
        crate::json_ser::serialize_structure_image_config(&mut object_163, var_162);
        object_163.finish();
    }
}
/// Serializes `UpdateFunctionEventInvokeConfigInput` into `object`.
pub fn serialize_structure_update_function_event_invoke_config_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::input::UpdateFunctionEventInvokeConfigInput,
) {
    if let Some(var_164) = &input.maximum_retry_attempts {
        object.key("MaximumRetryAttempts").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_164).into()),
        );
    }
    if let Some(var_165) = &input.maximum_event_age_in_seconds {
        object.key("MaximumEventAgeInSeconds").number(
            #[allow(clippy::useless_conversion)]
            smithy_types::Number::NegInt((*var_165).into()),
        );
    }
    if let Some(var_166) = &input.destination_config {
        let mut object_167 = object.key("DestinationConfig").start_object();
        crate::json_ser::serialize_structure_destination_config(&mut object_167, var_166);
        object_167.finish();
    }
}
/// Serializes the `AliasRoutingConfiguration` model shape into `object`.
pub fn serialize_structure_alias_routing_configuration(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AliasRoutingConfiguration,
) {
    if let Some(var_168) = &input.additional_version_weights {
        let mut object_169 = object.key("AdditionalVersionWeights").start_object();
        for (key_170, value_171) in var_168 {
            object_169.key(key_170).number(
                #[allow(clippy::useless_conversion)]
                smithy_types::Number::Float((*value_171).into()),
            );
        }
        object_169.finish();
    }
}
/// Serializes the `AllowedPublishers` model shape into `object`.
pub fn serialize_structure_allowed_publishers(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::AllowedPublishers,
) {
    if let Some(var_172) = &input.signing_profile_version_arns {
        let mut array_173 = object.key("SigningProfileVersionArns").start_array();
        for item_174 in var_172 {
            array_173.value().string(item_174);
        }
        array_173.finish();
    }
}
/// Serializes the `CodeSigningPolicies` model shape into `object`.
pub fn serialize_structure_code_signing_policies(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::CodeSigningPolicies,
) {
    if let Some(var_175) = &input.untrusted_artifact_on_deployment {
        object
            .key("UntrustedArtifactOnDeployment")
            .string(var_175.as_str());
    }
}
/// Serializes the `DestinationConfig` model shape into `object`.
pub fn serialize_structure_destination_config(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DestinationConfig,
) {
    if let Some(var_176) = &input.on_success {
        let mut object_177 = object.key("OnSuccess").start_object();
        crate::json_ser::serialize_structure_on_success(&mut object_177, var_176);
        object_177.finish();
    }
    if let Some(var_178) = &input.on_failure {
        let mut object_179 = object.key("OnFailure").start_object();
        crate::json_ser::serialize_structure_on_failure(&mut object_179, var_178);
        object_179.finish();
    }
}
/// Serializes the `SourceAccessConfiguration` model shape into `object`.
pub fn serialize_structure_source_access_configuration(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::SourceAccessConfiguration,
) {
    if let Some(var_180) = &input.r#type {
        object.key("Type").string(var_180.as_str());
    }
    if let Some(var_181) = &input.uri {
        object.key("URI").string(var_181);
    }
}
/// Serializes the `SelfManagedEventSource` model shape into `object`.
pub fn serialize_structure_self_managed_event_source(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::SelfManagedEventSource,
) {
    if let Some(var_182) = &input.endpoints {
        let mut object_183 = object.key("Endpoints").start_object();
        for (key_184, value_185) in var_182 {
            let mut array_186 = object_183.key(key_184.as_str()).start_array();
            for item_187 in value_185 {
                array_186.value().string(item_187);
            }
            array_186.finish();
        }
        object_183.finish();
    }
}
/// Serializes the `FunctionCode` model shape into `object`.
pub fn serialize_structure_function_code(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::FunctionCode,
) {
    if let Some(var_188) = &input.zip_file {
        object
            .key("ZipFile")
            .string_unchecked(&smithy_http::base64::encode(var_188));
    }
    if let Some(var_189) = &input.s3_bucket {
        object.key("S3Bucket").string(var_189);
    }
    if let Some(var_190) = &input.s3_key {
        object.key("S3Key").string(var_190);
    }
    if let Some(var_191) = &input.s3_object_version {
        object.key("S3ObjectVersion").string(var_191);
    }
    if let Some(var_192) = &input.image_uri {
        object.key("ImageUri").string(var_192);
    }
}
/// Serializes the `VpcConfig` model shape into `object`.
pub fn serialize_structure_vpc_config(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::VpcConfig,
) {
    if let Some(var_193) = &input.subnet_ids {
        let mut array_194 = object.key("SubnetIds").start_array();
        for item_195 in var_193 {
            array_194.value().string(item_195);
        }
        array_194.finish();
    }
    if let Some(var_196) = &input.security_group_ids {
        let mut array_197 = object.key("SecurityGroupIds").start_array();
        for item_198 in var_196 {
            array_197.value().string(item_198);
        }
        array_197.finish();
    }
}
/// Serializes the `DeadLetterConfig` model shape into `object`.
pub fn serialize_structure_dead_letter_config(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::DeadLetterConfig,
) {
    if let Some(var_199) = &input.target_arn {
        object.key("TargetArn").string(var_199);
    }
}
/// Serializes the `Environment` model shape into `object`.
pub fn serialize_structure_environment(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::Environment,
) {
    if let Some(var_200) = &input.variables {
        let mut object_201 = object.key("Variables").start_object();
        for (key_202, value_203) in var_200 {
            object_201.key(key_202).string(value_203);
        }
        object_201.finish();
    }
}
/// Serializes the `TracingConfig` model shape into `object`.
pub fn serialize_structure_tracing_config(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::TracingConfig,
) {
    if let Some(var_204) = &input.mode {
        object.key("Mode").string(var_204.as_str());
    }
}
/// Serializes the `FileSystemConfig` model shape into `object`.
pub fn serialize_structure_file_system_config(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::FileSystemConfig,
) {
    if let Some(var_205) = &input.arn {
        object.key("Arn").string(var_205);
    }
    if let Some(var_206) = &input.local_mount_path {
        object.key("LocalMountPath").string(var_206);
    }
}
/// Serializes the `ImageConfig` model shape into `object`.
pub fn serialize_structure_image_config(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::ImageConfig,
) {
    if let Some(var_207) = &input.entry_point {
        let mut array_208 = object.key("EntryPoint").start_array();
        for item_209 in var_207 {
            array_208.value().string(item_209);
        }
        array_208.finish();
    }
    if let Some(var_210) = &input.command {
        let mut array_211 = object.key("Command").start_array();
        for item_212 in var_210 {
            array_211.value().string(item_212);
        }
        array_211.finish();
    }
    if let Some(var_213) = &input.working_directory {
        object.key("WorkingDirectory").string(var_213);
    }
}
/// Serializes the `LayerVersionContentInput` model shape into `object`.
pub fn serialize_structure_layer_version_content_input(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::LayerVersionContentInput,
) {
    if let Some(var_214) = &input.s3_bucket {
        object.key("S3Bucket").string(var_214);
    }
    if let Some(var_215) = &input.s3_key {
        object.key("S3Key").string(var_215);
    }
    if let Some(var_216) = &input.s3_object_version {
        object.key("S3ObjectVersion").string(var_216);
    }
    if let Some(var_217) = &input.zip_file {
        object
            .key("ZipFile")
            .string_unchecked(&smithy_http::base64::encode(var_217));
    }
}
/// Serializes the `OnSuccess` model shape into `object`.
pub fn serialize_structure_on_success(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OnSuccess,
) {
    if let Some(var_218) = &input.destination {
        object.key("Destination").string(var_218);
    }
}
/// Serializes the `OnFailure` model shape into `object`.
pub fn serialize_structure_on_failure(
    object: &mut smithy_json::serialize::JsonObjectWriter,
    input: &crate::model::OnFailure,
) {
    if let Some(var_219) = &input.destination {
        object.key("Destination").string(var_219);
    }
}
|
use std::option::Option;
use crate::n_peekable::NPeekable;
/// Streaming lexer state over a source string.
struct Scanner<'a> {
    line: i32,               // current line number (starts at 0, incremented on '\n')
    cur_lexeme: String,      // raw text of the token currently being scanned
    char_iter: NPeekable<'a> // character stream with multi-character lookahead
}
impl<'a> Scanner<'a> {
fn new(source: &'a str) -> Scanner {
Scanner { line: 0, cur_lexeme: String::new(), char_iter: NPeekable::new(source) }
}
fn has_next(&mut self) -> bool {
self.char_iter.peek().is_some()
}
// FIXME the usage of advance might suggest, that we don't need to return anything from here
// rethink interface
fn advance(&mut self) -> Option<char> {
let result = self.char_iter.next();
self.cur_lexeme.extend(result);
result
}
fn skip_whitespace(&mut self) {
while self.char_iter.peek().map_or(false, |c| c.is_whitespace()) {
let c = self.char_iter.next().unwrap();
if c == '\n' {
self.line += 1;
}
}
}
fn scan_token(&mut self) -> Option<TokenType> {
self.skip_whitespace();
let a = self.char_iter.peek()?;
assert!(!a.is_whitespace());
let token_type = if a.is_digit(10) {
self.scan_number()
} else if a.is_alphabetic() {
self.scan_identifier()
} else if *a == '"' {
self.read_string()
} else {
// FIXME too deeply nested here
let c = self.advance()?;
match c {
'(' => Some(TokenType::LEFT_PAREN),
')' => Some(TokenType::RIGHT_PAREN),
'{' => Some(TokenType::LEFT_BRACE),
'}' => Some(TokenType::RIGHT_BRACE),
',' => Some(TokenType::COMMA),
'.' => Some(TokenType::DOT),
'-' => Some(TokenType::MINUS),
'+' => Some(TokenType::PLUS),
';' => Some(TokenType::SEMICOLON),
'*' => Some(TokenType::STAR),
'/' => if self.next_char_matches('/') {
while self.has_next() && !self.next_char_matches('\n') {
self.advance();
}
self.cur_lexeme = "".to_string(); // overwrite lexeme, comments don't matter
Some(TokenType::COMMENT)
} else {
Some(TokenType::SLASH)
},
_ => {
if self.next_char_matches('=') {
self.advance();
match c {
'!' => Some(TokenType::BANG_EQUAL),
'=' => Some(TokenType::EQUAL_EQUAL),
'<' => Some(TokenType::LESS_EQUAL),
'>' => Some(TokenType::GREATER_EQUAL),
_ => None
}
} else {
match c {
'!' => Some(TokenType::BANG),
'=' => Some(TokenType::EQUAL),
'<' => Some(TokenType::LESS),
'>' => Some(TokenType::GREATER),
_ => None
}
}
}
}
};
if token_type.is_none() {
error(self.line, "Unexpected character.");
}
token_type
}
fn read_string(&mut self) -> Option<TokenType> {
assert!(self.next_char_matches('"'));
self.advance();
let mut value = String::new();
while !self.next_char_matches('"') {
let x = self.advance()?;
if x == '\n' {
self.line += 1;
}
value.push(x);
}
if self.next_char_matches('"') {
self.advance(); // skip "
Some(TokenType::STRING(value))
} else {
error(self.line, "Unterminated string.");
None
}
}
fn scan_number(&mut self) -> Option<TokenType> {
assert!(self.char_iter.peek().map_or(false, |c| c.is_digit(10)));
while self.char_iter.peek().map_or(false, |c| c.is_digit(10)) {
self.advance();
}
if self.next_char_matches('.')
&& self.char_iter.n_peek(2).map_or(false, |c| c.is_digit(10)) {
self.advance();
}
while self.char_iter.peek().map_or(false, |c| c.is_digit(10)) {
self.advance();
}
self.cur_lexeme.as_str().parse::<f64>().map(|n| TokenType::NUMBER(n)).ok()
}
fn scan_identifier(&mut self) -> Option<TokenType> {
while self.char_iter.peek().map_or(false, |c| c.is_alphanumeric()) {
self.advance();
}
Some(match self.cur_lexeme.as_str() {
"and" => TokenType::AND,
"class" => TokenType::CLASS,
"else" => TokenType::ELSE,
"false" => TokenType::FALSE,
"for" => TokenType::FOR,
"fun" => TokenType::FUN,
"if" => TokenType::IF,
"nil" => TokenType::NIL,
"or" => TokenType::OR,
"print" => TokenType::PRINT,
"return" => TokenType::RETURN,
"super" => TokenType::SUPER,
"this" => TokenType::THIS,
"true" => TokenType::TRUE,
"var" => TokenType::VAR,
"while" => TokenType::WHILE,
a => TokenType::IDENTIFIER(a.to_string())
})
}
/// Returns true when the next (unconsumed) character equals `c`; false at EOF.
fn next_char_matches(&mut self, c: char) -> bool {
    matches!(self.char_iter.peek(), Some(&next) if next == c)
}
}
/// Scans `source` into a token stream, dropping COMMENT tokens and
/// appending a final EOF token.
pub fn scan_tokens(source: &str) -> Vec<Token> {
    let mut scanner = Scanner::new(source);
    let mut tokens = Vec::new();
    while scanner.has_next() {
        // Start of a new lexeme.
        scanner.cur_lexeme.clear();
        if let Some(token_type) = scanner.scan_token() {
            // Comments are recognized but never emitted.
            if token_type != TokenType::COMMENT {
                tokens.push(Token {
                    token_type,
                    lexeme: scanner.cur_lexeme.clone(),
                    line: scanner.line,
                });
            }
        }
    }
    tokens.push(Token {
        token_type: TokenType::EOF,
        lexeme: String::new(),
        line: scanner.line,
    });
    tokens
}
// TODO: proper fmt::Display trait
// Note: the literal value (string/number/identifier text) is bundled
// inside the TokenType variant, not stored separately here.
#[derive(Debug)]
#[derive(PartialEq)]
/// A single scanned token: its type (with any literal payload), the raw
/// source text it came from, and the line it appeared on.
pub struct Token {
    pub token_type: TokenType,
    pub lexeme: String, // FIXME do we need lexeme?
    pub line: i32
}
/// Reports a scan error on `line` with no location detail.
fn error(line: i32, message: &str) {
    report(line, "", message);
}
/// Prints a diagnostic to stderr in the form `[line N ] Error <loc> : <msg>`.
fn report(line: i32, location: &str, message: &str) {
    eprintln!("[line {} ] Error {} : {}", line, location, message);
}
#[derive(Debug)]
#[derive(PartialEq)]
#[allow(non_camel_case_types)]
/// Every token kind the Lox scanner can produce. Literal-bearing variants
/// carry their parsed payload directly.
pub enum TokenType {
    // Single-character tokens.
    LEFT_PAREN, RIGHT_PAREN, LEFT_BRACE, RIGHT_BRACE,
    COMMA, DOT, MINUS, PLUS, SEMICOLON, SLASH, STAR,
    // One or two character tokens.
    BANG, BANG_EQUAL,
    EQUAL, EQUAL_EQUAL,
    GREATER, GREATER_EQUAL,
    LESS, LESS_EQUAL,
    // Literals.
    IDENTIFIER(String), STRING(String), NUMBER(f64),
    // Keywords.
    AND, CLASS, ELSE, FALSE, FUN, FOR, IF, NIL, OR,
    PRINT, RETURN, SUPER, THIS, TRUE, VAR, WHILE,
    COMMENT, // FIXME can I get rid of this w/o complicating the scan_token implementation?
    EOF
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::path::Path;
    /// Scans `input` and asserts it yields exactly one token (plus EOF)
    /// of the expected type, whose lexeme is the whole input.
    fn assert_correctly_scanned_token(input: &str, expected_token_type: TokenType) {
        let result = scan_tokens(input);
        assert_eq!(2, result.len(), "{}", input);
        assert_eq!(Token { line: 0, lexeme: input.to_string(), token_type:
            expected_token_type }, result[0], "{}", input)
    }
    #[test]
    fn scan_token() {
        assert_correctly_scanned_token("(", TokenType::LEFT_PAREN);
        assert_correctly_scanned_token(")", TokenType::RIGHT_PAREN);
        assert_correctly_scanned_token("{", TokenType::LEFT_BRACE);
        assert_correctly_scanned_token("}", TokenType::RIGHT_BRACE);
        assert_correctly_scanned_token(",", TokenType::COMMA);
        assert_correctly_scanned_token(".", TokenType::DOT);
        assert_correctly_scanned_token("-", TokenType::MINUS);
        assert_correctly_scanned_token("+", TokenType::PLUS);
        assert_correctly_scanned_token(";", TokenType::SEMICOLON);
        assert_correctly_scanned_token("/", TokenType::SLASH);
        assert_correctly_scanned_token("*", TokenType::STAR);
        assert_correctly_scanned_token("!", TokenType::BANG);
        assert_correctly_scanned_token("!=", TokenType::BANG_EQUAL);
        assert_correctly_scanned_token("=", TokenType::EQUAL);
        assert_correctly_scanned_token("==", TokenType::EQUAL_EQUAL);
        assert_correctly_scanned_token(">", TokenType::GREATER);
        assert_correctly_scanned_token(">=", TokenType::GREATER_EQUAL);
        assert_correctly_scanned_token("<", TokenType::LESS);
        assert_correctly_scanned_token("<=", TokenType::LESS_EQUAL);
        assert_correctly_scanned_token("\"string\"", TokenType::STRING("string".to_string()));
        assert_correctly_scanned_token("1", TokenType::NUMBER(1.0));
        assert_correctly_scanned_token("1.23", TokenType::NUMBER(1.23));
        assert_correctly_scanned_token("identifier", TokenType::IDENTIFIER("identifier".to_string()));
        assert_correctly_scanned_token("and", TokenType::AND);
        assert_correctly_scanned_token("class", TokenType::CLASS);
        assert_correctly_scanned_token("else", TokenType::ELSE);
        assert_correctly_scanned_token("false", TokenType::FALSE);
        assert_correctly_scanned_token("for", TokenType::FOR);
        assert_correctly_scanned_token("fun", TokenType::FUN);
        assert_correctly_scanned_token("if", TokenType::IF);
        assert_correctly_scanned_token("nil", TokenType::NIL);
        assert_correctly_scanned_token("or", TokenType::OR);
        assert_correctly_scanned_token("print", TokenType::PRINT);
        assert_correctly_scanned_token("return", TokenType::RETURN);
        assert_correctly_scanned_token("super", TokenType::SUPER);
        assert_correctly_scanned_token("this", TokenType::THIS);
        assert_correctly_scanned_token("true", TokenType::TRUE);
        assert_correctly_scanned_token("var", TokenType::VAR);
        assert_correctly_scanned_token("while", TokenType::WHILE);
    }
    /// A '.' after a number with no following digit must not be folded
    /// into the NUMBER token.
    #[test]
    fn scan_number_with_function_call() {
        let tokens = scan_tokens("-123.sqrt()");
        assert_eq!(7, tokens.len());
        assert_eq!(TokenType::MINUS, tokens[0].token_type);
        assert_eq!(TokenType::NUMBER(123.0), tokens[1].token_type);
        assert_eq!(TokenType::DOT, tokens[2].token_type);
        assert_eq!(TokenType::IDENTIFIER("sqrt".to_string()), tokens[3].token_type);
        assert_eq!(TokenType::LEFT_PAREN, tokens[4].token_type);
        assert_eq!(TokenType::RIGHT_PAREN, tokens[5].token_type);
        assert_eq!(TokenType::EOF, tokens[6].token_type);
    }
    #[test]
    fn lox_files_are_scanned_correctly() {
        scanned_file_matches_token_types("test_data/example-0.lox",
            vec![TokenType::FUN,
                TokenType::IDENTIFIER("someFun".to_string()),
                TokenType::LEFT_PAREN,
                TokenType::IDENTIFIER("someParam".to_string()),
                TokenType::RIGHT_PAREN,
                TokenType::LEFT_BRACE,
                TokenType::RETURN,
                TokenType::NUMBER(0.0),
                TokenType::SEMICOLON,
                TokenType::RIGHT_BRACE,
                TokenType::EOF]
        );
        scanned_file_matches_token_types("test_data/example-1.lox",
            vec![TokenType::FOR,
                TokenType::LEFT_PAREN,
                TokenType::VAR,
                TokenType::IDENTIFIER("a".to_string()),
                TokenType::EQUAL,
                TokenType::NUMBER(1.0),
                TokenType::SEMICOLON,
                TokenType::IDENTIFIER("a".to_string()),
                TokenType::LESS,
                TokenType::NUMBER(10.0),
                TokenType::SEMICOLON,
                TokenType::IDENTIFIER("a".to_string()),
                TokenType::EQUAL,
                TokenType::IDENTIFIER("a".to_string()),
                TokenType::PLUS,
                TokenType::NUMBER(1.0),
                TokenType::RIGHT_PAREN,
                TokenType::LEFT_BRACE,
                TokenType::PRINT,
                TokenType::IDENTIFIER("a".to_string()),
                TokenType::SEMICOLON,
                TokenType::RIGHT_BRACE,
                TokenType::EOF]
        );
        scanned_file_matches_token_types("test_data/example-2.lox",
            vec![TokenType::CLASS,
                TokenType::IDENTIFIER("SomeClass".to_string()),
                TokenType::LEFT_BRACE,
                TokenType::IDENTIFIER("someMethod".to_string()),
                TokenType::LEFT_PAREN,
                TokenType::IDENTIFIER("someParam".to_string()),
                TokenType::RIGHT_PAREN,
                TokenType::LEFT_BRACE,
                TokenType::VAR,
                TokenType::IDENTIFIER("someVar".to_string()),
                TokenType::EQUAL,
                TokenType::IDENTIFIER("someParam".to_string()),
                TokenType::PLUS,
                TokenType::STRING("text'!".to_string()),
                TokenType::SEMICOLON,
                TokenType::RETURN,
                TokenType::IDENTIFIER("someVar".to_string()),
                TokenType::SEMICOLON,
                TokenType::RIGHT_BRACE,
                TokenType::RIGHT_BRACE,
                TokenType::EOF]
        );
    }
    /// Reads a .lox fixture and compares the scanned token types pairwise.
    fn scanned_file_matches_token_types(file_name: &str, expected: Vec<TokenType>) {
        let path = Path::new(file_name);
        match fs::read_to_string(path) {
            Ok(file_content) => {
                let tokens = scan_tokens(&file_content);
                assert_eq!(tokens.len(), expected.len());
                for (token, expected_type) in tokens.iter().zip(expected.iter()) {
                    assert_eq!(token.token_type, *expected_type);
                }
            }
            // `assert!(false, ...)` hides the real failure behind a generic
            // assertion message (clippy::assert_on_constants); fail with the
            // I/O error and the offending path instead.
            Err(why) => panic!("failed to read {}: {:?}", file_name, why),
        }
    }
}
|
/// An action the shell should perform in response to a command.
pub enum ShellAction {
    // Exit the shell with the given status code
    Exit(i32),
    // Clear the host screen
    ClearHost,
    // Change the current working directory
    ChangePath(String),
    // Print some output
    OutputResult(String),
}
|
use crate::set::Set;
use core::{iter::FromIterator, option};
/// A set that holds at most one element: either a single value or nothing.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SmallSet<A> {
    Singleton(A),
    Empty,
}

impl<A> Default for SmallSet<A> {
    /// The default set is empty.
    fn default() -> Self {
        Self::Empty
    }
}

impl<A> SmallSet<A> {
    /// Consumes the set, yielding its element if it has one.
    pub fn into_option(self) -> Option<A> {
        if let SmallSet::Singleton(item) = self {
            Some(item)
        } else {
            None
        }
    }
    /// Borrows the element, if present.
    pub fn as_option(&self) -> Option<&A> {
        if let SmallSet::Singleton(item) = self {
            Some(item)
        } else {
            None
        }
    }
    /// Mutably borrows the element, if present.
    pub fn as_option_mut(&mut self) -> Option<&mut A> {
        if let SmallSet::Singleton(item) = self {
            Some(item)
        } else {
            None
        }
    }
}

impl<A> From<Option<A>> for SmallSet<A> {
    /// `Some(x)` becomes a singleton; `None` becomes the empty set.
    fn from(src: Option<A>) -> Self {
        src.map_or(SmallSet::Empty, SmallSet::Singleton)
    }
}
impl<A> Set<A> for SmallSet<A>
where
    A: Eq,
{
    /// 1 for a singleton, 0 when empty.
    fn size(&self) -> usize {
        self.as_option().map_or(0, |_| 1)
    }
    /// True iff the stored element equals `value`.
    fn contains(&self, value: &A) -> bool {
        self.as_option().map_or(false, |item| item == value)
    }
    /// The empty set is a subset of everything; a singleton is a subset
    /// exactly when `other` contains its element.
    fn is_subset<R: Set<A>>(&self, other: R) -> bool {
        self.as_option().map_or(true, |item| other.contains(item))
    }
    /// Deep copy of the set.
    fn cloned(&self) -> Self
    where
        A: Clone,
    {
        self.clone()
    }
}
/// Iterates over the zero-or-one owned elements, via `Option`'s iterator.
impl<A> IntoIterator for SmallSet<A> {
    type IntoIter = option::IntoIter<A>;
    type Item = A;
    fn into_iter(self) -> Self::IntoIter {
        self.into_option().into_iter()
    }
}
}
/// Iterates over the zero-or-one borrowed elements.
impl<'a, A> IntoIterator for &'a SmallSet<A> {
    type IntoIter = option::IntoIter<&'a A>;
    type Item = &'a A;
    fn into_iter(self) -> Self::IntoIter {
        self.as_option().into_iter()
    }
}
/// Iterates over the zero-or-one mutably borrowed elements.
impl<'a, A> IntoIterator for &'a mut SmallSet<A> {
    type IntoIter = option::IntoIter<&'a mut A>;
    type Item = &'a mut A;
    fn into_iter(self) -> Self::IntoIter {
        self.as_option_mut().into_iter()
    }
}
impl<A> FromIterator<A> for SmallSet<A> {
fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
iter.into_iter().last().into()
}
}
impl<A> Extend<A> for SmallSet<A> {
fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
*self = iter.into_iter().last().into();
}
}
|
// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
use super::SstPartitionerResult;
use crocksdb_ffi::{
self, DBSstPartitioner, DBSstPartitionerContext, DBSstPartitionerFactory,
DBSstPartitionerRequest,
};
use libc::{c_char, c_uchar, c_void, size_t};
use std::{ffi::CString, ptr, slice};
/// Safe view of a `DBSstPartitionerRequest`: the previous and current user
/// keys being compared and the size of the output file so far.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct SstPartitionerRequest<'a> {
    pub prev_user_key: &'a [u8],
    pub current_user_key: &'a [u8],
    pub current_output_file_size: u64,
}
/// Safe view of a `DBSstPartitionerContext`: compaction attributes handed
/// to the factory when a partitioner is created.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct SstPartitionerContext<'a> {
    pub is_full_compaction: bool,
    pub is_manual_compaction: bool,
    pub output_level: i32,
    pub smallest_key: &'a [u8],
    pub largest_key: &'a [u8],
}
/// User-implemented policy deciding where SST output files are cut during
/// compaction, and whether files may be trivially moved between levels.
pub trait SstPartitioner {
    // Called per key; decides whether to start a new output file here.
    fn should_partition(&mut self, req: &SstPartitionerRequest) -> SstPartitionerResult;
    // Whether the file spanning the given key range may be moved as-is.
    fn can_do_trivial_move(&mut self, smallest_user_key: &[u8], largest_user_key: &[u8]) -> bool;
}
/// FFI destructor callback: reclaims the `Box<P>` whose raw pointer was
/// handed to `crocksdb_sst_partitioner_create`, dropping the partitioner.
extern "C" fn sst_partitioner_destructor<P: SstPartitioner>(ctx: *mut c_void) {
    unsafe {
        // Recover from raw pointer and implicitly drop.
        let _ = Box::from_raw(ctx as *mut P);
    }
}
/// FFI trampoline: reconstructs a borrowed `SstPartitionerRequest` from the
/// raw C request and forwards it to `P::should_partition`.
extern "C" fn sst_partitioner_should_partition<P: SstPartitioner>(
    ctx: *mut c_void,
    request: *mut DBSstPartitionerRequest,
) -> SstPartitionerResult {
    // ctx is the Box<P> raw pointer created in the factory trampoline.
    let partitioner = unsafe { &mut *(ctx as *mut P) };
    let req = unsafe {
        // Key accessors return a pointer and write the length out-param.
        let mut prev_key_len: usize = 0;
        let prev_key = crocksdb_ffi::crocksdb_sst_partitioner_request_prev_user_key(
            request,
            &mut prev_key_len,
        ) as *const u8;
        let mut current_key_len: usize = 0;
        let current_key = crocksdb_ffi::crocksdb_sst_partitioner_request_current_user_key(
            request,
            &mut current_key_len,
        ) as *const u8;
        SstPartitionerRequest {
            prev_user_key: slice::from_raw_parts(prev_key, prev_key_len),
            current_user_key: slice::from_raw_parts(current_key, current_key_len),
            current_output_file_size:
                crocksdb_ffi::crocksdb_sst_partitioner_request_current_output_file_size(request),
        }
    };
    partitioner.should_partition(&req) as _
}
/// FFI trampoline: wraps the raw key pointers as byte slices and forwards
/// to `P::can_do_trivial_move`; the bool result is returned as c_uchar.
extern "C" fn sst_partitioner_can_do_trivial_move<P: SstPartitioner>(
    ctx: *mut c_void,
    smallest_user_key: *const c_char,
    smallest_user_key_len: size_t,
    largest_user_key: *const c_char,
    largest_user_key_len: size_t,
) -> c_uchar {
    let partitioner = unsafe { &mut *(ctx as *mut P) };
    let smallest_key =
        unsafe { slice::from_raw_parts(smallest_user_key as *const u8, smallest_user_key_len) };
    let largest_key =
        unsafe { slice::from_raw_parts(largest_user_key as *const u8, largest_user_key_len) };
    partitioner.can_do_trivial_move(smallest_key, largest_key) as _
}
/// Factory producing an `SstPartitioner` per compaction. `Sync + Send`
/// because RocksDB may call it from multiple background threads.
pub trait SstPartitionerFactory: Sync + Send {
    type Partitioner: SstPartitioner + 'static;
    // Returned as a raw C string pointer, hence &CString rather than &str.
    fn name(&self) -> &CString;
    // Return None to disable partitioning for this compaction.
    fn create_partitioner(&self, context: &SstPartitionerContext) -> Option<Self::Partitioner>;
}
/// FFI destructor callback: reclaims the `Box<F>` created in
/// `new_sst_partitioner_factory`, dropping the factory.
extern "C" fn sst_partitioner_factory_destroy<F: SstPartitionerFactory>(ctx: *mut c_void) {
    unsafe {
        // Recover from raw pointer and implicitly drop.
        let _ = Box::from_raw(ctx as *mut F);
    }
}
/// FFI trampoline: exposes the factory name as a C string pointer.
/// The pointer stays valid because `name()` returns a borrow of a CString
/// owned by the factory itself.
extern "C" fn sst_partitioner_factory_name<F: SstPartitionerFactory>(
    ctx: *mut c_void,
) -> *const c_char {
    let factory = unsafe { &*(ctx as *mut F) };
    factory.name().as_ptr()
}
/// FFI trampoline: rebuilds a borrowed `SstPartitionerContext` from the raw
/// C context, asks the factory for a partitioner, and (if one is returned)
/// boxes it and wraps it in a `DBSstPartitioner` whose callbacks are the
/// trampolines above. Returns null when the factory declines.
extern "C" fn sst_partitioner_factory_create_partitioner<F: SstPartitionerFactory>(
    ctx: *mut c_void,
    context: *mut DBSstPartitionerContext,
) -> *mut DBSstPartitioner {
    let factory = unsafe { &*(ctx as *mut F) };
    let context = unsafe {
        let mut smallest_key_len: usize = 0;
        let smallest_key = crocksdb_ffi::crocksdb_sst_partitioner_context_smallest_key(
            context,
            &mut smallest_key_len,
        ) as *const u8;
        let mut largest_key_len: usize = 0;
        let largest_key = crocksdb_ffi::crocksdb_sst_partitioner_context_largest_key(
            context,
            &mut largest_key_len,
        ) as *const u8;
        SstPartitionerContext {
            // C side reports booleans as integers; != 0 converts.
            is_full_compaction: crocksdb_ffi::crocksdb_sst_partitioner_context_is_full_compaction(
                context,
            ) != 0,
            is_manual_compaction:
                crocksdb_ffi::crocksdb_sst_partitioner_context_is_manual_compaction(context) != 0,
            output_level: crocksdb_ffi::crocksdb_sst_partitioner_context_output_level(context),
            smallest_key: slice::from_raw_parts(smallest_key, smallest_key_len),
            largest_key: slice::from_raw_parts(largest_key, largest_key_len),
        }
    };
    match factory.create_partitioner(&context) {
        None => ptr::null_mut(),
        Some(partitioner) => {
            // Ownership of the boxed partitioner passes to the C object;
            // sst_partitioner_destructor reclaims it.
            let ctx = Box::into_raw(Box::new(partitioner)) as *mut c_void;
            unsafe {
                crocksdb_ffi::crocksdb_sst_partitioner_create(
                    ctx,
                    sst_partitioner_destructor::<F::Partitioner>,
                    sst_partitioner_should_partition::<F::Partitioner>,
                    sst_partitioner_can_do_trivial_move::<F::Partitioner>,
                )
            }
        }
    }
}
/// Wraps a Rust factory into a C `DBSstPartitionerFactory`. Ownership of
/// the boxed factory moves to the C object; it is reclaimed by
/// `sst_partitioner_factory_destroy` when the C side destroys the factory.
pub fn new_sst_partitioner_factory<F: SstPartitionerFactory>(
    factory: F,
) -> *mut DBSstPartitionerFactory {
    unsafe {
        crocksdb_ffi::crocksdb_sst_partitioner_factory_create(
            Box::into_raw(Box::new(factory)) as *mut c_void,
            sst_partitioner_factory_destroy::<F>,
            sst_partitioner_factory_name::<F>,
            sst_partitioner_factory_create_partitioner::<F>,
        )
    }
}
#[cfg(test)]
mod test {
    use std::{
        ffi::{CStr, CString},
        sync::{Arc, Mutex},
    };
    use super::*;
    // Shared (behind Arc<Mutex>) recorder of every callback invocation,
    // the arguments observed, and canned return values for the fakes.
    // NOTE(review): the tests create contexts/requests via the FFI but never
    // destroy them — presumably fine for tests, but worth confirming a
    // matching *_destroy exists.
    struct TestState {
        pub call_create_partitioner: usize,
        pub call_should_partition: usize,
        pub call_can_do_trivial_move: usize,
        pub drop_partitioner: usize,
        pub drop_factory: usize,
        pub should_partition_result: SstPartitionerResult,
        pub can_do_trivial_move_result: bool,
        pub no_partitioner: bool,
        // SstPartitionerRequest fields
        pub prev_user_key: Option<Vec<u8>>,
        pub current_user_key: Option<Vec<u8>>,
        pub current_output_file_size: Option<u64>,
        // can_do_trivial_move params
        pub trivial_move_smallest_key: Option<Vec<u8>>,
        pub trivial_move_largest_key: Option<Vec<u8>>,
        // SstPartitionerContext fields
        pub is_full_compaction: Option<bool>,
        pub is_manual_compaction: Option<bool>,
        pub output_level: Option<i32>,
        pub smallest_key: Option<Vec<u8>>,
        pub largest_key: Option<Vec<u8>>,
    }
    impl Default for TestState {
        fn default() -> Self {
            TestState {
                call_create_partitioner: 0,
                call_should_partition: 0,
                call_can_do_trivial_move: 0,
                drop_partitioner: 0,
                drop_factory: 0,
                should_partition_result: SstPartitionerResult::NotRequired,
                can_do_trivial_move_result: false,
                no_partitioner: false,
                prev_user_key: None,
                current_user_key: None,
                current_output_file_size: None,
                trivial_move_smallest_key: None,
                trivial_move_largest_key: None,
                is_full_compaction: None,
                is_manual_compaction: None,
                output_level: None,
                smallest_key: None,
                largest_key: None,
            }
        }
    }
    // Fake partitioner that records its inputs and returns canned results.
    struct TestSstPartitioner {
        state: Arc<Mutex<TestState>>,
    }
    impl SstPartitioner for TestSstPartitioner {
        fn should_partition(&mut self, req: &SstPartitionerRequest) -> SstPartitionerResult {
            let mut s = self.state.lock().unwrap();
            s.call_should_partition += 1;
            s.prev_user_key = Some(req.prev_user_key.to_vec());
            s.current_user_key = Some(req.current_user_key.to_vec());
            s.current_output_file_size = Some(req.current_output_file_size);
            s.should_partition_result
        }
        fn can_do_trivial_move(&mut self, smallest_key: &[u8], largest_key: &[u8]) -> bool {
            let mut s = self.state.lock().unwrap();
            s.call_can_do_trivial_move += 1;
            s.trivial_move_smallest_key = Some(smallest_key.to_vec());
            s.trivial_move_largest_key = Some(largest_key.to_vec());
            s.can_do_trivial_move_result
        }
    }
    impl Drop for TestSstPartitioner {
        fn drop(&mut self) {
            self.state.lock().unwrap().drop_partitioner += 1;
        }
    }
    lazy_static! {
        static ref FACTORY_NAME: CString =
            CString::new(b"TestSstPartitionerFactory".to_vec()).unwrap();
    }
    struct TestSstPartitionerFactory {
        state: Arc<Mutex<TestState>>,
    }
    impl SstPartitionerFactory for TestSstPartitionerFactory {
        type Partitioner = TestSstPartitioner;
        fn name(&self) -> &CString {
            &FACTORY_NAME
        }
        fn create_partitioner(&self, context: &SstPartitionerContext) -> Option<Self::Partitioner> {
            let mut s = self.state.lock().unwrap();
            s.call_create_partitioner += 1;
            if s.no_partitioner {
                return None;
            }
            s.is_full_compaction = Some(context.is_full_compaction);
            s.is_manual_compaction = Some(context.is_manual_compaction);
            s.output_level = Some(context.output_level);
            s.smallest_key = Some(context.smallest_key.to_vec());
            s.largest_key = Some(context.largest_key.to_vec());
            Some(TestSstPartitioner {
                state: self.state.clone(),
            })
        }
    }
    impl Drop for TestSstPartitionerFactory {
        fn drop(&mut self) {
            self.state.lock().unwrap().drop_factory += 1;
        }
    }
    // The factory name must round-trip unchanged through the C side.
    #[test]
    fn factory_name() {
        let s = Arc::new(Mutex::new(TestState::default()));
        let factory = new_sst_partitioner_factory(TestSstPartitionerFactory { state: s });
        let factory_name =
            unsafe { CStr::from_ptr(crocksdb_ffi::crocksdb_sst_partitioner_factory_name(factory)) };
        assert_eq!(*FACTORY_NAME.as_c_str(), *factory_name);
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_destroy(factory);
        }
    }
    // Context attributes set on the C side must arrive intact in
    // create_partitioner.
    #[test]
    fn factory_create_partitioner() {
        const IS_FULL_COMPACTION: bool = false;
        const IS_MANUAL_COMPACTION: bool = true;
        const OUTPUT_LEVEL: i32 = 3;
        const SMALLEST_KEY: &[u8] = b"aaaa";
        const LARGEST_KEY: &[u8] = b"bbbb";
        let s = Arc::new(Mutex::new(TestState::default()));
        let factory = new_sst_partitioner_factory(TestSstPartitionerFactory { state: s.clone() });
        let context = unsafe { crocksdb_ffi::crocksdb_sst_partitioner_context_create() };
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_context_set_is_full_compaction(
                context,
                IS_FULL_COMPACTION as _,
            );
            crocksdb_ffi::crocksdb_sst_partitioner_context_set_is_manual_compaction(
                context,
                IS_MANUAL_COMPACTION as _,
            );
            crocksdb_ffi::crocksdb_sst_partitioner_context_set_output_level(context, OUTPUT_LEVEL);
            crocksdb_ffi::crocksdb_sst_partitioner_context_set_smallest_key(
                context,
                SMALLEST_KEY.as_ptr() as *const c_char,
                SMALLEST_KEY.len(),
            );
            crocksdb_ffi::crocksdb_sst_partitioner_context_set_largest_key(
                context,
                LARGEST_KEY.as_ptr() as *const c_char,
                LARGEST_KEY.len(),
            );
        }
        let partitioner = unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_create_partitioner(factory, context)
        };
        {
            let sl = s.lock().unwrap();
            assert_eq!(1, sl.call_create_partitioner);
            assert_eq!(IS_FULL_COMPACTION, sl.is_full_compaction.unwrap());
            assert_eq!(IS_MANUAL_COMPACTION, sl.is_manual_compaction.unwrap());
            assert_eq!(OUTPUT_LEVEL, sl.output_level.unwrap());
            assert_eq!(SMALLEST_KEY, sl.smallest_key.as_ref().unwrap().as_slice());
            assert_eq!(LARGEST_KEY, sl.largest_key.as_ref().unwrap().as_slice());
        }
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_destroy(partitioner);
            crocksdb_ffi::crocksdb_sst_partitioner_factory_destroy(factory);
        }
    }
    // A factory returning None must surface as a null partitioner pointer.
    #[test]
    fn factory_create_no_partitioner() {
        let s = Arc::new(Mutex::new(TestState::default()));
        s.lock().unwrap().no_partitioner = true;
        let factory = new_sst_partitioner_factory(TestSstPartitionerFactory { state: s.clone() });
        let context = unsafe { crocksdb_ffi::crocksdb_sst_partitioner_context_create() };
        let partitioner = unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_create_partitioner(factory, context)
        };
        assert_eq!(1, s.lock().unwrap().call_create_partitioner);
        assert_eq!(ptr::null_mut(), partitioner);
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_destroy(factory);
        }
    }
    // Request fields must round-trip through the C request object into
    // should_partition, and its result must come back out.
    #[test]
    fn partitioner_should_partition() {
        const SHOULD_PARTITION: SstPartitionerResult = SstPartitionerResult::Required;
        const PREV_KEY: &[u8] = b"test_key_abc";
        const CURRENT_KEY: &[u8] = b"test_key_def";
        const CURRENT_OUTPUT_FILE_SIZE: u64 = 1234567;
        let s = Arc::new(Mutex::new(TestState::default()));
        s.lock().unwrap().should_partition_result = SHOULD_PARTITION;
        let factory = new_sst_partitioner_factory(TestSstPartitionerFactory { state: s.clone() });
        let context = unsafe { crocksdb_ffi::crocksdb_sst_partitioner_context_create() };
        let partitioner = unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_create_partitioner(factory, context)
        };
        let req = unsafe { crocksdb_ffi::crocksdb_sst_partitioner_request_create() };
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_request_set_prev_user_key(
                req,
                PREV_KEY.as_ptr() as *const c_char,
                PREV_KEY.len(),
            );
            crocksdb_ffi::crocksdb_sst_partitioner_request_set_current_user_key(
                req,
                CURRENT_KEY.as_ptr() as *const c_char,
                CURRENT_KEY.len(),
            );
            crocksdb_ffi::crocksdb_sst_partitioner_request_set_current_output_file_size(
                req,
                CURRENT_OUTPUT_FILE_SIZE,
            );
        }
        let should_partition =
            unsafe { crocksdb_ffi::crocksdb_sst_partitioner_should_partition(partitioner, req) };
        assert_eq!(SHOULD_PARTITION, should_partition);
        {
            let sl = s.lock().unwrap();
            assert_eq!(1, sl.call_create_partitioner);
            assert_eq!(1, sl.call_should_partition);
            assert_eq!(0, sl.call_can_do_trivial_move);
            assert_eq!(PREV_KEY, sl.prev_user_key.as_ref().unwrap().as_slice());
            assert_eq!(
                CURRENT_KEY,
                sl.current_user_key.as_ref().unwrap().as_slice()
            );
            assert_eq!(
                CURRENT_OUTPUT_FILE_SIZE,
                sl.current_output_file_size.unwrap()
            );
        }
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_destroy(partitioner);
            crocksdb_ffi::crocksdb_sst_partitioner_factory_destroy(factory);
        }
    }
    // Key range must round-trip into can_do_trivial_move and its bool back.
    #[test]
    fn partitioner_can_do_trivial_move() {
        const SMALLEST_KEY: &[u8] = b"test_key_abc";
        const LARGEST_KEY: &[u8] = b"test_key_def";
        const RESULT: bool = true;
        let s = Arc::new(Mutex::new(TestState::default()));
        s.lock().unwrap().can_do_trivial_move_result = RESULT;
        let factory = new_sst_partitioner_factory(TestSstPartitionerFactory { state: s.clone() });
        let context = unsafe { crocksdb_ffi::crocksdb_sst_partitioner_context_create() };
        let partitioner = unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_create_partitioner(factory, context)
        };
        let result = unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_can_do_trivial_move(
                partitioner,
                SMALLEST_KEY.as_ptr() as *const c_char,
                SMALLEST_KEY.len(),
                LARGEST_KEY.as_ptr() as *const c_char,
                LARGEST_KEY.len(),
            )
        };
        {
            let sl = s.lock().unwrap();
            assert_eq!(1, sl.call_create_partitioner);
            assert_eq!(0, sl.call_should_partition);
            assert_eq!(1, sl.call_can_do_trivial_move);
            assert_eq!(
                SMALLEST_KEY,
                sl.trivial_move_smallest_key.as_ref().unwrap().as_slice()
            );
            assert_eq!(
                LARGEST_KEY,
                sl.trivial_move_largest_key.as_ref().unwrap().as_slice()
            );
            assert_eq!(RESULT, result);
        }
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_destroy(partitioner);
            crocksdb_ffi::crocksdb_sst_partitioner_factory_destroy(factory);
        }
    }
    // Destroying the C objects must drop the Rust objects exactly once,
    // in the expected order.
    #[test]
    fn drop() {
        let s = Arc::new(Mutex::new(TestState::default()));
        let factory = new_sst_partitioner_factory(TestSstPartitionerFactory { state: s.clone() });
        let context = unsafe { crocksdb_ffi::crocksdb_sst_partitioner_context_create() };
        let partitioner = unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_create_partitioner(factory, context)
        };
        {
            let sl = s.lock().unwrap();
            assert_eq!(0, sl.drop_partitioner);
            assert_eq!(0, sl.drop_factory);
        }
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_destroy(partitioner);
        }
        {
            let sl = s.lock().unwrap();
            assert_eq!(1, sl.drop_partitioner);
            assert_eq!(0, sl.drop_factory);
        }
        unsafe {
            crocksdb_ffi::crocksdb_sst_partitioner_factory_destroy(factory);
        }
        {
            let sl = s.lock().unwrap();
            assert_eq!(1, sl.drop_partitioner);
            assert_eq!(1, sl.drop_factory);
        }
    }
}
|
//! numbers and statistics types and methods
use crate::qtable::Filter;
use num::Float;
use std::cmp::Ordering;
use std::f64::NAN;
/// f64 extension trait: human-friendly formatting helpers.
pub trait F64Ext<T> {
    // Format with `sig` significant digits; NaN/infinity render as `nan`.
    fn frmtf64(&self, sig: usize, nan: &str) -> String;
    // Format as a rounded integer; NaN/infinity render as `nan`.
    fn frmtint(&self, nan: &str) -> String;
}
impl F64Ext<f64> for f64 {
    /// Formats the value with `sig` significant digits, choosing fixed-point
    /// notation for magnitudes roughly in [1e-3, 10^sig] and scientific
    /// notation (two-digit signed exponent) otherwise.
    fn frmtf64(&self, mut sig: usize, nan: &str) -> String {
        if self.is_nan() || self.is_infinite() {
            return nan.to_string();
        }
        // Always show at least one significant digit.
        if sig < 1 {
            sig = 1;
        }
        let mut prc = sig - 1;
        // log10 of magnitude decides fixed vs scientific notation.
        let lgx = self.abs().log10();
        if (lgx >= -3. && lgx <= (sig as f64)) || *self == 0. {
            if *self != 0. {
                let a = prc as isize;
                let b = lgx.trunc() as isize;
                // Digits before the decimal point consume precision.
                if a <= b {
                    prc = 0;
                } else {
                    prc = (a - b) as usize;
                }
                // Values below 1 get one extra fractional digit.
                if lgx < 0. {
                    prc += 1
                }
            }
            format!("{:.1$}", self, prc)
        } else {
            // Rust's `{:e}` prints exponents like `1e5`; normalize to a
            // signed, zero-padded two-digit exponent (`1e+05`).
            let f1 = format!("{:.1$e}", self, prc);
            let f2: Vec<&str> = f1.split('e').collect();
            match f2[1].starts_with('-') {
                true => {
                    let e = f2[1].trim_start_matches('-');
                    match e.len() {
                        1 => format!("{}e-0{}", f2[0], e),
                        _ => format!("{}e-{}", f2[0], e),
                    }
                }
                false => match f2[1].len() {
                    1 => format!("{}e+0{}", f2[0], f2[1]),
                    _ => format!("{}e+{}", f2[0], f2[1]),
                },
            }
        }
    }
    /// Formats the value rounded to the nearest integer.
    fn frmtint(&self, nan: &str) -> String {
        if self.is_nan() || self.is_infinite() {
            return nan.to_string();
        }
        format!("{:.0}", self)
    }
}
/// Total-order comparator for floats that sorts NaN values to the end
/// (NaNs compare equal to each other).
fn value_nans_last<T: Float>(a: &T, b: &T) -> Ordering {
    if a.is_nan() {
        if b.is_nan() {
            Ordering::Equal
        } else {
            Ordering::Greater
        }
    } else if b.is_nan() {
        Ordering::Less
    } else {
        // Neither side is NaN, so partial_cmp cannot fail.
        a.partial_cmp(b).unwrap()
    }
}
/// Vector of f64 samples with summary-statistics methods.
#[derive(Debug, Clone, PartialEq)]
pub struct Numbers {
    // Kept sorted (NaNs last) by `from_f64`, so min/max/percentiles index directly.
    pub(crate) data: Vec<f64>,
}
impl Numbers {
    /// create new Numbers vector of f64 from vector of String,
    /// skipping invalid values, float_limits, outliers
    pub fn new(vals: &Vec<String>, float_limit: f64, filter_by: &Filter) -> Self {
        let data = vals
            .iter()
            .filter_map(|s| s.parse::<f64>().ok())
            .filter(|v| !v.is_nan())
            .filter(|v| *v < float_limit)
            .collect::<Vec<f64>>();
        let numbers = Numbers::from_f64(data);
        match filter_by {
            Filter::None => numbers,
            // Tukey fences: keep values within k*IQR of the quartiles.
            Filter::IQR(k) => {
                let kiqr = *k * numbers.iqr();
                let p25 = numbers.p25();
                let p75 = numbers.p75();
                let data = numbers
                    .data
                    .clone()
                    .into_iter()
                    .filter(|x| *x > p25 - kiqr && *x < p75 + kiqr)
                    .collect();
                Numbers::from_f64(data)
            }
            // Keep values whose z-score magnitude is below k.
            Filter::ZScore(k) => {
                let mea = numbers.mea();
                let std = numbers.std();
                let data = numbers
                    .data
                    .clone()
                    .into_iter()
                    // BUG FIX: was `(*x - mea / std)`, which divided the mean
                    // by the std before subtracting; the z-score is (x - mean) / std.
                    .filter(|x| ((*x - mea) / std).abs() < *k)
                    .collect();
                Numbers::from_f64(data)
            }
            Filter::Lower(f) => {
                let data = numbers
                    .data
                    .clone()
                    .into_iter()
                    .filter(|x| *x > *f)
                    .collect();
                Numbers::from_f64(data)
            }
            Filter::Upper(g) => {
                let data = numbers
                    .data
                    .clone()
                    .into_iter()
                    .filter(|x| *x < *g)
                    .collect();
                Numbers::from_f64(data)
            }
            Filter::Between(f, g) => {
                let data = numbers
                    .data
                    .clone()
                    .into_iter()
                    .filter(|x| *x > *f && *x < *g)
                    .collect();
                Numbers::from_f64(data)
            }
        }
    }
    /// Wraps a raw vector, sorting it ascending with NaNs at the end.
    pub fn from_f64(mut data: Vec<f64>) -> Self {
        data.sort_by(value_nans_last);
        Numbers { data }
    }
    /// mean of Numbers vector of f64 (NaN entries ignored; NAN when empty)
    pub fn mea(&self) -> f64 {
        let mut i = 0.0;
        let mut mean = 0.0;
        for x in &self.data {
            if !x.is_nan() {
                i += 1.0;
                // Incremental mean update avoids large-sum overflow.
                mean += (x - mean) / i;
            }
        }
        if i > 0.0 {
            mean
        } else {
            NAN
        }
    }
    /// yield of Numbers vector of f64: percentage of samples inside
    /// [lowlim, upplim]; a NaN limit means that side is unbounded
    pub fn yld(&self, lowlim: &f64, upplim: &f64) -> f64 {
        if lowlim.is_nan() && upplim.is_nan() {
            return NAN;
        }
        let cnt = self.cnt() as usize;
        if cnt == 0 {
            return NAN;
        }
        let mut lo = 0;
        let mut hi = 0;
        if !lowlim.is_nan() {
            lo = self.data.iter().filter(|&x| x < lowlim).count();
        }
        if !upplim.is_nan() {
            hi = self.data.iter().filter(|&x| x > upplim).count();
        }
        100.0 * ((cnt - lo - hi) as f64 / cnt as f64)
    }
    // k: relative deviation of the mean from the target, normalized by the
    // distance from the target to the nearer (applicable) spec limit.
    pub fn k(&self, lsl: &f64, tgt: &f64, usl: &f64) -> f64 {
        let mut k = NAN;
        let mea = &self.mea();
        if !tgt.is_nan() {
            if !lsl.is_nan() && !usl.is_nan() {
                if mea <= tgt {
                    k = (mea - tgt) / (tgt - lsl)
                } else {
                    k = (mea - tgt) / (usl - tgt)
                }
            }
            if lsl.is_nan() && !usl.is_nan() {
                k = (mea - tgt) / (usl - tgt)
            }
            if !lsl.is_nan() && usl.is_nan() {
                k = (mea - tgt) / (tgt - lsl)
            }
        }
        k
    }
    // cpk: process capability index, min distance from mean to a spec
    // limit over three standard deviations.
    pub fn cpk(&self, lsl: &f64, usl: &f64) -> f64 {
        let mut cpk = NAN;
        let mea = &self.mea();
        let std = &self.std();
        if !lsl.is_nan() && !usl.is_nan() {
            let n = Numbers::from_f64(vec![usl - mea, mea - lsl]);
            cpk = n.min() / (3.0 * std)
        }
        if lsl.is_nan() && !usl.is_nan() {
            cpk = (usl - mea) / (3.0 * std)
        }
        if !lsl.is_nan() && usl.is_nan() {
            cpk = (mea - lsl) / (3.0 * std)
        }
        cpk
    }
    // cp: process capability, spec width over six standard deviations
    // (one-sided when only one limit is given).
    pub fn cp(&self, lsl: &f64, usl: &f64) -> f64 {
        let mut cp = NAN;
        let mea = &self.mea();
        let std = &self.std();
        if !lsl.is_nan() && !usl.is_nan() {
            cp = (usl - lsl) / (6.0 * std)
        }
        if lsl.is_nan() && !usl.is_nan() {
            cp = (usl - mea) / (3.0 * std)
        }
        if !lsl.is_nan() && usl.is_nan() {
            cp = (mea - lsl) / (3.0 * std)
        }
        cp
    }
    /// sample variance (n-1 denominator) of Numbers vector of f64,
    /// ignoring NaN entries; NAN for fewer than two values
    pub fn var(&self) -> f64 {
        // Incremental (Youngs–Cramer) update: after i values with running
        // total `sum`, the correction for x_i is (i*x_i - sum)^2 / (i*(i-1)).
        //
        // BUG FIX: the old version seeded `sum` with the first element and
        // then added every element (including the first) again inside the
        // loop, which both double-counted the first sample and miscounted i,
        // skewing every variance (e.g. var([1,2,3]) came out ~0.917, not 1.0).
        // It also returned 0 instead of NAN for a single sample.
        let mut i = 0.0;
        let mut sum = 0.0;
        let mut variance = 0.0;
        for x in &self.data {
            if !x.is_nan() {
                i += 1.0;
                sum += *x;
                if i > 1.0 {
                    let diff = i * x - sum;
                    variance += diff * diff / (i * (i - 1.0));
                }
            }
        }
        if i > 1.0 {
            variance / (i - 1.0)
        } else {
            NAN
        }
    }
    /// standard deviation of Numbers vector of f64
    pub fn std(&self) -> f64 {
        self.var().sqrt()
    }
    /// minimum of Numbers vector of f64 (first element: data is sorted)
    pub fn min(&self) -> f64 {
        match self.data.len() {
            0 => NAN,
            _ => self.data[0],
        }
    }
    /// maximum of Numbers vector of f64 (last element: data is sorted,
    /// but note NaNs sort last and would be returned here if present)
    pub fn max(&self) -> f64 {
        match self.data.len() {
            0 => NAN,
            _ => self.data[self.data.len() - 1],
        }
    }
    /// range(min,max) of Numbers vector of f64
    pub fn range(&self) -> (f64, f64) {
        match self.data.len() {
            0 => (NAN, NAN),
            _ => (self.data[0], self.data[self.data.len() - 1]),
        }
    }
    /// percentile of Numbers vector of f64; `proc` may be a fraction
    /// (0.25) or a percentage (25); values >= 100% give NAN
    pub fn prc(&self, proc: f64) -> f64 {
        let mut p = proc.abs();
        // Accept percentages by scaling down once.
        if p >= 1.0 {
            p = p / 100.0;
        }
        if p >= 1.0 {
            return NAN;
        }
        match self.data.len() {
            0 => NAN,
            1 => self.data[0],
            2 => self.mea(),
            _ => {
                // Nearest-rank on the sorted data; p < 1 keeps i in bounds.
                let i = (p * self.cnt()) as usize;
                self.data[i]
            }
        }
    }
    /// p25
    pub fn p25(&self) -> f64 {
        self.prc(0.25)
    }
    /// p75
    pub fn p75(&self) -> f64 {
        self.prc(0.75)
    }
    /// iqr
    pub fn iqr(&self) -> f64 {
        self.prc(0.75) - self.prc(0.25)
    }
    /// median of Numbers vector of f64
    pub fn med(&self) -> f64 {
        match self.data.len() {
            0 => NAN,
            1 => self.data[0],
            2 => self.mea(),
            _ => self.prc(0.5),
        }
    }
    /// count of Numbers vector of f64 (NAN when empty)
    pub fn cnt(&self) -> f64 {
        let l = self.data.len();
        match l {
            0 => NAN,
            _ => l as f64,
        }
    }
    /// Histogram over `n` equal-width bins; returns the bin counts and the
    /// bin width. Degenerate data (empty or zero range) yields no bins.
    pub fn bins_delta(&self, mut n: usize) -> (Vec<usize>, f64) {
        if n < 3 {
            n = 11;
        }
        let min = self.min();
        let max = self.max();
        // BUG FIX: the bin width was hard-coded to (max - min) / 11.0 even
        // though the caller may request any n; only the default n == 11
        // behaved correctly. Derive the width from the actual bin count.
        let d = (max - min) / n as f64;
        let mut a = min;
        let mut b = min + d;
        let mut bins: Vec<usize> = vec![0; n];
        let mut i = 0;
        for v in self.data.iter() {
            // Data is sorted, so a single forward-moving window suffices.
            // NOTE(review): the `- 2` caps the last used bin at n-2, leaving
            // bins[n-1] always empty — possibly intended as an off-by-one
            // guard; confirm whether `- 1` was meant.
            if v > &b && i < bins.len() - 2 {
                a = a + d;
                b = b + d;
                i = i + 1;
            }
            bins[i] += 1;
        }
        if min.is_nan() || d == 0.0 {
            bins = vec![];
        }
        (bins, d)
    }
}
|
mod cli;
mod tool;
mod update;
pub use tool::Tool;
pub use cli::CommandLineInterface;
/// Entry point: assembles the top-level tool (with the `update` subtool
/// registered) and hands it to the CLI driver.
fn main() {
    let tool = Tool::new()
        .name("e+")
        .help("Positron Project CLI")
        .tool(crate::update::tool());
    CommandLineInterface::main(tool)
}
|
use std::fmt::Debug;
use crate::interaction::SurfaceInteraction;
use super::Texture;
/// A texture that evaluates to the same value everywhere on a surface.
#[derive(Debug)]
pub struct ConstantTexture<T: Clone + Debug> {
    value: T,
}
impl<T: Clone + Debug> ConstantTexture<T> {
    /// Creates a constant texture from anything convertible into `T`.
    pub fn new(value: impl Into<T>) -> Self {
        Self {
            value: value.into(),
        }
    }
}
impl<T: Clone + Debug> Texture<T> for ConstantTexture<T> {
    /// Returns the stored value regardless of the interaction point.
    fn evaluate(&self, _: &SurfaceInteraction<'_>) -> T {
        self.value.clone()
    }
}
|
mod cli;
mod help;
use std::{
ffi::{CStr, CString, OsString},
fs::{File, Permissions},
io::{self, Read, Seek, Write},
os::unix::prelude::{MetadataExt, OsStringExt, PermissionsExt},
path::{Path, PathBuf},
process::Command,
};
use crate::{
sudoers::Sudoers,
system::{
can_execute,
file::{Chown, FileLock},
signal::{consts::*, register_handlers, SignalStream},
User,
},
};
use self::cli::{VisudoAction, VisudoOptions};
use self::help::{long_help_message, USAGE_MSG};
const VERSION: &str = env!("CARGO_PKG_VERSION");
// Wraps an io::Error with a formatted message prefix, preserving the
// original ErrorKind: io_msg!(err, "opening {}", path).
macro_rules! io_msg {
    ($err:expr, $($tt:tt)*) => {
        io::Error::new($err.kind(), format!("{}: {}", format_args!($($tt)*), $err))
    };
}
/// visudo entry point: parses CLI options, handles help/version directly,
/// and dispatches Check/Run to the matching command function. Exits with
/// status 1 on any error.
pub fn main() {
    let options = match VisudoOptions::from_env() {
        Ok(options) => options,
        Err(error) => {
            println_ignore_io_error!("visudo: {error}\n{USAGE_MSG}");
            std::process::exit(1);
        }
    };
    // Help/Version print and exit; Check/Run resolve to fn pointers with a
    // common (file, perms, owner) signature.
    let cmd = match options.action {
        VisudoAction::Help => {
            println_ignore_io_error!("{}", long_help_message());
            std::process::exit(0);
        }
        VisudoAction::Version => {
            println_ignore_io_error!("visudo version {VERSION}");
            std::process::exit(0);
        }
        VisudoAction::Check => check,
        VisudoAction::Run => run,
    };
    match cmd(options.file.as_deref(), options.perms, options.owner) {
        Ok(()) => {}
        Err(error) => {
            eprintln_ignore_io_error!("visudo: {error}");
            std::process::exit(1);
        }
    }
}
/// Validate a sudoers file without editing it: permissions, ownership, and
/// syntax. Returns `Err` (with a descriptive message) on the first failed
/// check.
fn check(file_arg: Option<&str>, perms: bool, owner: bool) -> io::Result<()> {
    let sudoers_path = Path::new(file_arg.unwrap_or("/etc/sudoers"));

    let sudoers_file = File::open(sudoers_path)
        .map_err(|err| io_msg!(err, "unable to open {}", sudoers_path.display()))?;
    let metadata = sudoers_file.metadata()?;

    // Mode/owner checks always apply to the default file; for an explicit
    // `-f` file they must be requested via the corresponding flags.
    let check_perms = file_arg.is_none() || perms;
    let check_owner = file_arg.is_none() || owner;

    if check_perms {
        // For some reason, the MSB of the mode is on so we need to mask it.
        let mode = metadata.permissions().mode() & 0o777;
        if mode != 0o440 {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                format!(
                    "{}: bad permissions, should be mode 0440, but found {mode:04o}",
                    sudoers_path.display()
                ),
            ));
        }
    }

    if check_owner {
        let owner = (metadata.uid(), metadata.gid());
        if owner != (0, 0) {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                format!(
                    "{}: wrong owner (uid, gid) should be (0, 0), but found {owner:?}",
                    sudoers_path.display()
                ),
            ));
        }
    }

    // Finally, parse the file and report every syntax error on stderr.
    let (_sudoers, errors) = Sudoers::read(&sudoers_file, sudoers_path)?;
    if !errors.is_empty() {
        let mut stderr = io::stderr();
        for crate::sudoers::Error(_position, message) in errors {
            writeln!(stderr, "syntax error: {message}")?;
        }
        return Err(io::Error::new(io::ErrorKind::Other, "invalid sudoers file"));
    }

    writeln!(io::stdout(), "{}: parsed OK", sudoers_path.display())?;
    Ok(())
}
/// Edit (or create) a sudoers file interactively.
///
/// Opens/creates the target file, takes an exclusive lock, normalizes its
/// permissions and ownership, sets up signal-triggered cleanup of a private
/// temporary directory, and then delegates the actual edit loop to
/// `edit_sudoers_file`. The temporary directory is removed before returning.
fn run(file_arg: Option<&str>, perms: bool, owner: bool) -> io::Result<()> {
    let sudoers_path = Path::new(file_arg.unwrap_or("/etc/sudoers"));
    let (sudoers_file, existed) = if sudoers_path.exists() {
        // Open read+write (no truncate): the current contents seed the edit.
        let file = File::options().read(true).write(true).open(sudoers_path)?;
        (file, true)
    } else {
        // Create a sudoers file if it doesn't exist.
        let file = File::create(sudoers_path)?;
        // ogvisudo sets the permissions of the file so it can be read and written by the user and
        // read by the group if the `-f` argument was passed.
        if file_arg.is_some() {
            file.set_permissions(Permissions::from_mode(0o640))?;
        }
        (file, false)
    };
    // Take an exclusive advisory lock so concurrent visudo runs don't race;
    // a held lock surfaces as WouldBlock, reported as "busy".
    let lock = FileLock::exclusive(&sudoers_file, true).map_err(|err| {
        if err.kind() == io::ErrorKind::WouldBlock {
            io_msg!(err, "{} busy, try again later", sudoers_path.display())
        } else {
            err
        }
    })?;
    // Normalize mode/ownership on the default file, or when explicitly asked.
    if perms || file_arg.is_none() {
        sudoers_file.set_permissions(Permissions::from_mode(0o440))?;
    }
    if owner || file_arg.is_none() {
        sudoers_file.chown(User::real_uid(), User::real_gid())?;
    }
    let signal_stream = SignalStream::init()?;
    let handlers = register_handlers([SIGTERM, SIGHUP, SIGINT, SIGQUIT])?;
    let tmp_dir = create_temporary_dir()?;
    let tmp_path = tmp_dir.join("sudoers");
    {
        // Background thread: on any of the registered signals, remove the
        // temporary directory and abort the whole process.
        let tmp_dir = tmp_dir.clone();
        std::thread::spawn(|| -> io::Result<()> {
            signal_stream.recv()?;
            let _ = std::fs::remove_dir_all(tmp_dir);
            drop(handlers);
            std::process::exit(1)
        });
    }
    let tmp_file = File::options()
        .read(true)
        .write(true)
        .create(true)
        .open(&tmp_path)?;
    // Keep the working copy private to the invoking user.
    tmp_file.set_permissions(Permissions::from_mode(0o700))?;
    let result = edit_sudoers_file(
        existed,
        sudoers_file,
        sudoers_path,
        lock,
        tmp_file,
        &tmp_path,
    );
    // Clean up the temporary directory regardless of the edit outcome.
    std::fs::remove_dir_all(tmp_dir)?;
    result
}
/// Drive the interactive edit loop for the sudoers file.
///
/// The temporary copy is (re-)edited with the configured editor until it
/// parses cleanly or the user gives up; only then, and only if the contents
/// actually changed, is the result written back to the real sudoers file,
/// which is finally unlocked.
fn edit_sudoers_file(
    existed: bool,
    mut sudoers_file: File,
    sudoers_path: &Path,
    lock: FileLock,
    mut tmp_file: File,
    tmp_path: &Path,
) -> io::Result<()> {
    let mut editor_path = None;
    let mut sudoers_contents = Vec::new();
    if existed {
        // If the sudoers file existed, read its contents and write them into the temporary file.
        sudoers_file.read_to_end(&mut sudoers_contents)?;
        // Rewind the sudoers file so it can be written later.
        sudoers_file.rewind()?;
        // Write to the temporary file.
        tmp_file.write_all(&sudoers_contents)?;
        // An `editor` setting inside an already-valid sudoers file takes
        // precedence over the fallback editor.
        let (sudoers, errors) = Sudoers::read(sudoers_contents.as_slice(), sudoers_path)?;
        if errors.is_empty() {
            editor_path = sudoers.solve_editor_path();
        }
    }
    let editor_path = match editor_path {
        Some(path) => path,
        None => editor_path_fallback()?,
    };
    let mut stderr = io::stderr();
    loop {
        // Let the user edit the temporary copy.
        Command::new(&editor_path)
            .arg("--")
            .arg(tmp_path)
            .spawn()?
            .wait_with_output()?;
        let (_sudoers, errors) = File::open(tmp_path)
            .and_then(|reader| Sudoers::read(reader, tmp_path))
            .map_err(|err| {
                io_msg!(
                    err,
                    "unable to re-open temporary file ({}), {} unchanged",
                    tmp_path.display(),
                    sudoers_path.display()
                )
            })?;
        if errors.is_empty() {
            break;
        }
        // The edit produced syntax errors: report them, then ask whether to
        // re-edit or exit without saving.
        writeln!(stderr, "Come on... you can do better than that.\n")?;
        for crate::sudoers::Error(_position, message) in errors {
            writeln!(stderr, "syntax error: {message}")?;
        }
        writeln!(stderr)?;
        let stdin = io::stdin();
        let stdout = io::stdout();
        let mut stdin_handle = stdin.lock();
        let mut stdout_handle = stdout.lock();
        loop {
            stdout_handle
                .write_all("What now? e(x)it without saving / (e)dit again: ".as_bytes())?;
            stdout_handle.flush()?;
            let mut input = [0u8];
            if let Err(err) = stdin_handle.read_exact(&mut input) {
                writeln!(stderr, "visudo: cannot read user input: {err}")?;
                return Ok(());
            }
            match &input {
                b"e" => break,
                b"x" => return Ok(()),
                input => writeln!(stderr, "Invalid option: {:?}\n", std::str::from_utf8(input))?,
            }
        }
    }
    let tmp_contents = std::fs::read(tmp_path)?;
    // Only write to the sudoers file if the contents changed.
    if tmp_contents == sudoers_contents {
        writeln!(stderr, "visudo: {} unchanged", tmp_path.display())?;
    } else {
        sudoers_file.write_all(&tmp_contents)?;
        // BUGFIX: the sudoers file was opened without truncation. If the
        // edited contents are shorter than the originals, stale bytes from
        // the old version would otherwise remain past the new end of file,
        // corrupting the result.
        sudoers_file.set_len(tmp_contents.len() as u64)?;
    }
    lock.unlock()?;
    Ok(())
}
/// Locate a fallback text editor when the sudoers file does not name one.
///
/// Only the conventional `/usr/bin/editor` alternative is probed; if it is
/// not executable, a `NotFound` error is returned.
fn editor_path_fallback() -> io::Result<PathBuf> {
    let fallback = Path::new("/usr/bin/editor");
    if !can_execute(fallback) {
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
            "cannot find text editor",
        ));
    }
    Ok(fallback.to_owned())
}
// Build a `&CStr` from a NUL-terminated byte-string literal. The NUL
// invariant is verified with a `debug_assert!` (debug builds only); release
// builds rely on the literal being well-formed.
macro_rules! cstr {
    ($expr:expr) => {{
        // Restrict the input to a 'static byte slice (i.e. a literal).
        let _: &'static [u8] = $expr;
        debug_assert!(std::ffi::CStr::from_bytes_with_nul($expr).is_ok());
        // SAFETY: see `debug_assert!` above
        unsafe { CStr::from_bytes_with_nul_unchecked($expr) }
    }};
}
/// Create a private temporary directory via `libc::mkdtemp` and return its
/// path (e.g. `/tmp/sudoers-Ab3dEf`).
fn create_temporary_dir() -> io::Result<PathBuf> {
    let template = cstr!(b"/tmp/sudoers-XXXXXX\0").to_owned();
    // mkdtemp mutates the template buffer in place and returns it, or NULL on
    // failure. `into_raw` hands ownership of the buffer to C for the call.
    // NOTE(review): on the NULL path the buffer released by `into_raw` is
    // never reclaimed — a small one-shot leak just before erroring out.
    let ptr = unsafe { libc::mkdtemp(template.into_raw()) };
    if ptr.is_null() {
        return Err(io::Error::last_os_error());
    }
    // Reclaim ownership of the filled-in buffer and convert it to a PathBuf.
    let path = OsString::from_vec(unsafe { CString::from_raw(ptr) }.into_bytes()).into();
    Ok(path)
}
|
use std::{
collections::{HashMap, HashSet},
future::Future,
sync::{Arc, Weak},
time::Duration,
};
use bson::oid::ObjectId;
use futures_util::{
stream::{FuturesUnordered, StreamExt},
FutureExt,
};
use tokio::sync::{
mpsc::{self, UnboundedReceiver, UnboundedSender},
watch::{self, Ref},
};
use crate::{
client::options::{ClientOptions, ServerAddress},
cmap::{
conn::ConnectionGeneration,
establish::{ConnectionEstablisher, EstablisherOptions},
Command,
Connection,
PoolGeneration,
},
error::{load_balanced_mode_mismatch, Error, Result},
event::sdam::{
handle_sdam_event,
SdamEvent,
ServerClosedEvent,
ServerDescriptionChangedEvent,
ServerOpeningEvent,
TopologyClosedEvent,
TopologyDescriptionChangedEvent,
TopologyOpeningEvent,
},
runtime::{self, AcknowledgedMessage, WorkerHandle, WorkerHandleListener},
selection_criteria::SelectionCriteria,
ClusterTime,
ServerInfo,
ServerType,
TopologyType,
};
#[cfg(feature = "tracing-unstable")]
use crate::trace::topology::TopologyTracingEventEmitter;
use super::{
monitor::{MonitorManager, MonitorRequestReceiver},
srv_polling::SrvPollingMonitor,
Monitor,
Server,
ServerDescription,
TopologyDescription,
TransactionSupportStatus,
};
/// A struct providing access to the client's current view of the topology.
/// When this is dropped, monitors will stop performing checks.
#[derive(Debug)]
pub(crate) struct Topology {
    // Unique ID for this topology, exposed for tracing and tests only.
    #[cfg(any(feature = "tracing-unstable", test))]
    pub(crate) id: ObjectId,
    // Read side: observes the latest state published by the worker task.
    watcher: TopologyWatcher,
    // Write side: sends update messages to the worker task.
    updater: TopologyUpdater,
    // Keeps the background worker alive; when all handles drop, it stops.
    _worker_handle: WorkerHandle,
}
impl Topology {
    /// Create a new topology from the given client options, starting the
    /// background worker task and, if configured, the SDAM event-forwarding
    /// task.
    pub(crate) fn new(options: ClientOptions) -> Result<Topology> {
        let description = TopologyDescription::default();
        let id = ObjectId::new();
        // Spin up an event-forwarding task if the user registered an SDAM
        // handler or the tracing feature requires events.
        let event_emitter =
            if options.sdam_event_handler.is_some() || cfg!(feature = "tracing-unstable") {
                let user_handler = options.sdam_event_handler.clone();
                #[cfg(feature = "tracing-unstable")]
                let tracing_emitter =
                    TopologyTracingEventEmitter::new(options.tracing_max_document_length_bytes, id);
                let (tx, mut rx) = mpsc::unbounded_channel::<AcknowledgedMessage<SdamEvent>>();
                runtime::execute(async move {
                    while let Some(event) = rx.recv().await {
                        let (event, ack) = event.into_parts();
                        if let Some(ref user_handler) = user_handler {
                            // With tracing enabled the event is also consumed
                            // below, so the user handler gets a clone.
                            #[cfg(feature = "tracing-unstable")]
                            handle_sdam_event(user_handler.as_ref(), event.clone());
                            #[cfg(not(feature = "tracing-unstable"))]
                            handle_sdam_event(user_handler.as_ref(), event);
                        }
                        #[cfg(feature = "tracing-unstable")]
                        handle_sdam_event(&tracing_emitter, event);
                        ack.acknowledge(());
                    }
                });
                Some(SdamEventEmitter { sender: tx })
            } else {
                None
            };
        let (updater, update_receiver) = TopologyUpdater::channel();
        let (worker_handle, handle_listener) = WorkerHandleListener::channel();
        let state = TopologyState {
            description: description.clone(),
            servers: Default::default(),
        };
        let (watcher, publisher) = TopologyWatcher::channel(state);
        let connection_establisher =
            ConnectionEstablisher::new(EstablisherOptions::from_client_options(&options))?;
        let worker = TopologyWorker {
            id,
            topology_description: description,
            servers: Default::default(),
            update_receiver,
            publisher,
            options,
            topology_watcher: watcher.clone(),
            topology_updater: updater.clone(),
            handle_listener,
            event_emitter,
            connection_establisher,
        };
        // The worker owns the actual state from here on; this handle only
        // observes/updates it through channels.
        worker.start();
        Ok(Topology {
            #[cfg(any(feature = "tracing-unstable", test))]
            id,
            watcher,
            updater,
            _worker_handle: worker_handle,
        })
    }
    /// Begin watching for changes in the topology.
    pub(crate) fn watch(&self) -> TopologyWatcher {
        let mut watcher = self.watcher.clone();
        // mark the latest topology as seen
        watcher.receiver.borrow_and_update();
        watcher
    }
    /// Clone the update handle (test only).
    #[cfg(test)]
    pub(crate) fn clone_updater(&self) -> TopologyUpdater {
        self.updater.clone()
    }
    /// Handle an error that occurred during operation execution.
    pub(crate) async fn handle_application_error(
        &self,
        address: ServerAddress,
        error: Error,
        phase: HandshakePhase,
    ) {
        self.updater
            .handle_application_error(address, error, phase)
            .await;
    }
    /// Get the topology's currently highest seen cluster time.
    pub(crate) fn cluster_time(&self) -> Option<ClusterTime> {
        self.watcher
            .peek_latest()
            .description
            .cluster_time()
            .cloned()
    }
    /// Update the topology's highest seen cluster time.
    /// If the provided cluster time is not higher than the topology's currently highest seen
    /// cluster time, this method has no effect.
    pub(crate) async fn advance_cluster_time(&self, to: ClusterTime) {
        self.updater.advance_cluster_time(to).await;
    }
    /// The current type of the topology (e.g. replica set, sharded).
    pub(crate) fn topology_type(&self) -> TopologyType {
        self.watcher.peek_latest().description.topology_type
    }
    /// The logical session timeout advertised by the deployment, if any.
    pub(crate) fn logical_session_timeout(&self) -> Option<Duration> {
        self.watcher
            .peek_latest()
            .description
            .logical_session_timeout
    }
    /// Gets the latest information on whether transactions are supported or not.
    pub(crate) fn transaction_support_status(&self) -> TransactionSupportStatus {
        self.watcher
            .peek_latest()
            .description
            .transaction_support_status()
    }
    /// Updates the given `command` as needed based on the `criteria`.
    pub(crate) fn update_command_with_read_pref<T>(
        &self,
        server_address: &ServerAddress,
        command: &mut Command<T>,
        criteria: Option<&SelectionCriteria>,
    ) {
        self.watcher
            .peek_latest()
            .description
            .update_command_with_read_pref(server_address, command, criteria)
    }
    /// Shut down the worker and all server monitors/pools.
    pub(crate) async fn shutdown(&self) {
        self.updater.shutdown().await;
    }
    /// Ask every server's connection pool to pre-populate itself.
    pub(crate) async fn warm_pool(&self) {
        self.updater.fill_pool().await;
    }
    /// Gets the addresses of the servers in the cluster.
    #[cfg(test)]
    pub(crate) fn server_addresses(&mut self) -> HashSet<ServerAddress> {
        self.servers().into_keys().collect()
    }
    /// Gets the addresses of the servers in the cluster.
    /// If the topology hasn't opened yet, this will wait for it.
    #[cfg(test)]
    pub(crate) fn servers(&mut self) -> HashMap<ServerAddress, Arc<Server>> {
        self.watcher.peek_latest().servers()
    }
    /// Clone the current TopologyDescription (test only).
    #[cfg(test)]
    pub(crate) fn description(&self) -> TopologyDescription {
        self.watcher.peek_latest().description.clone()
    }
    /// Wait for all per-server workers to catch up (test only).
    #[cfg(test)]
    pub(crate) async fn sync_workers(&self) {
        self.updater.sync_workers().await;
    }
}
/// A published snapshot of the topology: the description plus the servers.
#[derive(Debug, Clone)]
pub(crate) struct TopologyState {
    pub(crate) description: TopologyDescription,
    // Weak references, so that the worker dropping a server actually shuts
    // it down even while snapshots of this state are still outstanding.
    servers: HashMap<ServerAddress, Weak<Server>>,
}
impl TopologyState {
    /// Get a HashMap of strong references to the underlying servers in the state, filtering out
    /// any servers that are no longer part of the current topology (i.e. whose weak reference
    /// can no longer be upgraded).
    pub(crate) fn servers(&self) -> HashMap<ServerAddress, Arc<Server>> {
        self.servers
            .iter()
            .filter_map(|(address, weak)| {
                weak.upgrade().map(|server| (address.clone(), server))
            })
            .collect()
    }
}
/// Messages sent to the topology worker to mutate its state.
#[derive(Debug)]
pub(crate) enum UpdateMessage {
    // Raise the topology's highest seen cluster time.
    AdvanceClusterTime(ClusterTime),
    // A new description for a single server (boxed: large variant).
    ServerUpdate(Box<ServerDescription>),
    // Reconcile the server map against the given host list (SRV polling).
    SyncHosts(HashSet<ServerAddress>),
    // An error observed by a server monitor during a check.
    MonitorError {
        address: ServerAddress,
        error: Error,
    },
    // An error observed during operation execution on a connection.
    ApplicationError {
        address: ServerAddress,
        error: Error,
        phase: HandshakePhase,
    },
    // Fan a message out to every server's pool worker.
    Broadcast(BroadcastMessage),
}
/// Messages broadcast to every server's pool worker.
#[derive(Debug, Clone)]
pub(crate) enum BroadcastMessage {
    // Tear down pools; also terminates the topology worker loop.
    Shutdown,
    // Pre-populate connection pools up to minPoolSize.
    FillPool,
    // Barrier used by tests to wait for workers to drain their queues.
    #[cfg(test)]
    SyncWorkers,
}
/// Struct modeling the worker task that owns the actual topology state and processes updates to it.
struct TopologyWorker {
    /// Unique ID for the topology.
    id: ObjectId,
    /// Receiver used to listen for updates to the topology from monitors or operation execution.
    update_receiver: TopologyUpdateReceiver,
    /// Listener used to determine when to stop this worker.
    handle_listener: WorkerHandleListener,
    /// Channel used to publish new topology information (e.g. so that operations can perform
    /// server selection)
    publisher: TopologyPublisher,
    /// Map of addresses to servers in the topology. Once servers are dropped from this map, they
    /// will cease to be monitored and their connection pools will be closed.
    servers: HashMap<ServerAddress, MonitoredServer>,
    /// The current TopologyDescription.
    topology_description: TopologyDescription,
    /// Used to build connections for new servers' monitors.
    connection_establisher: ConnectionEstablisher,
    /// Forwards SDAM events to the user handler / tracing, when configured.
    event_emitter: Option<SdamEventEmitter>,
    /// The client options this topology was created from.
    options: ClientOptions,
    // the following fields stored here for creating new server monitors
    topology_watcher: TopologyWatcher,
    topology_updater: TopologyUpdater,
}
impl TopologyWorker {
    /// Open the topology by populating it with the initial seed list provided in the options.
    /// This will kick off the monitoring tasks for the servers included in the seedlist, as well as
    /// the SRV polling monitor.
    async fn initialize(&mut self) {
        self.emit_event(|| {
            SdamEvent::TopologyOpening(TopologyOpeningEvent {
                topology_id: self.id,
            })
        });
        let mut new_description = self.topology_description.clone();
        new_description.initialize(&self.options);
        self.update_topology(new_description).await;
        // In load-balanced mode the single host is treated as a load
        // balancer and never monitored via heartbeats.
        if self.options.load_balanced == Some(true) {
            let base = ServerDescription::new(self.options.hosts[0].clone());
            self.update_server(ServerDescription {
                server_type: ServerType::LoadBalancer,
                average_round_trip_time: None,
                ..base
            })
            .await;
        }
        if self.monitoring_enabled() {
            SrvPollingMonitor::start(
                self.topology_updater.clone(),
                self.topology_watcher.clone(),
                self.options.clone(),
            );
        }
        #[cfg(test)]
        let _ = self.publisher.initialized_sender.send(true);
    }
    /// Spawn the worker's main loop: process update messages until shutdown
    /// is requested or every `WorkerHandle` has been dropped, then close all
    /// monitors and emit the closing SDAM events.
    fn start(mut self) {
        runtime::execute(async move {
            self.initialize().await;
            // Held until after cleanup so shutdown() only returns once the
            // topology has fully closed.
            let mut shutdown_ack = None;
            loop {
                tokio::select! {
                    Some(update) = self.update_receiver.recv() => {
                        let (update, ack) = update.into_parts();
                        let mut ack = Some(ack);
                        // `changed` reports whether the topology was modified.
                        let changed = match update {
                            UpdateMessage::AdvanceClusterTime(to) => {
                                self.advance_cluster_time(to);
                                true
                            }
                            UpdateMessage::SyncHosts(hosts) => {
                                self.sync_hosts(hosts).await
                            }
                            UpdateMessage::ServerUpdate(sd) => self.update_server(*sd).await,
                            UpdateMessage::MonitorError { address, error } => {
                                self.handle_monitor_error(address, error).await
                            }
                            UpdateMessage::ApplicationError {
                                address,
                                error,
                                phase,
                            } => self.handle_application_error(address, error, phase).await,
                            UpdateMessage::Broadcast(msg) => {
                                // Fan out to every pool worker and wait for all.
                                let rxen: FuturesUnordered<_> = self
                                    .servers
                                    .values()
                                    .map(|v| v.pool.broadcast(msg.clone()))
                                    .collect();
                                let _: Vec<_> = rxen.collect().await;
                                if matches!(msg, BroadcastMessage::Shutdown) {
                                    // Defer acknowledgment until cleanup is done.
                                    shutdown_ack = ack.take();
                                    break
                                }
                                false
                            }
                        };
                        if let Some(ack) = ack {
                            ack.acknowledge(changed);
                        }
                    },
                    _ = self.handle_listener.wait_for_all_handle_drops() => {
                        break
                    }
                }
            }
            // indicate to the topology watchers that the topology is no longer alive
            drop(self.publisher);
            // Close all the monitors.
            let mut close_futures = FuturesUnordered::new();
            for (address, server) in self.servers.into_iter() {
                if let Some(ref emitter) = self.event_emitter {
                    emitter
                        .emit(SdamEvent::ServerClosed(ServerClosedEvent {
                            address,
                            topology_id: self.id,
                        }))
                        .await;
                }
                drop(server.inner);
                close_futures.push(server.monitor_manager.close_monitor());
            }
            while close_futures.next().await.is_some() {}
            if let Some(emitter) = self.event_emitter {
                // Per the SDAM spec, report the description emptying out
                // before the topology-closed event (not in LB mode).
                if !self.topology_description.servers.is_empty()
                    && self.options.load_balanced != Some(true)
                {
                    let previous_description = self.topology_description;
                    let mut new_description = previous_description.clone();
                    new_description.servers.clear();
                    emitter
                        .emit(SdamEvent::TopologyDescriptionChanged(Box::new(
                            TopologyDescriptionChangedEvent {
                                topology_id: self.id,
                                previous_description: previous_description.into(),
                                new_description: new_description.into(),
                            },
                        )))
                        .await;
                }
                emitter
                    .emit(SdamEvent::TopologyClosed(TopologyClosedEvent {
                        topology_id: self.id,
                    }))
                    .await;
            }
            if let Some(ack) = shutdown_ack {
                ack.acknowledge(true);
            }
        });
    }
    /// Publish the current TopologyDescription and map of Servers.
    fn publish_state(&self) {
        let servers = self
            .servers
            .iter()
            .map(|(k, v)| (k.clone(), Arc::downgrade(&v.inner)))
            .collect();
        self.publisher.publish_new_state(TopologyState {
            description: self.topology_description.clone(),
            servers,
        })
    }
    /// Raise the highest seen cluster time and publish the new state.
    fn advance_cluster_time(&mut self, to: ClusterTime) {
        self.topology_description.advance_cluster_time(&to);
        self.publish_state()
    }
    /// Reconcile the topology against a new host list (from SRV polling).
    async fn sync_hosts(&mut self, hosts: HashSet<ServerAddress>) -> bool {
        let mut new_description = self.topology_description.clone();
        new_description.sync_hosts(&hosts);
        self.update_topology(new_description).await
    }
    /// Update the topology using the provided `ServerDescription`.
    async fn update_server(&mut self, sd: ServerDescription) -> bool {
        // TODO: RUST-1270 change this method to not return a result.
        let mut new_description = self.topology_description.clone();
        let _ = new_description.update(sd);
        self.update_topology(new_description).await
    }
    /// Emit the appropriate SDAM monitoring events given the changes to the
    /// topology as the result of an update, if any.
    async fn update_topology(&mut self, new_topology_description: TopologyDescription) -> bool {
        let old_description =
            std::mem::replace(&mut self.topology_description, new_topology_description);
        let diff = old_description.diff(&self.topology_description);
        let changed = diff.is_some();
        if let Some(diff) = diff {
            #[cfg(not(test))]
            let changed_servers = diff.changed_servers;
            // For ordering of events in tests, sort the addresses.
            #[cfg(test)]
            let changed_servers = {
                let mut servers = diff.changed_servers.into_iter().collect::<Vec<_>>();
                servers.sort_by_key(|(addr, _)| match addr {
                    ServerAddress::Tcp { host, port } => (host, port),
                    #[cfg(unix)]
                    ServerAddress::Unix { .. } => unreachable!(),
                });
                servers
            };
            for (address, (previous_description, new_description)) in changed_servers {
                // A server that became usable unblocks its pool.
                if new_description.server_type.is_data_bearing()
                    || (new_description.server_type != ServerType::Unknown
                        && self.topology_description.topology_type() == TopologyType::Single)
                {
                    if let Some(s) = self.servers.get(address) {
                        s.pool.mark_as_ready().await;
                    }
                }
                self.emit_event(|| {
                    SdamEvent::ServerDescriptionChanged(Box::new(ServerDescriptionChangedEvent {
                        address: address.clone(),
                        topology_id: self.id,
                        previous_description: ServerInfo::new_owned(previous_description.clone()),
                        new_description: ServerInfo::new_owned(new_description.clone()),
                    }))
                });
            }
            #[cfg(not(test))]
            let removed_addresses = diff.removed_addresses;
            #[cfg(test)]
            let removed_addresses = {
                let mut addresses = diff.removed_addresses.into_iter().collect::<Vec<_>>();
                addresses.sort_by_key(|addr| match addr {
                    ServerAddress::Tcp { host, port } => (host, port),
                    #[cfg(unix)]
                    ServerAddress::Unix { .. } => unreachable!(),
                });
                addresses
            };
            for address in removed_addresses {
                // Dropping the server stops its monitor and closes its pool.
                let removed_server = self.servers.remove(address);
                debug_assert!(
                    removed_server.is_some(),
                    "tried to remove non-existent address from topology: {}",
                    address
                );
                self.emit_event(|| {
                    SdamEvent::ServerClosed(ServerClosedEvent {
                        address: address.clone(),
                        topology_id: self.id,
                    })
                });
            }
            self.emit_event(|| {
                SdamEvent::TopologyDescriptionChanged(Box::new(TopologyDescriptionChangedEvent {
                    topology_id: self.id,
                    previous_description: old_description.clone().into(),
                    new_description: self.topology_description.clone().into(),
                }))
            });
            #[cfg(not(test))]
            let added_addresses = diff.added_addresses;
            #[cfg(test)]
            let added_addresses = {
                let mut addresses = diff.added_addresses.into_iter().collect::<Vec<_>>();
                addresses.sort_by_key(|addr| match addr {
                    ServerAddress::Tcp { host, port } => (host, port),
                    #[cfg(unix)]
                    ServerAddress::Unix { .. } => unreachable!(),
                });
                addresses
            };
            for address in added_addresses {
                if self.servers.contains_key(address) {
                    debug_assert!(
                        false,
                        "adding address that already exists in topology: {}",
                        address
                    );
                    continue;
                }
                // Wire up a new server with its own monitor worker.
                let (monitor_handle, listener) = WorkerHandleListener::channel();
                let monitor_manager = MonitorManager::new(monitor_handle);
                let monitor_request_receiver = MonitorRequestReceiver::new(
                    &monitor_manager,
                    self.topology_watcher.subscribe_to_topology_check_requests(),
                    listener,
                );
                let server = Server::new(
                    address.clone(),
                    self.options.clone(),
                    self.connection_establisher.clone(),
                    self.topology_updater.clone(),
                    self.id,
                );
                self.servers.insert(
                    address.clone(),
                    MonitoredServer {
                        inner: server,
                        monitor_manager,
                    },
                );
                if self.monitoring_enabled() {
                    Monitor::start(
                        address.clone(),
                        self.topology_updater.clone(),
                        self.topology_watcher.clone(),
                        self.event_emitter.clone(),
                        monitor_request_receiver,
                        self.options.clone(),
                        self.connection_establisher.clone(),
                    );
                }
                self.emit_event(|| {
                    SdamEvent::ServerOpening(ServerOpeningEvent {
                        address: address.clone(),
                        topology_id: self.id,
                    })
                });
            }
        }
        self.publish_state();
        changed
    }
    /// Mark the server at the given address as Unknown using the provided error as the cause.
    async fn mark_server_as_unknown(&mut self, address: ServerAddress, error: Error) -> bool {
        let description = ServerDescription::new_from_error(address, error);
        self.update_server(description).await
    }
    /// Handle an error that occurred during operation execution.
    pub(crate) async fn handle_application_error(
        &mut self,
        address: ServerAddress,
        error: Error,
        handshake: HandshakePhase,
    ) -> bool {
        // If the error was due to a misconfigured query, no need to update the topology.
        // e.g. using loadBalanced=true when the server isn't configured to be used with a load
        // balancer.
        if error.is_incompatible_server() {
            return false;
        }
        match self.server_description(&address) {
            Some(sd) => {
                if let Some(existing_tv) = sd.topology_version() {
                    if let Some(tv) = error.topology_version() {
                        // If the error is from a stale topology version, ignore it.
                        if !tv.is_more_recent_than(existing_tv) {
                            return false;
                        }
                    }
                }
            }
            None => return false,
        }
        let mut server = match self.server(&address) {
            Some(s) => s,
            None => return false,
        };
        // Ignore errors from connections established against an older pool
        // generation — the pool was already cleared for those.
        match &handshake {
            HandshakePhase::PreHello { generation } => {
                match (generation, server.pool.generation()) {
                    (PoolGeneration::Normal(hgen), PoolGeneration::Normal(sgen)) => {
                        if *hgen < sgen {
                            return false;
                        }
                    }
                    // Pre-hello handshake errors are ignored in load-balanced mode.
                    (PoolGeneration::LoadBalanced(_), PoolGeneration::LoadBalanced(_)) => {
                        return false
                    }
                    _ => load_balanced_mode_mismatch!(false),
                }
            }
            HandshakePhase::PostHello { generation }
            | HandshakePhase::AfterCompletion { generation, .. } => {
                if generation.is_stale(&server.pool.generation()) {
                    return false;
                }
            }
        }
        let is_load_balanced =
            self.topology_description.topology_type() == TopologyType::LoadBalanced;
        if error.is_state_change_error() {
            let updated =
                is_load_balanced || self.mark_server_as_unknown(address, error.clone()).await;
            if updated && (error.is_shutting_down() || handshake.wire_version().unwrap_or(0) < 8) {
                server.pool.clear(error, handshake.service_id()).await;
            }
            server.monitor_manager.request_immediate_check();
            updated
        } else if error.is_non_timeout_network_error()
            || (handshake.is_before_completion()
                && (error.is_auth_error()
                    || error.is_network_timeout()
                    || error.is_command_error()))
        {
            let updated = if is_load_balanced {
                // Only clear the pool in load balanced mode if we got far enough in the handshake
                // to determine a serviceId.
                handshake.service_id().is_some()
            } else {
                self.mark_server_as_unknown(server.address.clone(), error.clone())
                    .await
            };
            if updated {
                server
                    .pool
                    .clear(error.clone(), handshake.service_id())
                    .await;
                if !error.is_auth_error() {
                    server.monitor_manager.cancel_in_progress_check(error);
                }
            }
            updated
        } else {
            false
        }
    }
    /// Handle an error that occurred during a monitor check.
    pub(crate) async fn handle_monitor_error(
        &mut self,
        address: ServerAddress,
        error: Error,
    ) -> bool {
        match self.server(&address) {
            Some(server) => {
                let updated = self.mark_server_as_unknown(address, error.clone()).await;
                if updated {
                    // The heartbeat monitor is disabled in load-balanced mode, so this will never
                    // have a service id.
                    server.pool.clear(error, None).await;
                }
                updated
            }
            None => false,
        }
    }
    /// Get the server at the provided address if present in the topology.
    fn server(&self, address: &ServerAddress) -> Option<MonitoredServer> {
        self.servers.get(address).cloned()
    }
    /// Get the server description at the provided address if present in the topology.
    fn server_description(&self, address: &ServerAddress) -> Option<ServerDescription> {
        self.topology_description
            .get_server_description(address)
            .cloned()
    }
    /// Lazily build and emit an SDAM event, if an emitter is configured.
    fn emit_event(&self, make_event: impl FnOnce() -> SdamEvent) {
        if let Some(ref emitter) = self.event_emitter {
            #[allow(clippy::let_underscore_future)]
            let _ = emitter.emit(make_event());
        }
    }
    /// Whether heartbeat/SRV monitoring should run: disabled in load-balanced
    /// mode and (in tests) when explicitly turned off in the test options.
    fn monitoring_enabled(&self) -> bool {
        #[cfg(test)]
        {
            self.options
                .test_options
                .as_ref()
                .map(|to| to.disable_monitoring_threads)
                != Some(true)
                && self.options.load_balanced != Some(true)
        }
        #[cfg(not(test))]
        {
            self.options.load_balanced != Some(true)
        }
    }
}
/// Struct used to update the topology.
#[derive(Debug, Clone)]
pub(crate) struct TopologyUpdater {
    // Each message carries an acknowledgment channel through which the
    // worker reports whether the topology actually changed.
    sender: mpsc::UnboundedSender<AcknowledgedMessage<UpdateMessage, bool>>,
}
impl TopologyUpdater {
pub(crate) fn channel() -> (TopologyUpdater, TopologyUpdateReceiver) {
let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
let updater = TopologyUpdater { sender: tx };
let update_receiver = TopologyUpdateReceiver {
update_receiver: rx,
};
(updater, update_receiver)
}
/// Send an update message to the topology.
async fn send_message(&self, update: UpdateMessage) -> bool {
let (message, receiver) = AcknowledgedMessage::package(update);
match self.sender.send(message) {
Ok(_) => receiver.wait_for_acknowledgment().await.unwrap_or(false),
_ => false,
}
}
/// Handle an error that occurred during a monitor check.
pub(crate) async fn handle_monitor_error(&self, address: ServerAddress, error: Error) -> bool {
self.send_message(UpdateMessage::MonitorError { address, error })
.await
}
/// Handle an error that occurred during operation execution.
pub(crate) async fn handle_application_error(
&self,
address: ServerAddress,
error: Error,
phase: HandshakePhase,
) -> bool {
self.send_message(UpdateMessage::ApplicationError {
address,
error,
phase,
})
.await
}
/// Update the topology using the provided server description, returning a bool
/// indicating whether the topology changed as a result of the update.
pub(crate) async fn update(&self, sd: ServerDescription) -> bool {
self.send_message(UpdateMessage::ServerUpdate(Box::new(sd)))
.await
}
pub(crate) async fn advance_cluster_time(&self, to: ClusterTime) {
self.send_message(UpdateMessage::AdvanceClusterTime(to))
.await;
}
/// Update the provided state to contain the given list of hosts, removing any
/// existing servers whose addresses aren't present in the list.
///
/// This will start server monitors for the newly added servers.
pub(crate) async fn sync_hosts(&self, hosts: HashSet<ServerAddress>) {
self.send_message(UpdateMessage::SyncHosts(hosts)).await;
}
pub(crate) async fn shutdown(&self) {
self.send_message(UpdateMessage::Broadcast(BroadcastMessage::Shutdown))
.await;
}
pub(crate) async fn fill_pool(&self) {
self.send_message(UpdateMessage::Broadcast(BroadcastMessage::FillPool))
.await;
}
#[cfg(test)]
pub(crate) async fn sync_workers(&self) {
self.send_message(UpdateMessage::Broadcast(BroadcastMessage::SyncWorkers))
.await;
}
}
/// Receiver used to listen for updates to the topology.
pub(crate) struct TopologyUpdateReceiver {
    // Messages arrive paired with an acknowledgment channel for the caller.
    update_receiver: UnboundedReceiver<AcknowledgedMessage<UpdateMessage, bool>>,
}
impl TopologyUpdateReceiver {
    /// Receive the next update, or `None` once every `TopologyUpdater` has
    /// been dropped.
    pub(crate) async fn recv(&mut self) -> Option<AcknowledgedMessage<UpdateMessage, bool>> {
        self.update_receiver.recv().await
    }
}
/// Struct used to get the latest topology state and monitor the topology for changes.
#[derive(Debug, Clone)]
pub(crate) struct TopologyWatcher {
    /// Receiver for the latest set of servers and latest TopologyDescription published by the
    /// topology.
    receiver: watch::Receiver<TopologyState>,
    /// Sender used to request a check of the entire topology. The number indicates how many
    /// operations have requested an update and are waiting for the topology to change.
    sender: Arc<watch::Sender<u32>>,
    /// Whether or not this watcher incremented the count in `sender`.
    requested_check: bool,
    /// Receiver (tests only) signaling that the worker finished initializing.
    #[cfg(test)]
    initialized_receiver: watch::Receiver<bool>,
}
impl TopologyWatcher {
    /// Create a connected watcher / publisher pair seeded with the given state.
    fn channel(initial_state: TopologyState) -> (TopologyWatcher, TopologyPublisher) {
        #[cfg(test)]
        let (initialized_sender, initialized_receiver) = watch::channel(false);
        let (tx, rx) = watch::channel(initial_state);
        let watcher = TopologyWatcher {
            receiver: rx,
            // Fresh check-request counter, shared by all clones of this watcher.
            sender: Arc::new(watch::channel(0).0),
            requested_check: false,
            #[cfg(test)]
            initialized_receiver,
        };
        let publisher = TopologyPublisher {
            state_sender: tx,
            #[cfg(test)]
            initialized_sender,
        };
        (watcher, publisher)
    }
    /// Whether the topology is still active or if all `Client` instances using it have gone
    /// out of scope.
    pub(crate) fn is_alive(&self) -> bool {
        // has_changed() errors out once the publisher (worker) is dropped.
        self.receiver.has_changed().is_ok()
    }
    /// Get a server description for the server at the provided address.
    pub(crate) fn server_description(&self, address: &ServerAddress) -> Option<ServerDescription> {
        self.receiver
            .borrow()
            .description
            .get_server_description(address)
            .cloned()
    }
    /// Clone the latest state, marking it as seen.
    pub(crate) fn observe_latest(&mut self) -> TopologyState {
        self.receiver.borrow_and_update().clone()
    }
    /// Create a receiver that monitors check requests (used by server monitors).
    fn subscribe_to_topology_check_requests(&self) -> TopologyCheckRequestReceiver {
        TopologyCheckRequestReceiver {
            receiver: self.sender.subscribe(),
        }
    }
    /// Request that all the monitors associated with the topology perform immediate checks.
    pub(crate) fn request_immediate_check(&mut self) {
        // Each watcher contributes at most one pending request.
        if self.requested_check {
            return;
        }
        self.requested_check = true;
        // Increment the number of operations waiting for a topology update. When the monitors
        // see this, they'll perform checks as soon as possible.
        // Once a change is detected or this watcher is dropped, this will be decremented again.
        self.sender.send_modify(|counter| *counter += 1);
    }
    /// Wait for a new state to be published or for the timeout to be reached, returning a bool
    /// indicating whether an update was seen or not.
    ///
    /// This method marks the new topology state as seen.
    pub(crate) async fn wait_for_update(&mut self, timeout: impl Into<Option<Duration>>) -> bool {
        let changed = if let Some(timeout) = timeout.into() {
            matches!(
                runtime::timeout(timeout, self.receiver.changed()).await,
                Ok(Ok(()))
            )
        } else {
            self.receiver.changed().await.is_ok()
        };
        if changed {
            // The awaited change satisfies any pending check request.
            self.retract_immediate_check_request();
        }
        changed
    }
    /// Undo a previous `request_immediate_check`, decrementing the shared counter.
    fn retract_immediate_check_request(&mut self) {
        if self.requested_check {
            self.requested_check = false;
            self.sender.send_modify(|count| *count -= 1);
        }
    }
    /// Borrow the latest state. This does not mark it as seen.
    ///
    /// Note: this method holds a read lock on the state, so it is best if the borrow is
    /// short-lived. For longer use-cases, clone the `TopologyState` or use `observe_latest`
    /// instead.
    pub(crate) fn peek_latest(&self) -> Ref<TopologyState> {
        self.receiver.borrow()
    }
    /// The current type of the topology.
    pub(crate) fn topology_type(&self) -> TopologyType {
        self.peek_latest().description.topology_type
    }
    /// Wait until the topology worker has had time to initialize from the initial seed list and
    /// options.
    #[cfg(test)]
    pub(crate) async fn wait_until_initialized(&mut self) {
        while !*self.initialized_receiver.borrow() {
            if self.initialized_receiver.changed().await.is_err() {
                return;
            }
        }
    }
}
impl Drop for TopologyWatcher {
    fn drop(&mut self) {
        // Withdraw any outstanding immediate-check request so the counter the
        // monitors watch does not leak an increment.
        self.retract_immediate_check_request();
    }
}
/// Struct used to broadcast the latest view of the topology.
struct TopologyPublisher {
    // Broadcasts each new `TopologyState` to every `TopologyWatcher`.
    state_sender: watch::Sender<TopologyState>,
    /// Sender used (in tests) to indicate when the Topology has been initialized from the initial
    /// seed list and options.
    #[cfg(test)]
    initialized_sender: watch::Sender<bool>,
}
impl TopologyPublisher {
    /// Publish a new state, notifying all of the outstanding `TopologyWatcher`s.
    ///
    /// Note that even if the provided state is equivalent to the previously broadcasted
    /// `TopologyState`, the watchers will still be notified.
    fn publish_new_state(&self, state: TopologyState) {
        // A send error only means no watchers remain; that is fine to ignore.
        let _ = self.state_sender.send(state);
    }
}
/// Handle used to emit SDAM events.
///
/// If the topology has been closed, events emitted via this handle will not be sent to
/// handlers.
#[derive(Clone)]
pub(crate) struct SdamEventEmitter {
    // Events are acknowledged by the receiving side; see `emit`.
    sender: UnboundedSender<AcknowledgedMessage<SdamEvent>>,
}
impl SdamEventEmitter {
    /// Emit an SDAM event.
    ///
    /// This method returns a future that can be awaited until the event has been actually emitted.
    /// It is not necessary to await this future.
    pub(crate) fn emit(&self, event: impl Into<SdamEvent>) -> impl Future<Output = ()> {
        let (msg, ack) = AcknowledgedMessage::package(event.into());
        // if event handler has stopped listening, no more events should be emitted,
        // so we can safely ignore any send errors here.
        let _ = self.sender.send(msg);
        // Resolve once the receiver acknowledges the message, discarding the payload.
        ack.wait_for_acknowledgment().map(|_| ())
    }
}
/// Enum describing a point in time during an operation's execution relative to the MongoDB
/// handshake for the connection being used in that operation.
///
/// This is used to determine the error handling semantics for certain error types.
#[derive(Debug, Clone)]
pub(crate) enum HandshakePhase {
    /// Describes a point that occurred before the initial hello completed (e.g. when opening the
    /// socket).
    PreHello { generation: PoolGeneration },
    /// Describes a point in time after the initial hello has completed, but before the entire
    /// handshake (e.g. including authentication) completes.
    PostHello { generation: ConnectionGeneration },
    /// Describes a point in time after the handshake completed (e.g. when the command was sent to
    /// the server).
    AfterCompletion {
        generation: ConnectionGeneration,
        max_wire_version: i32,
    },
}
impl HandshakePhase {
    /// Build an `AfterCompletion` phase from a connection whose handshake finished.
    pub(crate) fn after_completion(handshaked_connection: &Connection) -> Self {
        // given that this is a handshaked connection, the stream description should
        // always be available, so 0 should never actually be returned here.
        let max_wire_version = handshaked_connection
            .stream_description()
            .ok()
            .and_then(|sd| sd.max_wire_version)
            .unwrap_or(0);
        Self::AfterCompletion {
            generation: handshaked_connection.generation,
            max_wire_version,
        }
    }

    /// The `serviceId` reported by the server. If the initial hello has not completed, returns
    /// `None`.
    pub(crate) fn service_id(&self) -> Option<ObjectId> {
        match self {
            HandshakePhase::PreHello { .. } => None,
            HandshakePhase::PostHello { generation, .. }
            | HandshakePhase::AfterCompletion { generation, .. } => generation.service_id(),
        }
    }

    /// Whether this phase is before the handshake completed or not.
    fn is_before_completion(&self) -> bool {
        !matches!(self, HandshakePhase::AfterCompletion { .. })
    }

    /// The wire version of the server as reported by the handshake. If the handshake did not
    /// complete, this returns `None`.
    fn wire_version(&self) -> Option<i32> {
        if let HandshakePhase::AfterCompletion {
            max_wire_version, ..
        } = self
        {
            Some(*max_wire_version)
        } else {
            None
        }
    }
}
/// Struct used to receive topology-wide immediate check requests from operations in server
/// selection. Such requests can be made through a `TopologyWatcher`.
#[derive(Debug, Clone)]
pub(crate) struct TopologyCheckRequestReceiver {
    /// This receives the number of operations that are blocked waiting for an update to the
    /// topology. If the number is > 0, then that means the monitor should perform a check
    /// ASAP.
    ///
    /// A counter is used here instead of `()` so that operations can retract their requests. This
    /// enables the monitor to unambiguously determine whether a request is stale or not and
    /// eliminates races between the monitor listening for check requests and operations
    /// actually sending them.
    receiver: watch::Receiver<u32>,
}
impl TopologyCheckRequestReceiver {
    /// Block until at least one operation is requesting an immediate topology check.
    pub(crate) async fn wait_for_check_request(&mut self) {
        // A non-zero counter means a request is outstanding.
        while *self.receiver.borrow() == 0 {
            // If all the requesters hung up, then just return early.
            if self.receiver.changed().await.is_err() {
                return;
            };
        }
    }
}
/// Struct wrapping a [`Server`]. When this is dropped, the monitor for this server will close.
#[derive(Debug, Clone)]
struct MonitoredServer {
    // The wrapped server; exposed through `Deref`.
    inner: Arc<Server>,
    // Handle controlling the server's monitor lifetime.
    monitor_manager: MonitorManager,
}
impl std::ops::Deref for MonitoredServer {
    type Target = Server;

    /// Allow a `MonitoredServer` to be used wherever a `&Server` is expected.
    fn deref(&self) -> &Self::Target {
        // Deref-coerces through the inner `Arc`.
        &self.inner
    }
}
|
use std::env;
use std::process;
use common::load_file;
use multiarray::Array2D;
use std::collections::{HashMap, HashSet};
/// A point on the grid, parsed from one "x, y" input line.
#[derive(Debug)]
struct Coord {
    // First number on the input line.
    x: i32,
    // Second number on the input line.
    y: i32,
}
impl Coord {
    /// Manhattan (taxicab) distance between `self` and `other`.
    fn dist(&self, other: Coord) -> i32 {
        let dx = (self.x - other.x).abs();
        let dy = (self.y - other.y).abs();
        dx + dy
    }
}
/// Advent of Code 2018 day 6: closest-coordinate areas (part 1) and the region
/// within total distance 10000 of all coordinates (part 2).
///
/// Usage: `day6 <file>`; exits with status 1 when the input path is missing.
fn main() {
    let args: Vec<String> = env::args().collect();
    if args.len() != 2 {
        println!("day6 <file>");
        process::exit(1);
    }
    let rows = load_file(&args[1]);

    // Parse each "x, y" line into a Coord.
    let coords: Vec<Coord> = rows
        .iter()
        .map(|r| r.split(", ").collect::<Vec<&str>>())
        .map(|c| Coord {
            x: c[0].parse::<i32>().unwrap(),
            y: c[1].parse::<i32>().unwrap(),
        })
        .collect();

    let max_x = coords.iter().map(|c| c.x).max().unwrap();
    let max_y = coords.iter().map(|c| c.y).max().unwrap();
    println!("Max: x={} y={}", max_x, max_y);

    // BUG FIX: the grid must span (max_x + 1) x (max_y + 1) cells so that
    // coordinates lying exactly on the maximum row/column are inside it; the
    // previous [max_x, max_y] sizing excluded them entirely and made the edge
    // scan below look at the wrong last row/column.
    let width = max_x + 1;
    let height = max_y + 1;
    let mut board = Array2D::new([width as usize, height as usize], 0i32);
    for x in 0..width {
        for y in 0..height {
            let mut distances: Vec<(usize, i32)> = coords
                .iter()
                .enumerate()
                .map(|(i, c)| (i, c.dist(Coord { x, y })))
                .collect();
            distances.sort_by_key(|&(_, d)| d);
            // -1 marks a tie between the two nearest coordinates: the cell is
            // in nobody's area. Guard the [1] access for single-coordinate input.
            if distances.len() > 1 && distances[0].1 == distances[1].1 {
                board[[x as usize, y as usize]] = -1;
            } else {
                board[[x as usize, y as usize]] = distances[0].0 as i32;
            }
        }
    }

    // Count how many cells each coordinate id owns.
    let mut sizes = HashMap::new();
    for x in 0..width {
        for y in 0..height {
            let id = board[[x as usize, y as usize]];
            if id == -1 {
                continue;
            }
            let size_counter = sizes.entry(id).or_insert(0);
            *size_counter += 1u32;
        }
    }

    // find all ids that face the edge of the board
    // exclude them and do a max
    let mut infinite_ids = HashSet::new();
    for x in 0..width {
        infinite_ids.insert(board[[x as usize, 0]]);
        infinite_ids.insert(board[[x as usize, max_y as usize]]);
    }
    for y in 0..height {
        infinite_ids.insert(board[[0, y as usize]]);
        infinite_ids.insert(board[[max_x as usize, y as usize]]);
    }
    let finite_sizes = sizes.iter().filter(|(id, _)| !infinite_ids.contains(id));
    let max_area = finite_sizes.max_by_key(|&(_, size)| size).unwrap();
    println!("Part 1: Max area by id {} area {}", max_area.0, max_area.1);

    // Part 2: count cells whose summed distance to every coordinate is < 10000.
    let mut area_size = 0;
    for x in 0..width {
        for y in 0..height {
            let sum_dist: i32 = coords.iter().map(|c| c.dist(Coord { x, y })).sum();
            if sum_dist < 10000 {
                area_size += 1;
            }
        }
    }
    println!("Part 2: Area is {}", area_size);
}
|
/// Placeholder entry point; the comments below are language-semantics reminders.
fn main() {
    // Rust has no `i++` / `++i` increment operators.
    // See `std::num::Wrapping` for intentional wrap-around arithmetic on overflow.
}
extern crate clap;
use crate::build::BuildSystemBase;
use clap::{App, Arg};
/// Entry point to generating Android.mk / Android.bp.
///
/// Parses the command line into a `BuildSystemBase`: the input APK path plus
/// optional overrides (module name, default architecture, target OS version,
/// dexpreopt, privileged, `.so` extraction, verbosity) and the output flavor
/// (makefile by default, Soong blueprint with `-s`, Bazel BUILD with `-b`).
pub fn read_input() -> BuildSystemBase {
    let matches = App::new("Generate Android.mk / Android.bp automatically from prebuilt APK's")
        .version("1.2.0")
        .author("Behxhet S. <bensadiku65@gmail.com>")
        .about("Generate Android.mk or Android.bp from a prebuilt APK ")
        .arg(
            Arg::with_name("input")
                .short("i")
                .long("input")
                .required(true)
                .help("Input APK file path you want to generate the mk or bp")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("name")
                .short("n")
                .long("name")
                .required(false)
                .help("Name of the APK file you want to generate the mk or bp")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("architecture")
                .short("a")
                .long("arch")
                .required(false)
                .help("Specify the default architecture")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("os")
                .short("o")
                .long("os")
                .required(false)
                // BUG FIX: without `takes_value(true)` clap treats `--os` as a bare
                // flag and `matches.value_of("os")` below could never see a
                // user-supplied version.
                .takes_value(true)
                .help("Android OS version to generate the mk/bp (semver)"),
        )
        .arg(
            Arg::with_name("dexpreopt")
                .short("d")
                .long("dex")
                .required(false)
                .takes_value(true)
                .possible_values(&["true", "false"])
                .hide_possible_values(false)
                .help("To enable or disable pre-optimization. "),
        )
        .arg(
            Arg::with_name("privileged")
                .short("p")
                .long("privileged")
                .required(false)
                .help("Make app privileged (priv-app)"),
        )
        .arg(
            Arg::with_name("extract")
                .short("e")
                .long("extract")
                .required(false)
                .help("Extract .so libs /lib/<abi>/lib<name>.so"),
        )
        .arg(
            Arg::with_name("verbose")
                .short("v")
                .long("verbose")
                .required(false)
                .help("Enable verbose debug logging"),
        )
        .arg(
            Arg::with_name("soong")
                .short("s")
                .long("soong")
                .required(false)
                .help("Generate Soong Android.bp files from Android apk"),
        )
        .arg(
            Arg::with_name("bazel")
                .short("b")
                .long("bazel")
                .required(false)
                .help("Generate Bazel BUILD files from Android apk"),
        )
        .get_matches();
    // Input path of the apk, should never be empty (clap enforces `required`)!
    let input = matches.value_of("input").unwrap();
    // If empty, default to input
    // Get file name without path or ext
    let name = matches.value_of("name").unwrap_or("");
    // Default architecture
    // If not supplied, it will add all architectures found in APK
    let default_architectures = matches.value_of("architecture").unwrap_or("");
    // Did user specify a default architecture
    let has_default_architecture = matches.is_present("architecture");
    // Default to 6.0+
    // Un-used for now!
    let os = matches.value_of("os").unwrap_or("6.0");
    // Privileged app
    let privileged = matches.is_present("privileged");
    // Ability to extract .so libs onto the directory
    let extract_so = matches.is_present("extract");
    // Enable logging
    let debug = matches.is_present("verbose");
    // Generate blueprint instead of makefile
    let bp = matches.is_present("soong");
    // Generate bazel instead of makefile
    let bz = matches.is_present("bazel");
    // Default selection is makefile (when neither soong nor bazel was requested).
    let mk = !bp && !bz;
    // --dex supports the values 'true' or 'false' to enable or disable pre-optimization,
    // respectively.
    let has_dex_flag = matches.is_present("dexpreopt");
    let dex_flag = matches.value_of("dexpreopt").unwrap_or("true");
    let dex = (
        // Was the flag passed at all?
        has_dex_flag,
        // If it was, what's the value?
        dex_flag == "true",
    );
    BuildSystemBase::new(
        input,
        name,
        default_architectures,
        has_default_architecture,
        os,
        dex,
        privileged,
        extract_so,
        debug,
        bp,
        mk,
        bz,
    )
}
|
use derive_more::Display;
use diesel::result::{DatabaseErrorKind, Error as DBError};
use graphql_depth_limit::ExceedMaxDepth;
use juniper::graphql_value;
use std::convert::From;
use validator::ValidationErrors;
/// Details carried by `ServiceError::Duplicate`: the database message for a
/// unique-constraint violation plus its detail text.
#[derive(Debug)]
pub struct DuplicateErrorInfo {
    // Database error message; used as the GraphQL error message.
    pub origin: String,
    // Extra detail from the database ("No Info" when none was provided).
    pub info: String,
}
/// Application-level error type, mapped onto GraphQL field errors via
/// `juniper::IntoFieldError`.
#[allow(dead_code)]
#[derive(Debug, Display)]
pub enum ServiceError {
    #[display(fmt = "Internal Server Error")]
    InternalServerError,
    #[display(fmt = "Unauthorized")]
    Unauthorized,
    /// A unique-constraint violation (constructed by the `From<DBError>` impl).
    #[display(fmt = "Duplicate")]
    Duplicate(DuplicateErrorInfo),
    /// Input failed `validator` checks.
    #[display(fmt = "Validation Error")]
    ValidationError(ValidationErrors),
    #[display(fmt = "Unimplemented")]
    Unimplemented,
    /// The GraphQL query exceeded the configured depth limit.
    MaxDepthLimit(ExceedMaxDepth),
}
impl juniper::IntoFieldError for ServiceError {
    /// Translate a `ServiceError` into a GraphQL field error carrying a
    /// machine-readable `"type"` value that clients can switch on.
    fn into_field_error(self) -> juniper::FieldError {
        match self {
            ServiceError::Unauthorized => juniper::FieldError::new(
                "Unauthorized",
                graphql_value!({
                    "type": "NO_ACCESS"
                }),
            ),
            ServiceError::Unimplemented => juniper::FieldError::new(
                "This functionality is not implemented yet.",
                graphql_value!({
                    "type": "UNIMPLEMENTED"
                }),
            ),
            // The database's message becomes the user-facing error message.
            ServiceError::Duplicate(error_info) => juniper::FieldError::new(
                error_info.origin,
                graphql_value!({
                    "type": "DUPLICATE_INFO"
                }),
            ),
            // Improve Logic for generating error messages for field validation errors
            ServiceError::ValidationError(_err) => juniper::FieldError::new(
                "Validation Error",
                graphql_value!({
                    "type": "VALIDATION_ERROR"
                }),
            ),
            ServiceError::MaxDepthLimit(err) => {
                // Forward the depth-limit description so the client knows the limit.
                let message = format!("{}", err);
                juniper::FieldError::new(
                    "Max Depth Limit",
                    graphql_value!({
                        "type": "MAX_DEPTH_LIMIT",
                        "message": message
                    }),
                )
            }
            // Catch-all (currently `InternalServerError`): deliberately vague so
            // internals are not leaked to clients.
            _ => juniper::FieldError::new(
                "Unknown Error",
                graphql_value!({
                    "type": "UNKNOWN_ERROR",
                }),
            ),
        }
    }
}
impl From<DBError> for ServiceError {
    /// Map a diesel error onto a `ServiceError`.
    ///
    /// Right now we just care about `UniqueViolation` from diesel, but this is the
    /// place to map more database errors as the app grows; everything else
    /// collapses to `InternalServerError`.
    fn from(error: DBError) -> ServiceError {
        match error {
            // Note: the binding was previously named `_info`, which wrongly signalled
            // "unused" — it supplies both the message and the details below. Matching
            // the kind directly in the pattern also removes the nested `if let`.
            DBError::DatabaseError(DatabaseErrorKind::UniqueViolation, info) => {
                ServiceError::Duplicate(DuplicateErrorInfo {
                    origin: info.message().to_string(),
                    info: info.details().unwrap_or("No Info").to_string(),
                })
            }
            _ => ServiceError::InternalServerError,
        }
    }
}
|
use serde::{Deserialize, Serialize};
use topology::provider;
/// JSON (camelCase) representation of a mix provider's presence announcement.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MixProviderPresence {
    pub location: String,
    // Address string; parsed into a socket address when converted to a topology node.
    pub client_listener: String,
    // Address string; parsed into a socket address when converted to a topology node.
    pub mixnet_listener: String,
    pub pub_key: String,
    pub registered_clients: Vec<MixProviderClient>,
    // Last-seen marker — presumably a timestamp; confirm units with the producer.
    pub last_seen: u64,
    pub version: String,
}
impl From<MixProviderPresence> for topology::provider::Node {
    /// Convert a presence announcement into a topology node.
    ///
    /// Implemented as `From` rather than a hand-written `Into`, per the standard
    /// library's guideline; the blanket `impl<T, U: From<T>> Into<U> for T` keeps
    /// every existing `.into()` call site working unchanged.
    ///
    /// # Panics
    /// Panics if `client_listener` or `mixnet_listener` cannot be parsed into the
    /// node's listener address type.
    fn from(presence: MixProviderPresence) -> Self {
        topology::provider::Node {
            location: presence.location,
            client_listener: presence.client_listener.parse().unwrap(),
            mixnet_listener: presence.mixnet_listener.parse().unwrap(),
            pub_key: presence.pub_key,
            registered_clients: presence
                .registered_clients
                .into_iter()
                .map(|c| c.into())
                .collect(),
            last_seen: presence.last_seen,
            version: presence.version,
        }
    }
}
impl From<topology::provider::Node> for MixProviderPresence {
    /// Build a presence announcement from a topology node, stringifying the
    /// listener addresses for serialization.
    fn from(mpn: provider::Node) -> Self {
        MixProviderPresence {
            location: mpn.location,
            client_listener: mpn.client_listener.to_string(),
            mixnet_listener: mpn.mixnet_listener.to_string(),
            pub_key: mpn.pub_key,
            registered_clients: mpn
                .registered_clients
                .into_iter()
                .map(|c| c.into())
                .collect(),
            last_seen: mpn.last_seen,
            version: mpn.version,
        }
    }
}
/// JSON (camelCase) representation of a client registered with a mix provider.
#[derive(Clone, Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MixProviderClient {
    pub pub_key: String,
}
impl From<MixProviderClient> for topology::provider::Client {
    /// Convert the presence representation of a client into its topology form.
    ///
    /// `From` is preferred over a manual `Into` impl (std guideline); `.into()`
    /// callers keep working through the blanket implementation.
    fn from(client: MixProviderClient) -> Self {
        topology::provider::Client {
            pub_key: client.pub_key,
        }
    }
}
impl From<topology::provider::Client> for MixProviderClient {
    /// Build the presence representation from a topology client.
    fn from(mpc: topology::provider::Client) -> Self {
        MixProviderClient {
            pub_key: mpc.pub_key,
        }
    }
}
|
#[macro_use]
extern crate log;
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate validator_derive;
#[macro_use]
extern crate strum_macros;
use diesel::pg::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool, PooledConnection};
use once_cell::sync::OnceCell;
use warp::{self, http::Method, path, Filter, Rejection, Reply};
type PgPool = Pool<ConnectionManager<PgConnection>>;
type PgPooled = PooledConnection<ConnectionManager<PgConnection>>;
mod utils;
mod authentication;
mod authorization;
mod helpers;
mod problem;
mod rate_limit;
mod schema;
mod controllers;
mod models;
mod response;
mod views;
mod mqtt;
mod websocket;
use response::{Response, ResponseBuilder};
// Crate version, reported by the `/version` endpoint.
static VERSION: &str = env!("CARGO_PKG_VERSION");
// Fallback values used when the corresponding environment variables are unset.
static DEFAULT_DATABASE_URL: &str = "postgres://astroplant:astroplant@database.ops/astroplant";
static DEFAULT_MQTT_HOST: &str = "mqtt.ops";
const DEFAULT_MQTT_PORT: u16 = 1883;
static DEFAULT_MQTT_USERNAME: &str = "server";
static DEFAULT_MQTT_PASSWORD: &str = "";
// Process-wide token signer; set exactly once by `init_token_signer`.
static TOKEN_SIGNER: OnceCell<astroplant_auth::token::TokenSigner> = OnceCell::new();
/// Build the PostgreSQL connection pool.
///
/// Reads `DATABASE_URL` from the environment, falling back to
/// `DEFAULT_DATABASE_URL` when unset.
///
/// # Panics
/// Panics if the pool cannot be created (e.g. the database is unreachable
/// within the 5-second connection timeout).
fn pg_pool() -> PgPool {
    // `unwrap_or_else` avoids allocating the fallback string when the variable
    // is present (clippy: `or_fun_call`).
    let manager = ConnectionManager::<PgConnection>::new(
        std::env::var("DATABASE_URL").unwrap_or_else(|_| DEFAULT_DATABASE_URL.to_owned()),
    );
    Pool::builder()
        .connection_timeout(std::time::Duration::from_secs(5))
        .build(manager)
        .expect("PostgreSQL connection pool could not be created.")
}
/// Server entry point: wires up the database pool, MQTT, WebSockets, the REST
/// router, rate limiting and CORS, then serves on 0.0.0.0:8080.
#[tokio::main]
async fn main() {
    env_logger::init();
    init_token_signer();
    let pg_pool = pg_pool();
    // Start MQTT.
    let (raw_measurement_receiver, kits_rpc) = mqtt::run(pg_pool.clone());
    // Start WebSockets.
    let (ws_endpoint, publisher) = astroplant_websocket::run();
    tokio::runtime::Handle::current().spawn(websocket::run(publisher, raw_measurement_receiver));
    let rate_limit = rate_limit::leaky_bucket();
    let pg = helpers::pg(pg_pool);
    // REST router. Each controller owns a path prefix; `unify` collapses the
    // `Either` produced by each `or` back into the single `Response` type.
    let rest_endpoints = (path!("version")
        .map(|| ResponseBuilder::ok().body(VERSION))
        .or(path!("time")
            .map(|| ResponseBuilder::ok().body(chrono::Utc::now().to_rfc3339()))
            .boxed())
        .unify()
        .or(path!("kits" / ..).and(controllers::kit::router(pg.clone().boxed())))
        .unify()
        .or(path!("kit-configurations" / ..)
            .and(controllers::kit_configuration::router(pg.clone().boxed())))
        .unify()
        .or(path!("kit-rpc" / ..).and(controllers::kit_rpc::router(kits_rpc, pg.clone().boxed())))
        .unify()
        .or(path!("users" / ..).and(controllers::user::router(pg.clone().boxed())))
        .unify()
        .or(path!("me" / ..).and(controllers::me::router(pg.clone().boxed())))
        .unify()
        .or(
            path!("peripheral-definitions" / ..).and(controllers::peripheral_definition::router(
                pg.clone().boxed(),
            )),
        )
        .unify()
        .or(path!("quantity-types" / ..)
            .and(controllers::quantity_type::router(pg.clone().boxed())))
        .unify()
        .or(path!("permissions" / ..).and(controllers::permission::router(pg.clone().boxed())))
        .unify()
        .or(path!("measurements" / ..).and(controllers::measurement::router(pg.clone().boxed())))
        .unify())
    .and(warp::header("Accept"))
    .map(|response: Response, _accept: String| {
        // TODO: utilize Accept header, e.g. returning XML when requested.
        let mut http_response_builder = warp::http::response::Builder::new()
            .status(response.status_code())
            .header("Content-Type", "application/json");
        for (header, value) in response.headers() {
            http_response_builder = http_response_builder.header(header.as_bytes(), value.clone());
        }
        match response.value() {
            Some(value) => http_response_builder
                .body(serde_json::to_string(value).unwrap())
                .unwrap(),
            None => http_response_builder.body("".to_owned()).unwrap(),
        }
    })
    .recover(|rejection| async { handle_rejection(rejection) })
    .with(warp::log("astroplant_rs_api::api"))
    // TODO: this wrapper might be better placed per-endpoint, to have accurate allowed metods
    .with(
        warp::cors()
            .allow_any_origin()
            .allow_methods(vec![
                Method::GET,
                Method::POST,
                Method::PUT,
                Method::PATCH,
                Method::DELETE,
                Method::OPTIONS,
            ])
            .allow_headers(vec!["Authorization", "Content-Type"]),
    );
    // Rate limiting applies to everything; WebSocket upgrade is tried first.
    let all = rate_limit.and(ws_endpoint.or(rest_endpoints));
    warp::serve(all).run(([0, 0, 0, 0], 8080)).await;
}
/// Convert rejections into replies.
///
/// Problems raised by this implementation are serialized as-is; rejections that
/// originate inside Warp are mapped to generic NOT_FOUND / INTERNAL_SERVER_ERROR
/// problems. All replies carry the `application/problem+json` content type.
fn handle_rejection(rejection: Rejection) -> Result<impl Reply, Rejection> {
    use problem::{DescriptiveProblem, Problem};
    let reply = if let Some(problem) = rejection.find::<Problem>() {
        // This rejection originated in this implementation.
        let descriptive_problem = DescriptiveProblem::from(problem);
        warp::reply::with_status(
            serde_json::to_string(&descriptive_problem).unwrap(),
            problem.to_status_code(),
        )
    } else {
        // This rejection originated in Warp.
        let problem = if rejection.is_not_found() {
            problem::NOT_FOUND
        } else {
            problem::INTERNAL_SERVER_ERROR
        };
        let descriptive_problem = DescriptiveProblem::from(&problem);
        warp::reply::with_status(
            serde_json::to_string(&descriptive_problem).unwrap(),
            problem.to_status_code(),
        )
    };
    Ok(warp::reply::with_header(
        reply,
        "Content-Type",
        "application/problem+json",
    ))
}
/// Initialize the token signer.
///
/// Reads the signing key from the file named by the `TOKEN_SIGNER_KEY`
/// environment variable (default `./token_signer.key`).
///
/// # Panics
/// Panics if the key file cannot be read. This function is only callable once;
/// it panics if called multiple times.
fn init_token_signer() {
    // Lazy fallback avoids allocating when the variable is set (clippy: or_fun_call).
    let key_file_path =
        std::env::var("TOKEN_SIGNER_KEY").unwrap_or_else(|_| "./token_signer.key".to_owned());
    debug!("Using token signer key file {}", key_file_path);
    // Include the path in the panic message so a misconfigured deployment is
    // diagnosable from the log alone.
    let token_signer_key: Vec<u8> = std::fs::read(&key_file_path).unwrap_or_else(|err| {
        panic!(
            "Could not read token signer key file {}: {}",
            key_file_path, err
        )
    });
    trace!(
        "Using token signer key of {} bits",
        token_signer_key.len() * 8
    );
    if TOKEN_SIGNER
        .set(astroplant_auth::token::TokenSigner::new(token_signer_key))
        .is_err()
    {
        panic!("Token signer initialization called more than once.")
    }
}
|
use diesel::pg::PgConnection;
use diesel::prelude::*;
use dotenv::dotenv;
use std::env;
/// Open a plain PostgreSQL connection to the test database.
///
/// Loads `.env` (if present) and reads `TEST_DATABASE_URL`.
///
/// # Panics
/// Panics if the variable is unset or the connection cannot be established;
/// the failing URL is included in the message to ease test-setup debugging.
pub fn connection_without_transaction() -> PgConnection {
    dotenv().ok();
    let connection_url = env::var("TEST_DATABASE_URL").expect("No test database url set");
    PgConnection::establish(&connection_url)
        .unwrap_or_else(|err| panic!("Could not connect to {}: {}", connection_url, err))
}
/// Open a test connection wrapped in a transaction that is never committed,
/// so each test leaves the database unchanged.
pub fn connection() -> PgConnection {
    let conn = connection_without_transaction();
    conn.begin_test_transaction().unwrap();
    conn
}
|
// Licensed under the 2-Clause BSD license <LICENSE or
// https://opensource.org/licenses/BSD-2-Clause>. This
// file may not be copied, modified, or distributed
// except according to those terms.
use Operation;
/// All decodable x86 instruction operations.
///
/// The variant order is significant: each discriminant is the index of the
/// matching row in `INSTRUCTION_OPERATION_TABLE` (see `mnemonic`/`from_i32`),
/// so variants must only ever be appended, never reordered.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
#[repr(i32)]
pub enum InstructionOperation {
    // Sentinel for "no valid operation"; must stay at discriminant 0.
    INVALID = 0i32,
    AAA,
    AAD,
    AAM,
    AAS,
    ADD,
    ADC,
    AND,
    ARPL,
    BLENDPD,
    BLENDPS,
    BLENDVPD,
    BLENDVPS,
    BOUND,
    BSF,
    BSR,
    BSWAP,
    BT,
    BTC,
    BTR,
    BTS,
    CALLF,
    CALL,
    CLC,
    CLD,
    CLFLUSH,
    CLI,
    CLTS,
    CMC,
    CMP,
    CMPXCH16B,
    CMPXCH8B,
    CMPXCHG,
    CPUID,
    CRC32,
    DAA,
    DAS,
    DEC,
    DIV,
    DPPD,
    DPPS,
    EMMS,
    ENTER,
    F2XM1,
    FABS,
    FADD,
    FADDP,
    FBLD,
    FBSTP,
    FCHS,
    FCLEX,
    FCMOVB,
    FCMOVBE,
    FCMOVE,
    FCMOVNB,
    FCMOVNBE,
    FCMOVNE,
    FCMOVNU,
    FCMOVU,
    FCOM,
    FCOMI,
    FCOMIP,
    FCOMP,
    FCOMPP,
    FCOS,
    FDECSTP,
    FDISI,
    FDIV,
    FDIVP,
    FDIVR,
    FDIVRP,
    FEMMS,
    FENI,
    FFREE,
    FFREEP,
    FIADD,
    FICOM,
    FICOMP,
    FIDIV,
    FIDIVR,
    FILD,
    FIMUL,
    FINCSTP,
    FINIT,
    FIST,
    FISTP,
    FISTTP,
    FISUB,
    FISUBR,
    FLD,
    FLD1,
    FLDCW,
    FLDENV,
    FLDL2E,
    FLDL2T,
    FLDLG2,
    FLDLN2,
    FLDPI,
    FLDZ,
    FMUL,
    FMULP,
    FNOP,
    FPATAN,
    FPREM,
    FPREM1,
    FPTAN,
    FRICHOP,
    FRINEAR,
    FRINT2,
    FRNDINT,
    FRSTOR,
    FRSTPM,
    FSAVE,
    FSCALE,
    FSETPM,
    FSIN,
    FSINCOS,
    FSQRT,
    FST,
    FSTCW,
    FSTDW,
    FSTENV,
    FSTP,
    FSTSG,
    FSTSW,
    FSUB,
    FSUBP,
    FSUBR,
    FSUBRP,
    FTST,
    FUCOM,
    FUCOMI,
    FUCOMIP,
    FUCOMP,
    FUCOMPP,
    FWAIT,
    FXAM,
    FXCH,
    FXRSTOR,
    FXSAVE,
    FXTRACT,
    FYL2X,
    FYL2XP1,
    GETSEC,
    HLT,
    IDIV,
    IMUL,
    IN,
    INC,
    INT,
    INT1,
    INT3,
    INTO,
    INVD,
    INVLPG,
    IRET,
    JMPF,
    JMP,
    LAHF,
    LAR,
    LDMXCSR,
    LDS,
    LEA,
    LEAVE,
    LES,
    LFENCE,
    LFS,
    LGS,
    LOOP,
    LOOPE,
    LOOPNE,
    LSL,
    LSS,
    MFENCE,
    MOV,
    MOVNTI,
    MOVSS,
    MOVSX,
    MOVSXD,
    MOVUPD,
    MOVUPS,
    MOVZX,
    MPSADBW,
    MUL,
    NEG,
    NOP,
    NOT,
    OR,
    OUT,
    PACKSSDW,
    PACKSSWB,
    PACKUSDW,
    PACKUSWB,
    PABSB,
    PABSD,
    PABSW,
    PADDB,
    PADDD,
    PADDQ,
    PADDW,
    PADDSB,
    PADDSW,
    PADDUSB,
    PADDUSW,
    PALIGNR,
    PAND,
    PANDN,
    PAUSE,
    PAVGB,
    PAVGUSB,
    PAVGW,
    PBLENDVB,
    PBLENDW,
    PCMPEQB,
    PCMPEQD,
    PCMPEQQ,
    PCMPEQW,
    PCMPESTRI,
    PCMPESTRM,
    PCMPGTB,
    PCMPGTD,
    PCMPGTQ,
    PCMPGTW,
    PCMPISTRI,
    PCMPISTRM,
    PF2ID,
    PF2IW,
    PFACC,
    PFADD,
    PFCMPEQ,
    PFCMPGE,
    PFCMPGT,
    PFMAX,
    PFMIN,
    PFMUL,
    PFNACC,
    PFPNACC,
    PFRCP,
    PFRCPIT1,
    PFRCPIT2,
    PFRCPV,
    PFRSQIT1,
    PFRSQRT,
    PFRSQRTV,
    PFSUB,
    PFSUBR,
    PHADDD,
    PHADDSW,
    PHADDW,
    PHMINPOSUW,
    PHSUBD,
    PHSUBSW,
    PHSUBW,
    PI2FD,
    PI2FW,
    PMADDWD,
    PMADDUBSW,
    PMAXSB,
    PMAXSD,
    PMAXSW,
    PMAXUB,
    PMAXUD,
    PMAXUW,
    PMINSB,
    PMINSD,
    PMINSW,
    PMINUB,
    PMINUD,
    PMINUW,
    PMULDQ,
    PMULHRSW,
    PMULHRW,
    PMULHUW,
    PMULHW,
    PMULLD,
    PMULLW,
    PMULUDQ,
    POP,
    POPCNT,
    POR,
    PSADBW,
    PSHUFB,
    PSIGNB,
    PSIGND,
    PSIGNW,
    PSLLD,
    PSLLDQ,
    PSLLQ,
    PSLLW,
    PSRAD,
    PSRAW,
    PSRLD,
    PSRLDQ,
    PSRLQ,
    PSRLW,
    PSUBB,
    PSUBD,
    PSUBQ,
    PSUBW,
    PSUBSB,
    PSUBSW,
    PSUBUSB,
    PSUBUSW,
    PSWAPD,
    PTEST,
    PUNPCKHBW,
    PUNPCKHDQ,
    PUNPCKHQDQ,
    PUNPCKHWD,
    PUNPCKLQDQ,
    PUSH,
    PXOR,
    RDMSR,
    RDPMC,
    RDTSC,
    RETF,
    RETN,
    RCL,
    RCR,
    ROL,
    ROR,
    ROUNDPS,
    ROUNDPD,
    RSM,
    SAHF,
    SALC,
    SAR,
    SBB,
    SFENCE,
    SHL,
    SHLD,
    SHR,
    SHRD,
    SUB,
    STC,
    STD,
    STI,
    STMXCSR,
    SYSCALL,
    SYSENTER,
    SYSEXIT,
    SYSRET,
    TEST,
    UD2,
    VMREAD,
    VMWRITE,
    WBINVD,
    WRMSR,
    XCHG,
    XLAT,
    XADD,
    XOR,
    XRSTOR,
    XSAVE,
    ADDPS,
    ADDPD,
    ADDSD,
    ADDSS,
    ADDSUBPD,
    ADDSUBPS,
    ANDNPS,
    ANDNPD,
    ANDPS,
    ANDPD,
    CBW,
    CWDE,
    CDQE,
    CMPSB,
    CMPSW,
    CMPSD,
    CMPSQ,
    CMOVO,
    CMOVNO,
    CMOVB,
    CMOVAE,
    CMOVE,
    CMOVNE,
    CMOVBE,
    CMOVA,
    CMOVS,
    CMOVNS,
    CMOVPE,
    CMOVPO,
    CMOVL,
    CMOVGE,
    CMOVLE,
    CMOVG,
    CWD,
    CDQ,
    CQO,
    DIVPS,
    DIVPD,
    DIVSD,
    DIVSS,
    INSB,
    INSW,
    INSD,
    INSQ,
    JCXZ,
    JECXZ,
    JRCXZ,
    JO,
    JNO,
    JB,
    JAE,
    JE,
    JNE,
    JBE,
    JA,
    JS,
    JNS,
    JPE,
    JPO,
    JL,
    JGE,
    JLE,
    JG,
    LODSB,
    LODSW,
    LODSD,
    LODSQ,
    MAXPS,
    MAXPD,
    MAXSD,
    MAXSS,
    MINPS,
    MINPD,
    MINSD,
    MINSS,
    MOVD,
    MOVQ,
    MOVSB,
    MOVSW,
    MOVSD,
    MOVSQ,
    MULPS,
    MULPD,
    MULSD,
    MULSS,
    ORPS,
    ORPD,
    OUTSB,
    OUTSW,
    OUTSD,
    OUTSQ,
    PEXTRD,
    PEXTRQ,
    PINSRD,
    PINSRQ,
    POPA,
    POPAD,
    POPF,
    POPFD,
    POPFQ,
    PUSHA,
    PUSHAD,
    PUSHF,
    PUSHFD,
    PUSHFQ,
    RCPPS,
    RCPSS,
    RSQRTPS,
    RSQRTSS,
    SCASB,
    SCASW,
    SCASD,
    SCASQ,
    SETO,
    SETNO,
    SETB,
    SETAE,
    SETE,
    SETNE,
    SETBE,
    SETA,
    SETS,
    SETNS,
    SETPE,
    SETPO,
    SETL,
    SETGE,
    SETLE,
    SETG,
    SQRTPS,
    SQRTPD,
    SQRTSD,
    SQRTSS,
    STOSB,
    STOSW,
    STOSD,
    STOSQ,
    SUBPS,
    SUBPD,
    SUBSD,
    SUBSS,
    XORPS,
    XORPD,
    CMPPD,
    CMPPS,
    CMPSS,
    COMISD,
    COMISS,
    CVTDQ2PD,
    CVTDQ2PS,
    CVTPD2DQ,
    CVTPD2PI,
    CVTPD2PS,
    CVTPI2PD,
    CVTPI2PS,
    CVTPS2DQ,
    CVTPS2PD,
    CVTPS2PI,
    CVTSD2SI,
    CVTSD2SS,
    CVTSI2SD,
    CVTSI2SS,
    CVTSS2SD,
    CVTSS2SI,
    CVTTPD2DQ,
    CVTTPD2PI,
    CVTTPS2DQ,
    CVTTPS2PI,
    CVTTSD2SI,
    CVTTSS2SI,
    EXTRACTPS,
    HADDPD,
    HADDPS,
    HSUBPD,
    HSUBPS,
    INSERTPS,
    LDDQU,
    LGDT,
    LIDT,
    LLDT,
    LMSW,
    LTR,
    MASKMOVQ,
    MASKMOVDQU,
    MMXNOP,
    MONITOR,
    MOVAPD,
    MOVAPS,
    MOVDDUP,
    MOVDQ2Q,
    MOVDQA,
    MOVDQU,
    MOVHLPS,
    MOVHPD,
    MOVHPS,
    MOVSHDUP,
    MOVSLDUP,
    MOVLHPS,
    MOVLPD,
    MOVLPS,
    MOVMSKPD,
    MOVMSKPS,
    MOVNTDQ,
    MOVNTDQA,
    MOVNTPD,
    MOVNTPS,
    MOVNTQ,
    MOVQ2DQ,
    MWAIT,
    PINSRB,
    PINSRW,
    PEXTRB,
    PEXTRW,
    PMOVMSKB,
    PMOVSXBD,
    PMOVSXBQ,
    PMOVSXDQ,
    PMOVSXBW,
    PMOVSXWD,
    PMOVSXWQ,
    PMOVZXBD,
    PMOVZXBQ,
    PMOVZXDQ,
    PMOVZXBW,
    PMOVZXWD,
    PMOVZXWQ,
    PREFETCH,
    PREFETCHNTA,
    PREFETCHT0,
    PREFETCHT1,
    PREFETCHT2,
    PREFETCHW,
    PSHUFD,
    PSHUFHW,
    PSHUFLW,
    PSHUFW,
    PUNPCKLBW,
    PUNPCKLDQ,
    PUNPCKLWD,
    ROUNDSD,
    ROUNDSS,
    SGDT,
    SIDT,
    SLDT,
    SHUFPD,
    SHUFPS,
    SMSW,
    STR,
    SWAPGS,
    UCOMISD,
    UCOMISS,
    UNPCKHPD,
    UNPCKHPS,
    UNPCKLPD,
    UNPCKLPS,
    VERR,
    VERW,
    VMCALL,
    VMCLEAR,
    VMLAUNCH,
    VMPTRLD,
    VMPTRST,
    VMRESUME,
    VMXOFF,
    VMXON,
    XGETBV,
    XSETBV,
}
impl InstructionOperation {
/// Look up an `InstructionOperation` given its `i32` value.
pub fn from_i32(i: i32) -> Self {
INSTRUCTION_OPERATION_TABLE[i as usize].value
}
}
impl Operation for InstructionOperation {
    /// The printable mnemonic for this operation (empty string for `INVALID`).
    fn mnemonic(&self) -> &str {
        // The enum discriminant doubles as the index into the name table.
        INSTRUCTION_OPERATION_TABLE[*self as usize].name
    }
}
impl Default for InstructionOperation {
    /// Default to `INVALID`, the sentinel "no operation decoded" value.
    fn default() -> Self {
        InstructionOperation::INVALID
    }
}
/// One row of `INSTRUCTION_OPERATION_TABLE`: an operation paired with its
/// printable mnemonic. The row's index equals the operation's discriminant.
struct InstructionOperationInfo {
    // Lowercase mnemonic; empty for `INVALID`.
    pub name: &'static str,
    // The operation this row describes.
    pub value: InstructionOperation,
}
static INSTRUCTION_OPERATION_TABLE: [InstructionOperationInfo; 621] = [
InstructionOperationInfo {
name: "",
value: InstructionOperation::INVALID,
},
InstructionOperationInfo {
name: "aaa",
value: InstructionOperation::AAA,
},
InstructionOperationInfo {
name: "aad",
value: InstructionOperation::AAD,
},
InstructionOperationInfo {
name: "aam",
value: InstructionOperation::AAM,
},
InstructionOperationInfo {
name: "aas",
value: InstructionOperation::AAS,
},
InstructionOperationInfo {
name: "add",
value: InstructionOperation::ADD,
},
InstructionOperationInfo {
name: "adc",
value: InstructionOperation::ADC,
},
InstructionOperationInfo {
name: "and",
value: InstructionOperation::AND,
},
InstructionOperationInfo {
name: "arpl",
value: InstructionOperation::ARPL,
},
InstructionOperationInfo {
name: "blendpd",
value: InstructionOperation::BLENDPD,
},
InstructionOperationInfo {
name: "blendps",
value: InstructionOperation::BLENDPS,
},
InstructionOperationInfo {
name: "blendvpd",
value: InstructionOperation::BLENDVPD,
},
InstructionOperationInfo {
name: "blendvps",
value: InstructionOperation::BLENDVPS,
},
InstructionOperationInfo {
name: "bound",
value: InstructionOperation::BOUND,
},
InstructionOperationInfo {
name: "bsf",
value: InstructionOperation::BSF,
},
InstructionOperationInfo {
name: "bsr",
value: InstructionOperation::BSR,
},
InstructionOperationInfo {
name: "bswap",
value: InstructionOperation::BSWAP,
},
InstructionOperationInfo {
name: "bt",
value: InstructionOperation::BT,
},
InstructionOperationInfo {
name: "btc",
value: InstructionOperation::BTC,
},
InstructionOperationInfo {
name: "btr",
value: InstructionOperation::BTR,
},
InstructionOperationInfo {
name: "bts",
value: InstructionOperation::BTS,
},
InstructionOperationInfo {
name: "callf",
value: InstructionOperation::CALLF,
},
InstructionOperationInfo {
name: "call",
value: InstructionOperation::CALL,
},
InstructionOperationInfo {
name: "clc",
value: InstructionOperation::CLC,
},
InstructionOperationInfo {
name: "cld",
value: InstructionOperation::CLD,
},
InstructionOperationInfo {
name: "clflush",
value: InstructionOperation::CLFLUSH,
},
InstructionOperationInfo {
name: "cli",
value: InstructionOperation::CLI,
},
InstructionOperationInfo {
name: "clts",
value: InstructionOperation::CLTS,
},
InstructionOperationInfo {
name: "cmc",
value: InstructionOperation::CMC,
},
InstructionOperationInfo {
name: "cmp",
value: InstructionOperation::CMP,
},
InstructionOperationInfo {
name: "cmpxch16b",
value: InstructionOperation::CMPXCH16B,
},
InstructionOperationInfo {
name: "cmpxch8b",
value: InstructionOperation::CMPXCH8B,
},
InstructionOperationInfo {
name: "cmpxchg",
value: InstructionOperation::CMPXCHG,
},
InstructionOperationInfo {
name: "cpuid",
value: InstructionOperation::CPUID,
},
InstructionOperationInfo {
name: "crc32",
value: InstructionOperation::CRC32,
},
InstructionOperationInfo {
name: "daa",
value: InstructionOperation::DAA,
},
InstructionOperationInfo {
name: "das",
value: InstructionOperation::DAS,
},
InstructionOperationInfo {
name: "dec",
value: InstructionOperation::DEC,
},
InstructionOperationInfo {
name: "div",
value: InstructionOperation::DIV,
},
InstructionOperationInfo {
name: "dppd",
value: InstructionOperation::DPPD,
},
InstructionOperationInfo {
name: "dpps",
value: InstructionOperation::DPPS,
},
InstructionOperationInfo {
name: "emms",
value: InstructionOperation::EMMS,
},
InstructionOperationInfo {
name: "enter",
value: InstructionOperation::ENTER,
},
InstructionOperationInfo {
name: "f2xm1",
value: InstructionOperation::F2XM1,
},
InstructionOperationInfo {
name: "fabs",
value: InstructionOperation::FABS,
},
InstructionOperationInfo {
name: "fadd",
value: InstructionOperation::FADD,
},
InstructionOperationInfo {
name: "faddp",
value: InstructionOperation::FADDP,
},
InstructionOperationInfo {
name: "fbld",
value: InstructionOperation::FBLD,
},
InstructionOperationInfo {
name: "fbstp",
value: InstructionOperation::FBSTP,
},
InstructionOperationInfo {
name: "fchs",
value: InstructionOperation::FCHS,
},
InstructionOperationInfo {
name: "fclex",
value: InstructionOperation::FCLEX,
},
InstructionOperationInfo {
name: "fcmovb",
value: InstructionOperation::FCMOVB,
},
InstructionOperationInfo {
name: "fcmovbe",
value: InstructionOperation::FCMOVBE,
},
InstructionOperationInfo {
name: "fcmove",
value: InstructionOperation::FCMOVE,
},
InstructionOperationInfo {
name: "fcmovnb",
value: InstructionOperation::FCMOVNB,
},
InstructionOperationInfo {
name: "fcmovnbe",
value: InstructionOperation::FCMOVNBE,
},
InstructionOperationInfo {
name: "fcmovne",
value: InstructionOperation::FCMOVNE,
},
InstructionOperationInfo {
name: "fcmovnu",
value: InstructionOperation::FCMOVNU,
},
InstructionOperationInfo {
name: "fcmovu",
value: InstructionOperation::FCMOVU,
},
InstructionOperationInfo {
name: "fcom",
value: InstructionOperation::FCOM,
},
InstructionOperationInfo {
name: "fcomi",
value: InstructionOperation::FCOMI,
},
InstructionOperationInfo {
name: "fcomip",
value: InstructionOperation::FCOMIP,
},
InstructionOperationInfo {
name: "fcomp",
value: InstructionOperation::FCOMP,
},
InstructionOperationInfo {
name: "fcompp",
value: InstructionOperation::FCOMPP,
},
InstructionOperationInfo {
name: "fcos",
value: InstructionOperation::FCOS,
},
InstructionOperationInfo {
name: "fdecstp",
value: InstructionOperation::FDECSTP,
},
InstructionOperationInfo {
name: "fdisi",
value: InstructionOperation::FDISI,
},
InstructionOperationInfo {
name: "fdiv",
value: InstructionOperation::FDIV,
},
InstructionOperationInfo {
name: "fdivp",
value: InstructionOperation::FDIVP,
},
InstructionOperationInfo {
name: "fdivr",
value: InstructionOperation::FDIVR,
},
InstructionOperationInfo {
name: "fdivrp",
value: InstructionOperation::FDIVRP,
},
InstructionOperationInfo {
name: "femms",
value: InstructionOperation::FEMMS,
},
InstructionOperationInfo {
name: "feni",
value: InstructionOperation::FENI,
},
InstructionOperationInfo {
name: "ffree",
value: InstructionOperation::FFREE,
},
InstructionOperationInfo {
name: "ffreep",
value: InstructionOperation::FFREEP,
},
InstructionOperationInfo {
name: "fiadd",
value: InstructionOperation::FIADD,
},
InstructionOperationInfo {
name: "ficom",
value: InstructionOperation::FICOM,
},
InstructionOperationInfo {
name: "ficomp",
value: InstructionOperation::FICOMP,
},
InstructionOperationInfo {
name: "fidiv",
value: InstructionOperation::FIDIV,
},
InstructionOperationInfo {
name: "fidivr",
value: InstructionOperation::FIDIVR,
},
InstructionOperationInfo {
name: "fild",
value: InstructionOperation::FILD,
},
InstructionOperationInfo {
name: "fimul",
value: InstructionOperation::FIMUL,
},
InstructionOperationInfo {
name: "fincstp",
value: InstructionOperation::FINCSTP,
},
InstructionOperationInfo {
name: "finit",
value: InstructionOperation::FINIT,
},
InstructionOperationInfo {
name: "fist",
value: InstructionOperation::FIST,
},
InstructionOperationInfo {
name: "fistp",
value: InstructionOperation::FISTP,
},
InstructionOperationInfo {
name: "fisttp",
value: InstructionOperation::FISTTP,
},
InstructionOperationInfo {
name: "fisub",
value: InstructionOperation::FISUB,
},
InstructionOperationInfo {
name: "fisubr",
value: InstructionOperation::FISUBR,
},
InstructionOperationInfo {
name: "fld",
value: InstructionOperation::FLD,
},
InstructionOperationInfo {
name: "fld1",
value: InstructionOperation::FLD1,
},
InstructionOperationInfo {
name: "fldcw",
value: InstructionOperation::FLDCW,
},
InstructionOperationInfo {
name: "fldenv",
value: InstructionOperation::FLDENV,
},
InstructionOperationInfo {
name: "fldl2e",
value: InstructionOperation::FLDL2E,
},
InstructionOperationInfo {
name: "fldl2t",
value: InstructionOperation::FLDL2T,
},
InstructionOperationInfo {
name: "fldlg2",
value: InstructionOperation::FLDLG2,
},
InstructionOperationInfo {
name: "fldln2",
value: InstructionOperation::FLDLN2,
},
InstructionOperationInfo {
name: "fldpi",
value: InstructionOperation::FLDPI,
},
InstructionOperationInfo {
name: "fldz",
value: InstructionOperation::FLDZ,
},
InstructionOperationInfo {
name: "fmul",
value: InstructionOperation::FMUL,
},
InstructionOperationInfo {
name: "fmulp",
value: InstructionOperation::FMULP,
},
InstructionOperationInfo {
name: "fnop",
value: InstructionOperation::FNOP,
},
InstructionOperationInfo {
name: "fpatan",
value: InstructionOperation::FPATAN,
},
InstructionOperationInfo {
name: "fprem",
value: InstructionOperation::FPREM,
},
InstructionOperationInfo {
name: "fprem1",
value: InstructionOperation::FPREM1,
},
InstructionOperationInfo {
name: "fptan",
value: InstructionOperation::FPTAN,
},
InstructionOperationInfo {
name: "frichop",
value: InstructionOperation::FRICHOP,
},
InstructionOperationInfo {
name: "frinear",
value: InstructionOperation::FRINEAR,
},
InstructionOperationInfo {
name: "frint2",
value: InstructionOperation::FRINT2,
},
InstructionOperationInfo {
name: "frndint",
value: InstructionOperation::FRNDINT,
},
InstructionOperationInfo {
name: "frstor",
value: InstructionOperation::FRSTOR,
},
InstructionOperationInfo {
name: "frstpm",
value: InstructionOperation::FRSTPM,
},
InstructionOperationInfo {
name: "fsave",
value: InstructionOperation::FSAVE,
},
InstructionOperationInfo {
name: "fscale",
value: InstructionOperation::FSCALE,
},
InstructionOperationInfo {
name: "fsetpm",
value: InstructionOperation::FSETPM,
},
InstructionOperationInfo {
name: "fsin",
value: InstructionOperation::FSIN,
},
InstructionOperationInfo {
name: "fsincos",
value: InstructionOperation::FSINCOS,
},
InstructionOperationInfo {
name: "fsqrt",
value: InstructionOperation::FSQRT,
},
InstructionOperationInfo {
name: "fst",
value: InstructionOperation::FST,
},
InstructionOperationInfo {
name: "fstcw",
value: InstructionOperation::FSTCW,
},
InstructionOperationInfo {
name: "fstdw",
value: InstructionOperation::FSTDW,
},
InstructionOperationInfo {
name: "fstenv",
value: InstructionOperation::FSTENV,
},
InstructionOperationInfo {
name: "fstp",
value: InstructionOperation::FSTP,
},
InstructionOperationInfo {
name: "fstsg",
value: InstructionOperation::FSTSG,
},
InstructionOperationInfo {
name: "fstsw",
value: InstructionOperation::FSTSW,
},
InstructionOperationInfo {
name: "fsub",
value: InstructionOperation::FSUB,
},
InstructionOperationInfo {
name: "fsubp",
value: InstructionOperation::FSUBP,
},
InstructionOperationInfo {
name: "fsubr",
value: InstructionOperation::FSUBR,
},
InstructionOperationInfo {
name: "fsubrp",
value: InstructionOperation::FSUBRP,
},
InstructionOperationInfo {
name: "ftst",
value: InstructionOperation::FTST,
},
InstructionOperationInfo {
name: "fucom",
value: InstructionOperation::FUCOM,
},
InstructionOperationInfo {
name: "fucomi",
value: InstructionOperation::FUCOMI,
},
InstructionOperationInfo {
name: "fucomip",
value: InstructionOperation::FUCOMIP,
},
InstructionOperationInfo {
name: "fucomp",
value: InstructionOperation::FUCOMP,
},
InstructionOperationInfo {
name: "fucompp",
value: InstructionOperation::FUCOMPP,
},
InstructionOperationInfo {
name: "fwait",
value: InstructionOperation::FWAIT,
},
InstructionOperationInfo {
name: "fxam",
value: InstructionOperation::FXAM,
},
InstructionOperationInfo {
name: "fxch",
value: InstructionOperation::FXCH,
},
InstructionOperationInfo {
name: "fxrstor",
value: InstructionOperation::FXRSTOR,
},
InstructionOperationInfo {
name: "fxsave",
value: InstructionOperation::FXSAVE,
},
InstructionOperationInfo {
name: "fxtract",
value: InstructionOperation::FXTRACT,
},
InstructionOperationInfo {
name: "fyl2x",
value: InstructionOperation::FYL2X,
},
InstructionOperationInfo {
name: "fyl2xp1",
value: InstructionOperation::FYL2XP1,
},
InstructionOperationInfo {
name: "getsec",
value: InstructionOperation::GETSEC,
},
InstructionOperationInfo {
name: "hlt",
value: InstructionOperation::HLT,
},
InstructionOperationInfo {
name: "idiv",
value: InstructionOperation::IDIV,
},
InstructionOperationInfo {
name: "imul",
value: InstructionOperation::IMUL,
},
InstructionOperationInfo {
name: "in",
value: InstructionOperation::IN,
},
InstructionOperationInfo {
name: "inc",
value: InstructionOperation::INC,
},
InstructionOperationInfo {
name: "int",
value: InstructionOperation::INT,
},
InstructionOperationInfo {
name: "int1",
value: InstructionOperation::INT1,
},
InstructionOperationInfo {
name: "int3",
value: InstructionOperation::INT3,
},
InstructionOperationInfo {
name: "into",
value: InstructionOperation::INTO,
},
InstructionOperationInfo {
name: "invd",
value: InstructionOperation::INVD,
},
InstructionOperationInfo {
name: "invlpg",
value: InstructionOperation::INVLPG,
},
InstructionOperationInfo {
name: "iret",
value: InstructionOperation::IRET,
},
InstructionOperationInfo {
name: "jmpf",
value: InstructionOperation::JMPF,
},
InstructionOperationInfo {
name: "jmp",
value: InstructionOperation::JMP,
},
InstructionOperationInfo {
name: "lahf",
value: InstructionOperation::LAHF,
},
InstructionOperationInfo {
name: "lar",
value: InstructionOperation::LAR,
},
InstructionOperationInfo {
name: "ldmxcsr",
value: InstructionOperation::LDMXCSR,
},
InstructionOperationInfo {
name: "lds",
value: InstructionOperation::LDS,
},
InstructionOperationInfo {
name: "lea",
value: InstructionOperation::LEA,
},
InstructionOperationInfo {
name: "leave",
value: InstructionOperation::LEAVE,
},
InstructionOperationInfo {
name: "les",
value: InstructionOperation::LES,
},
InstructionOperationInfo {
name: "lfence",
value: InstructionOperation::LFENCE,
},
InstructionOperationInfo {
name: "lfs",
value: InstructionOperation::LFS,
},
InstructionOperationInfo {
name: "lgs",
value: InstructionOperation::LGS,
},
InstructionOperationInfo {
name: "loop",
value: InstructionOperation::LOOP,
},
InstructionOperationInfo {
name: "loope",
value: InstructionOperation::LOOPE,
},
InstructionOperationInfo {
name: "loopne",
value: InstructionOperation::LOOPNE,
},
InstructionOperationInfo {
name: "lsl",
value: InstructionOperation::LSL,
},
InstructionOperationInfo {
name: "lss",
value: InstructionOperation::LSS,
},
InstructionOperationInfo {
name: "mfence",
value: InstructionOperation::MFENCE,
},
InstructionOperationInfo {
name: "mov",
value: InstructionOperation::MOV,
},
InstructionOperationInfo {
name: "movnti",
value: InstructionOperation::MOVNTI,
},
InstructionOperationInfo {
name: "movss",
value: InstructionOperation::MOVSS,
},
InstructionOperationInfo {
name: "movsx",
value: InstructionOperation::MOVSX,
},
InstructionOperationInfo {
name: "movsxd",
value: InstructionOperation::MOVSXD,
},
InstructionOperationInfo {
name: "movupd",
value: InstructionOperation::MOVUPD,
},
InstructionOperationInfo {
name: "movups",
value: InstructionOperation::MOVUPS,
},
InstructionOperationInfo {
name: "movzx",
value: InstructionOperation::MOVZX,
},
InstructionOperationInfo {
name: "mpsadbw",
value: InstructionOperation::MPSADBW,
},
InstructionOperationInfo {
name: "mul",
value: InstructionOperation::MUL,
},
InstructionOperationInfo {
name: "neg",
value: InstructionOperation::NEG,
},
InstructionOperationInfo {
name: "nop",
value: InstructionOperation::NOP,
},
InstructionOperationInfo {
name: "not",
value: InstructionOperation::NOT,
},
InstructionOperationInfo {
name: "or",
value: InstructionOperation::OR,
},
InstructionOperationInfo {
name: "out",
value: InstructionOperation::OUT,
},
InstructionOperationInfo {
name: "packssdw",
value: InstructionOperation::PACKSSDW,
},
InstructionOperationInfo {
name: "packsswb",
value: InstructionOperation::PACKSSWB,
},
InstructionOperationInfo {
name: "packusdw",
value: InstructionOperation::PACKUSDW,
},
InstructionOperationInfo {
name: "packuswb",
value: InstructionOperation::PACKUSWB,
},
InstructionOperationInfo {
name: "pabsb",
value: InstructionOperation::PABSB,
},
InstructionOperationInfo {
name: "pabsd",
value: InstructionOperation::PABSD,
},
InstructionOperationInfo {
name: "pabsw",
value: InstructionOperation::PABSW,
},
InstructionOperationInfo {
name: "paddb",
value: InstructionOperation::PADDB,
},
InstructionOperationInfo {
name: "paddd",
value: InstructionOperation::PADDD,
},
InstructionOperationInfo {
name: "paddq",
value: InstructionOperation::PADDQ,
},
InstructionOperationInfo {
name: "paddw",
value: InstructionOperation::PADDW,
},
InstructionOperationInfo {
name: "paddsb",
value: InstructionOperation::PADDSB,
},
InstructionOperationInfo {
name: "paddsw",
value: InstructionOperation::PADDSW,
},
InstructionOperationInfo {
name: "paddusb",
value: InstructionOperation::PADDUSB,
},
InstructionOperationInfo {
name: "paddusw",
value: InstructionOperation::PADDUSW,
},
InstructionOperationInfo {
name: "palignr",
value: InstructionOperation::PALIGNR,
},
InstructionOperationInfo {
name: "pand",
value: InstructionOperation::PAND,
},
InstructionOperationInfo {
name: "pandn",
value: InstructionOperation::PANDN,
},
InstructionOperationInfo {
name: "pause",
value: InstructionOperation::PAUSE,
},
InstructionOperationInfo {
name: "pavgb",
value: InstructionOperation::PAVGB,
},
InstructionOperationInfo {
name: "pavgusb",
value: InstructionOperation::PAVGUSB,
},
InstructionOperationInfo {
name: "pavgw",
value: InstructionOperation::PAVGW,
},
InstructionOperationInfo {
name: "pblendvb",
value: InstructionOperation::PBLENDVB,
},
InstructionOperationInfo {
name: "pblendw",
value: InstructionOperation::PBLENDW,
},
InstructionOperationInfo {
name: "pcmpeqb",
value: InstructionOperation::PCMPEQB,
},
InstructionOperationInfo {
name: "pcmpeqd",
value: InstructionOperation::PCMPEQD,
},
InstructionOperationInfo {
name: "pcmpeqq",
value: InstructionOperation::PCMPEQQ,
},
InstructionOperationInfo {
name: "pcmpeqw",
value: InstructionOperation::PCMPEQW,
},
InstructionOperationInfo {
name: "pcmpestri",
value: InstructionOperation::PCMPESTRI,
},
InstructionOperationInfo {
name: "pcmpestrm",
value: InstructionOperation::PCMPESTRM,
},
InstructionOperationInfo {
name: "pcmpgtb",
value: InstructionOperation::PCMPGTB,
},
InstructionOperationInfo {
name: "pcmpgtd",
value: InstructionOperation::PCMPGTD,
},
InstructionOperationInfo {
name: "pcmpgtq",
value: InstructionOperation::PCMPGTQ,
},
InstructionOperationInfo {
name: "pcmpgtw",
value: InstructionOperation::PCMPGTW,
},
InstructionOperationInfo {
name: "pcmpistri",
value: InstructionOperation::PCMPISTRI,
},
InstructionOperationInfo {
name: "pcmpistrm",
value: InstructionOperation::PCMPISTRM,
},
InstructionOperationInfo {
name: "pf2id",
value: InstructionOperation::PF2ID,
},
InstructionOperationInfo {
name: "pf2iw",
value: InstructionOperation::PF2IW,
},
InstructionOperationInfo {
name: "pfacc",
value: InstructionOperation::PFACC,
},
InstructionOperationInfo {
name: "pfadd",
value: InstructionOperation::PFADD,
},
InstructionOperationInfo {
name: "pfcmpeq",
value: InstructionOperation::PFCMPEQ,
},
InstructionOperationInfo {
name: "pfcmpge",
value: InstructionOperation::PFCMPGE,
},
InstructionOperationInfo {
name: "pfcmpgt",
value: InstructionOperation::PFCMPGT,
},
InstructionOperationInfo {
name: "pfmax",
value: InstructionOperation::PFMAX,
},
InstructionOperationInfo {
name: "pfmin",
value: InstructionOperation::PFMIN,
},
InstructionOperationInfo {
name: "pfmul",
value: InstructionOperation::PFMUL,
},
InstructionOperationInfo {
name: "pfnacc",
value: InstructionOperation::PFNACC,
},
InstructionOperationInfo {
name: "pfpnacc",
value: InstructionOperation::PFPNACC,
},
InstructionOperationInfo {
name: "pfrcp",
value: InstructionOperation::PFRCP,
},
InstructionOperationInfo {
name: "pfrcpit1",
value: InstructionOperation::PFRCPIT1,
},
InstructionOperationInfo {
name: "pfrcpit2",
value: InstructionOperation::PFRCPIT2,
},
InstructionOperationInfo {
name: "pfrcpv",
value: InstructionOperation::PFRCPV,
},
InstructionOperationInfo {
name: "pfrsqit1",
value: InstructionOperation::PFRSQIT1,
},
InstructionOperationInfo {
name: "pfrsqrt",
value: InstructionOperation::PFRSQRT,
},
InstructionOperationInfo {
name: "pfrsqrtv",
value: InstructionOperation::PFRSQRTV,
},
InstructionOperationInfo {
name: "pfsub",
value: InstructionOperation::PFSUB,
},
InstructionOperationInfo {
name: "pfsubr",
value: InstructionOperation::PFSUBR,
},
InstructionOperationInfo {
name: "phaddd",
value: InstructionOperation::PHADDD,
},
InstructionOperationInfo {
name: "phaddsw",
value: InstructionOperation::PHADDSW,
},
InstructionOperationInfo {
name: "phaddw",
value: InstructionOperation::PHADDW,
},
InstructionOperationInfo {
name: "phminposuw",
value: InstructionOperation::PHMINPOSUW,
},
InstructionOperationInfo {
name: "phsubd",
value: InstructionOperation::PHSUBD,
},
InstructionOperationInfo {
name: "phsubsw",
value: InstructionOperation::PHSUBSW,
},
InstructionOperationInfo {
name: "phsubw",
value: InstructionOperation::PHSUBW,
},
InstructionOperationInfo {
name: "pi2fd",
value: InstructionOperation::PI2FD,
},
InstructionOperationInfo {
name: "pi2fw",
value: InstructionOperation::PI2FW,
},
InstructionOperationInfo {
name: "pmaddwd",
value: InstructionOperation::PMADDWD,
},
InstructionOperationInfo {
name: "pmaddubsw",
value: InstructionOperation::PMADDUBSW,
},
InstructionOperationInfo {
name: "pmaxsb",
value: InstructionOperation::PMAXSB,
},
InstructionOperationInfo {
name: "pmaxsd",
value: InstructionOperation::PMAXSD,
},
InstructionOperationInfo {
name: "pmaxsw",
value: InstructionOperation::PMAXSW,
},
InstructionOperationInfo {
name: "pmaxub",
value: InstructionOperation::PMAXUB,
},
InstructionOperationInfo {
name: "pmaxud",
value: InstructionOperation::PMAXUD,
},
InstructionOperationInfo {
name: "pmaxuw",
value: InstructionOperation::PMAXUW,
},
InstructionOperationInfo {
name: "pminsb",
value: InstructionOperation::PMINSB,
},
InstructionOperationInfo {
name: "pminsd",
value: InstructionOperation::PMINSD,
},
InstructionOperationInfo {
name: "pminsw",
value: InstructionOperation::PMINSW,
},
InstructionOperationInfo {
name: "pminub",
value: InstructionOperation::PMINUB,
},
InstructionOperationInfo {
name: "pminud",
value: InstructionOperation::PMINUD,
},
InstructionOperationInfo {
name: "pminuw",
value: InstructionOperation::PMINUW,
},
InstructionOperationInfo {
name: "pmuldq",
value: InstructionOperation::PMULDQ,
},
InstructionOperationInfo {
name: "pmulhrsw",
value: InstructionOperation::PMULHRSW,
},
InstructionOperationInfo {
name: "pmulhrw",
value: InstructionOperation::PMULHRW,
},
InstructionOperationInfo {
name: "pmulhuw",
value: InstructionOperation::PMULHUW,
},
InstructionOperationInfo {
name: "pmulhw",
value: InstructionOperation::PMULHW,
},
InstructionOperationInfo {
name: "pmulld",
value: InstructionOperation::PMULLD,
},
InstructionOperationInfo {
name: "pmullw",
value: InstructionOperation::PMULLW,
},
InstructionOperationInfo {
name: "pmuludq",
value: InstructionOperation::PMULUDQ,
},
InstructionOperationInfo {
name: "pop",
value: InstructionOperation::POP,
},
InstructionOperationInfo {
name: "popcnt",
value: InstructionOperation::POPCNT,
},
InstructionOperationInfo {
name: "por",
value: InstructionOperation::POR,
},
InstructionOperationInfo {
name: "psadbw",
value: InstructionOperation::PSADBW,
},
InstructionOperationInfo {
name: "pshufb",
value: InstructionOperation::PSHUFB,
},
InstructionOperationInfo {
name: "psignb",
value: InstructionOperation::PSIGNB,
},
InstructionOperationInfo {
name: "psignd",
value: InstructionOperation::PSIGND,
},
InstructionOperationInfo {
name: "psignw",
value: InstructionOperation::PSIGNW,
},
InstructionOperationInfo {
name: "pslld",
value: InstructionOperation::PSLLD,
},
InstructionOperationInfo {
name: "pslldq",
value: InstructionOperation::PSLLDQ,
},
InstructionOperationInfo {
name: "psllq",
value: InstructionOperation::PSLLQ,
},
InstructionOperationInfo {
name: "psllw",
value: InstructionOperation::PSLLW,
},
InstructionOperationInfo {
name: "psrad",
value: InstructionOperation::PSRAD,
},
InstructionOperationInfo {
name: "psraw",
value: InstructionOperation::PSRAW,
},
InstructionOperationInfo {
name: "psrld",
value: InstructionOperation::PSRLD,
},
InstructionOperationInfo {
name: "psrldq",
value: InstructionOperation::PSRLDQ,
},
InstructionOperationInfo {
name: "psrlq",
value: InstructionOperation::PSRLQ,
},
InstructionOperationInfo {
name: "psrlw",
value: InstructionOperation::PSRLW,
},
InstructionOperationInfo {
name: "psubb",
value: InstructionOperation::PSUBB,
},
InstructionOperationInfo {
name: "psubd",
value: InstructionOperation::PSUBD,
},
InstructionOperationInfo {
name: "psubq",
value: InstructionOperation::PSUBQ,
},
InstructionOperationInfo {
name: "psubw",
value: InstructionOperation::PSUBW,
},
InstructionOperationInfo {
name: "psubsb",
value: InstructionOperation::PSUBSB,
},
InstructionOperationInfo {
name: "psubsw",
value: InstructionOperation::PSUBSW,
},
InstructionOperationInfo {
name: "psubusb",
value: InstructionOperation::PSUBUSB,
},
InstructionOperationInfo {
name: "psubusw",
value: InstructionOperation::PSUBUSW,
},
InstructionOperationInfo {
name: "pswapd",
value: InstructionOperation::PSWAPD,
},
InstructionOperationInfo {
name: "ptest",
value: InstructionOperation::PTEST,
},
InstructionOperationInfo {
name: "punpckhbw",
value: InstructionOperation::PUNPCKHBW,
},
InstructionOperationInfo {
name: "punpckhdq",
value: InstructionOperation::PUNPCKHDQ,
},
InstructionOperationInfo {
name: "punpckhqdq",
value: InstructionOperation::PUNPCKHQDQ,
},
InstructionOperationInfo {
name: "punpckhwd",
value: InstructionOperation::PUNPCKHWD,
},
InstructionOperationInfo {
name: "punpcklqdq",
value: InstructionOperation::PUNPCKLQDQ,
},
InstructionOperationInfo {
name: "push",
value: InstructionOperation::PUSH,
},
InstructionOperationInfo {
name: "pxor",
value: InstructionOperation::PXOR,
},
InstructionOperationInfo {
name: "rdmsr",
value: InstructionOperation::RDMSR,
},
InstructionOperationInfo {
name: "rdpmc",
value: InstructionOperation::RDPMC,
},
InstructionOperationInfo {
name: "rdtsc",
value: InstructionOperation::RDTSC,
},
InstructionOperationInfo {
name: "retf",
value: InstructionOperation::RETF,
},
InstructionOperationInfo {
name: "retn",
value: InstructionOperation::RETN,
},
InstructionOperationInfo {
name: "rcl",
value: InstructionOperation::RCL,
},
InstructionOperationInfo {
name: "rcr",
value: InstructionOperation::RCR,
},
InstructionOperationInfo {
name: "rol",
value: InstructionOperation::ROL,
},
InstructionOperationInfo {
name: "ror",
value: InstructionOperation::ROR,
},
InstructionOperationInfo {
name: "roundps",
value: InstructionOperation::ROUNDPS,
},
InstructionOperationInfo {
name: "roundpd",
value: InstructionOperation::ROUNDPD,
},
InstructionOperationInfo {
name: "rsm",
value: InstructionOperation::RSM,
},
InstructionOperationInfo {
name: "sahf",
value: InstructionOperation::SAHF,
},
InstructionOperationInfo {
name: "salc",
value: InstructionOperation::SALC,
},
InstructionOperationInfo {
name: "sar",
value: InstructionOperation::SAR,
},
InstructionOperationInfo {
name: "sbb",
value: InstructionOperation::SBB,
},
InstructionOperationInfo {
name: "sfence",
value: InstructionOperation::SFENCE,
},
InstructionOperationInfo {
name: "shl",
value: InstructionOperation::SHL,
},
InstructionOperationInfo {
name: "shld",
value: InstructionOperation::SHLD,
},
InstructionOperationInfo {
name: "shr",
value: InstructionOperation::SHR,
},
InstructionOperationInfo {
name: "shrd",
value: InstructionOperation::SHRD,
},
InstructionOperationInfo {
name: "sub",
value: InstructionOperation::SUB,
},
InstructionOperationInfo {
name: "stc",
value: InstructionOperation::STC,
},
InstructionOperationInfo {
name: "std",
value: InstructionOperation::STD,
},
InstructionOperationInfo {
name: "sti",
value: InstructionOperation::STI,
},
InstructionOperationInfo {
name: "stmxcsr",
value: InstructionOperation::STMXCSR,
},
InstructionOperationInfo {
name: "syscall",
value: InstructionOperation::SYSCALL,
},
InstructionOperationInfo {
name: "sysenter",
value: InstructionOperation::SYSENTER,
},
InstructionOperationInfo {
name: "sysexit",
value: InstructionOperation::SYSEXIT,
},
InstructionOperationInfo {
name: "sysret",
value: InstructionOperation::SYSRET,
},
InstructionOperationInfo {
name: "test",
value: InstructionOperation::TEST,
},
InstructionOperationInfo {
name: "ud2",
value: InstructionOperation::UD2,
},
InstructionOperationInfo {
name: "vmread",
value: InstructionOperation::VMREAD,
},
InstructionOperationInfo {
name: "vmwrite",
value: InstructionOperation::VMWRITE,
},
InstructionOperationInfo {
name: "wbinvd",
value: InstructionOperation::WBINVD,
},
InstructionOperationInfo {
name: "wrmsr",
value: InstructionOperation::WRMSR,
},
InstructionOperationInfo {
name: "xchg",
value: InstructionOperation::XCHG,
},
InstructionOperationInfo {
name: "xlat",
value: InstructionOperation::XLAT,
},
InstructionOperationInfo {
name: "xadd",
value: InstructionOperation::XADD,
},
InstructionOperationInfo {
name: "xor",
value: InstructionOperation::XOR,
},
InstructionOperationInfo {
name: "xrstor",
value: InstructionOperation::XRSTOR,
},
InstructionOperationInfo {
name: "xsave",
value: InstructionOperation::XSAVE,
},
InstructionOperationInfo {
name: "addps",
value: InstructionOperation::ADDPS,
},
InstructionOperationInfo {
name: "addpd",
value: InstructionOperation::ADDPD,
},
InstructionOperationInfo {
name: "addsd",
value: InstructionOperation::ADDSD,
},
InstructionOperationInfo {
name: "addss",
value: InstructionOperation::ADDSS,
},
InstructionOperationInfo {
name: "addsubpd",
value: InstructionOperation::ADDSUBPD,
},
InstructionOperationInfo {
name: "addsubps",
value: InstructionOperation::ADDSUBPS,
},
InstructionOperationInfo {
name: "andnps",
value: InstructionOperation::ANDNPS,
},
InstructionOperationInfo {
name: "andnpd",
value: InstructionOperation::ANDNPD,
},
InstructionOperationInfo {
name: "andps",
value: InstructionOperation::ANDPS,
},
InstructionOperationInfo {
name: "andpd",
value: InstructionOperation::ANDPD,
},
InstructionOperationInfo {
name: "cbw",
value: InstructionOperation::CBW,
},
InstructionOperationInfo {
name: "cwde",
value: InstructionOperation::CWDE,
},
InstructionOperationInfo {
name: "cdqe",
value: InstructionOperation::CDQE,
},
InstructionOperationInfo {
name: "cmpsb",
value: InstructionOperation::CMPSB,
},
InstructionOperationInfo {
name: "cmpsw",
value: InstructionOperation::CMPSW,
},
InstructionOperationInfo {
name: "cmpsd",
value: InstructionOperation::CMPSD,
},
InstructionOperationInfo {
name: "cmpsq",
value: InstructionOperation::CMPSQ,
},
InstructionOperationInfo {
name: "cmovo",
value: InstructionOperation::CMOVO,
},
InstructionOperationInfo {
name: "cmovno",
value: InstructionOperation::CMOVNO,
},
InstructionOperationInfo {
name: "cmovb",
value: InstructionOperation::CMOVB,
},
InstructionOperationInfo {
name: "cmovae",
value: InstructionOperation::CMOVAE,
},
InstructionOperationInfo {
name: "cmove",
value: InstructionOperation::CMOVE,
},
InstructionOperationInfo {
name: "cmovne",
value: InstructionOperation::CMOVNE,
},
InstructionOperationInfo {
name: "cmovbe",
value: InstructionOperation::CMOVBE,
},
InstructionOperationInfo {
name: "cmova",
value: InstructionOperation::CMOVA,
},
InstructionOperationInfo {
name: "cmovs",
value: InstructionOperation::CMOVS,
},
InstructionOperationInfo {
name: "cmovns",
value: InstructionOperation::CMOVNS,
},
InstructionOperationInfo {
name: "cmovpe",
value: InstructionOperation::CMOVPE,
},
InstructionOperationInfo {
name: "cmovpo",
value: InstructionOperation::CMOVPO,
},
InstructionOperationInfo {
name: "cmovl",
value: InstructionOperation::CMOVL,
},
InstructionOperationInfo {
name: "cmovge",
value: InstructionOperation::CMOVGE,
},
InstructionOperationInfo {
name: "cmovle",
value: InstructionOperation::CMOVLE,
},
InstructionOperationInfo {
name: "cmovg",
value: InstructionOperation::CMOVG,
},
InstructionOperationInfo {
name: "cwd",
value: InstructionOperation::CWD,
},
InstructionOperationInfo {
name: "cdq",
value: InstructionOperation::CDQ,
},
InstructionOperationInfo {
name: "cqo",
value: InstructionOperation::CQO,
},
InstructionOperationInfo {
name: "divps",
value: InstructionOperation::DIVPS,
},
InstructionOperationInfo {
name: "divpd",
value: InstructionOperation::DIVPD,
},
InstructionOperationInfo {
name: "divsd",
value: InstructionOperation::DIVSD,
},
InstructionOperationInfo {
name: "divss",
value: InstructionOperation::DIVSS,
},
InstructionOperationInfo {
name: "insb",
value: InstructionOperation::INSB,
},
InstructionOperationInfo {
name: "insw",
value: InstructionOperation::INSW,
},
InstructionOperationInfo {
name: "insd",
value: InstructionOperation::INSD,
},
InstructionOperationInfo {
name: "insq",
value: InstructionOperation::INSQ,
},
InstructionOperationInfo {
name: "jcxz",
value: InstructionOperation::JCXZ,
},
InstructionOperationInfo {
name: "jecxz",
value: InstructionOperation::JECXZ,
},
InstructionOperationInfo {
name: "jrcxz",
value: InstructionOperation::JRCXZ,
},
InstructionOperationInfo {
name: "jo",
value: InstructionOperation::JO,
},
InstructionOperationInfo {
name: "jno",
value: InstructionOperation::JNO,
},
InstructionOperationInfo {
name: "jb",
value: InstructionOperation::JB,
},
InstructionOperationInfo {
name: "jae",
value: InstructionOperation::JAE,
},
InstructionOperationInfo {
name: "je",
value: InstructionOperation::JE,
},
InstructionOperationInfo {
name: "jne",
value: InstructionOperation::JNE,
},
InstructionOperationInfo {
name: "jbe",
value: InstructionOperation::JBE,
},
InstructionOperationInfo {
name: "ja",
value: InstructionOperation::JA,
},
InstructionOperationInfo {
name: "js",
value: InstructionOperation::JS,
},
InstructionOperationInfo {
name: "jns",
value: InstructionOperation::JNS,
},
InstructionOperationInfo {
name: "jpe",
value: InstructionOperation::JPE,
},
InstructionOperationInfo {
name: "jpo",
value: InstructionOperation::JPO,
},
InstructionOperationInfo {
name: "jl",
value: InstructionOperation::JL,
},
InstructionOperationInfo {
name: "jge",
value: InstructionOperation::JGE,
},
InstructionOperationInfo {
name: "jle",
value: InstructionOperation::JLE,
},
InstructionOperationInfo {
name: "jg",
value: InstructionOperation::JG,
},
InstructionOperationInfo {
name: "lodsb",
value: InstructionOperation::LODSB,
},
InstructionOperationInfo {
name: "lodsw",
value: InstructionOperation::LODSW,
},
InstructionOperationInfo {
name: "lodsd",
value: InstructionOperation::LODSD,
},
InstructionOperationInfo {
name: "lodsq",
value: InstructionOperation::LODSQ,
},
InstructionOperationInfo {
name: "maxps",
value: InstructionOperation::MAXPS,
},
InstructionOperationInfo {
name: "maxpd",
value: InstructionOperation::MAXPD,
},
InstructionOperationInfo {
name: "maxsd",
value: InstructionOperation::MAXSD,
},
InstructionOperationInfo {
name: "maxss",
value: InstructionOperation::MAXSS,
},
InstructionOperationInfo {
name: "minps",
value: InstructionOperation::MINPS,
},
InstructionOperationInfo {
name: "minpd",
value: InstructionOperation::MINPD,
},
InstructionOperationInfo {
name: "minsd",
value: InstructionOperation::MINSD,
},
InstructionOperationInfo {
name: "minss",
value: InstructionOperation::MINSS,
},
InstructionOperationInfo {
name: "movd",
value: InstructionOperation::MOVD,
},
InstructionOperationInfo {
name: "movq",
value: InstructionOperation::MOVQ,
},
InstructionOperationInfo {
name: "movsb",
value: InstructionOperation::MOVSB,
},
InstructionOperationInfo {
name: "movsw",
value: InstructionOperation::MOVSW,
},
InstructionOperationInfo {
name: "movsd",
value: InstructionOperation::MOVSD,
},
InstructionOperationInfo {
name: "movsq",
value: InstructionOperation::MOVSQ,
},
InstructionOperationInfo {
name: "mulps",
value: InstructionOperation::MULPS,
},
InstructionOperationInfo {
name: "mulpd",
value: InstructionOperation::MULPD,
},
InstructionOperationInfo {
name: "mulsd",
value: InstructionOperation::MULSD,
},
InstructionOperationInfo {
name: "mulss",
value: InstructionOperation::MULSS,
},
InstructionOperationInfo {
name: "orps",
value: InstructionOperation::ORPS,
},
InstructionOperationInfo {
name: "orpd",
value: InstructionOperation::ORPD,
},
InstructionOperationInfo {
name: "outsb",
value: InstructionOperation::OUTSB,
},
InstructionOperationInfo {
name: "outsw",
value: InstructionOperation::OUTSW,
},
InstructionOperationInfo {
name: "outsd",
value: InstructionOperation::OUTSD,
},
InstructionOperationInfo {
name: "outsq",
value: InstructionOperation::OUTSQ,
},
InstructionOperationInfo {
name: "pextrd",
value: InstructionOperation::PEXTRD,
},
InstructionOperationInfo {
name: "pextrq",
value: InstructionOperation::PEXTRQ,
},
InstructionOperationInfo {
name: "pinsrd",
value: InstructionOperation::PINSRD,
},
InstructionOperationInfo {
name: "pinsrq",
value: InstructionOperation::PINSRQ,
},
InstructionOperationInfo {
name: "popa",
value: InstructionOperation::POPA,
},
InstructionOperationInfo {
name: "popad",
value: InstructionOperation::POPAD,
},
InstructionOperationInfo {
name: "popf",
value: InstructionOperation::POPF,
},
InstructionOperationInfo {
name: "popfd",
value: InstructionOperation::POPFD,
},
InstructionOperationInfo {
name: "popfq",
value: InstructionOperation::POPFQ,
},
InstructionOperationInfo {
name: "pusha",
value: InstructionOperation::PUSHA,
},
InstructionOperationInfo {
name: "pushad",
value: InstructionOperation::PUSHAD,
},
InstructionOperationInfo {
name: "pushf",
value: InstructionOperation::PUSHF,
},
InstructionOperationInfo {
name: "pushfd",
value: InstructionOperation::PUSHFD,
},
InstructionOperationInfo {
name: "pushfq",
value: InstructionOperation::PUSHFQ,
},
InstructionOperationInfo {
name: "rcpps",
value: InstructionOperation::RCPPS,
},
InstructionOperationInfo {
name: "rcpss",
value: InstructionOperation::RCPSS,
},
InstructionOperationInfo {
name: "rsqrtps",
value: InstructionOperation::RSQRTPS,
},
InstructionOperationInfo {
name: "rsqrtss",
value: InstructionOperation::RSQRTSS,
},
InstructionOperationInfo {
name: "scasb",
value: InstructionOperation::SCASB,
},
InstructionOperationInfo {
name: "scasw",
value: InstructionOperation::SCASW,
},
InstructionOperationInfo {
name: "scasd",
value: InstructionOperation::SCASD,
},
InstructionOperationInfo {
name: "scasq",
value: InstructionOperation::SCASQ,
},
InstructionOperationInfo {
name: "seto",
value: InstructionOperation::SETO,
},
InstructionOperationInfo {
name: "setno",
value: InstructionOperation::SETNO,
},
InstructionOperationInfo {
name: "setb",
value: InstructionOperation::SETB,
},
InstructionOperationInfo {
name: "setae",
value: InstructionOperation::SETAE,
},
InstructionOperationInfo {
name: "sete",
value: InstructionOperation::SETE,
},
InstructionOperationInfo {
name: "setne",
value: InstructionOperation::SETNE,
},
InstructionOperationInfo {
name: "setbe",
value: InstructionOperation::SETBE,
},
InstructionOperationInfo {
name: "seta",
value: InstructionOperation::SETA,
},
InstructionOperationInfo {
name: "sets",
value: InstructionOperation::SETS,
},
InstructionOperationInfo {
name: "setns",
value: InstructionOperation::SETNS,
},
InstructionOperationInfo {
name: "setpe",
value: InstructionOperation::SETPE,
},
InstructionOperationInfo {
name: "setpo",
value: InstructionOperation::SETPO,
},
InstructionOperationInfo {
name: "setl",
value: InstructionOperation::SETL,
},
InstructionOperationInfo {
name: "setge",
value: InstructionOperation::SETGE,
},
InstructionOperationInfo {
name: "setle",
value: InstructionOperation::SETLE,
},
InstructionOperationInfo {
name: "setg",
value: InstructionOperation::SETG,
},
InstructionOperationInfo {
name: "sqrtps",
value: InstructionOperation::SQRTPS,
},
InstructionOperationInfo {
name: "sqrtpd",
value: InstructionOperation::SQRTPD,
},
InstructionOperationInfo {
name: "sqrtsd",
value: InstructionOperation::SQRTSD,
},
InstructionOperationInfo {
name: "sqrtss",
value: InstructionOperation::SQRTSS,
},
InstructionOperationInfo {
name: "stosb",
value: InstructionOperation::STOSB,
},
InstructionOperationInfo {
name: "stosw",
value: InstructionOperation::STOSW,
},
InstructionOperationInfo {
name: "stosd",
value: InstructionOperation::STOSD,
},
InstructionOperationInfo {
name: "stosq",
value: InstructionOperation::STOSQ,
},
InstructionOperationInfo {
name: "subps",
value: InstructionOperation::SUBPS,
},
InstructionOperationInfo {
name: "subpd",
value: InstructionOperation::SUBPD,
},
InstructionOperationInfo {
name: "subsd",
value: InstructionOperation::SUBSD,
},
InstructionOperationInfo {
name: "subss",
value: InstructionOperation::SUBSS,
},
InstructionOperationInfo {
name: "xorps",
value: InstructionOperation::XORPS,
},
InstructionOperationInfo {
name: "xorpd",
value: InstructionOperation::XORPD,
},
InstructionOperationInfo {
name: "cmppd",
value: InstructionOperation::CMPPD,
},
InstructionOperationInfo {
name: "cmpps",
value: InstructionOperation::CMPPS,
},
InstructionOperationInfo {
name: "cmpss",
value: InstructionOperation::CMPSS,
},
InstructionOperationInfo {
name: "comisd",
value: InstructionOperation::COMISD,
},
InstructionOperationInfo {
name: "comiss",
value: InstructionOperation::COMISS,
},
InstructionOperationInfo {
name: "cvtdq2pd",
value: InstructionOperation::CVTDQ2PD,
},
InstructionOperationInfo {
name: "cvtdq2ps",
value: InstructionOperation::CVTDQ2PS,
},
InstructionOperationInfo {
name: "cvtpd2dq",
value: InstructionOperation::CVTPD2DQ,
},
InstructionOperationInfo {
name: "cvtpd2pi",
value: InstructionOperation::CVTPD2PI,
},
InstructionOperationInfo {
name: "cvtpd2ps",
value: InstructionOperation::CVTPD2PS,
},
InstructionOperationInfo {
name: "cvtpi2pd",
value: InstructionOperation::CVTPI2PD,
},
InstructionOperationInfo {
name: "cvtpi2ps",
value: InstructionOperation::CVTPI2PS,
},
InstructionOperationInfo {
name: "cvtps2dq",
value: InstructionOperation::CVTPS2DQ,
},
InstructionOperationInfo {
name: "cvtps2pd",
value: InstructionOperation::CVTPS2PD,
},
InstructionOperationInfo {
name: "cvtps2pi",
value: InstructionOperation::CVTPS2PI,
},
InstructionOperationInfo {
name: "cvtsd2si",
value: InstructionOperation::CVTSD2SI,
},
InstructionOperationInfo {
name: "cvtsd2ss",
value: InstructionOperation::CVTSD2SS,
},
InstructionOperationInfo {
name: "cvtsi2sd",
value: InstructionOperation::CVTSI2SD,
},
InstructionOperationInfo {
name: "cvtsi2ss",
value: InstructionOperation::CVTSI2SS,
},
InstructionOperationInfo {
name: "cvtss2sd",
value: InstructionOperation::CVTSS2SD,
},
InstructionOperationInfo {
name: "cvtss2si",
value: InstructionOperation::CVTSS2SI,
},
InstructionOperationInfo {
name: "cvttpd2dq",
value: InstructionOperation::CVTTPD2DQ,
},
InstructionOperationInfo {
name: "cvttpd2pi",
value: InstructionOperation::CVTTPD2PI,
},
InstructionOperationInfo {
name: "cvttps2dq",
value: InstructionOperation::CVTTPS2DQ,
},
InstructionOperationInfo {
name: "cvttps2pi",
value: InstructionOperation::CVTTPS2PI,
},
InstructionOperationInfo {
name: "cvttsd2si",
value: InstructionOperation::CVTTSD2SI,
},
InstructionOperationInfo {
name: "cvttss2si",
value: InstructionOperation::CVTTSS2SI,
},
InstructionOperationInfo {
name: "extractps",
value: InstructionOperation::EXTRACTPS,
},
InstructionOperationInfo {
name: "haddpd",
value: InstructionOperation::HADDPD,
},
InstructionOperationInfo {
name: "haddps",
value: InstructionOperation::HADDPS,
},
InstructionOperationInfo {
name: "hsubpd",
value: InstructionOperation::HSUBPD,
},
InstructionOperationInfo {
name: "hsubps",
value: InstructionOperation::HSUBPS,
},
InstructionOperationInfo {
name: "insertps",
value: InstructionOperation::INSERTPS,
},
InstructionOperationInfo {
name: "lddqu",
value: InstructionOperation::LDDQU,
},
InstructionOperationInfo {
name: "lgdt",
value: InstructionOperation::LGDT,
},
InstructionOperationInfo {
name: "lidt",
value: InstructionOperation::LIDT,
},
InstructionOperationInfo {
name: "lldt",
value: InstructionOperation::LLDT,
},
InstructionOperationInfo {
name: "lmsw",
value: InstructionOperation::LMSW,
},
InstructionOperationInfo {
name: "ltr",
value: InstructionOperation::LTR,
},
InstructionOperationInfo {
name: "maskmovq",
value: InstructionOperation::MASKMOVQ,
},
InstructionOperationInfo {
name: "maskmovdqu",
value: InstructionOperation::MASKMOVDQU,
},
InstructionOperationInfo {
name: "mmxnop",
value: InstructionOperation::MMXNOP,
},
InstructionOperationInfo {
name: "monitor",
value: InstructionOperation::MONITOR,
},
InstructionOperationInfo {
name: "movapd",
value: InstructionOperation::MOVAPD,
},
InstructionOperationInfo {
name: "movaps",
value: InstructionOperation::MOVAPS,
},
InstructionOperationInfo {
name: "movddup",
value: InstructionOperation::MOVDDUP,
},
InstructionOperationInfo {
name: "movdq2q",
value: InstructionOperation::MOVDQ2Q,
},
InstructionOperationInfo {
name: "movdqa",
value: InstructionOperation::MOVDQA,
},
InstructionOperationInfo {
name: "movdqu",
value: InstructionOperation::MOVDQU,
},
InstructionOperationInfo {
name: "movhlps",
value: InstructionOperation::MOVHLPS,
},
InstructionOperationInfo {
name: "movhpd",
value: InstructionOperation::MOVHPD,
},
InstructionOperationInfo {
name: "movhps",
value: InstructionOperation::MOVHPS,
},
InstructionOperationInfo {
name: "movshdup",
value: InstructionOperation::MOVSHDUP,
},
InstructionOperationInfo {
name: "movsldup",
value: InstructionOperation::MOVSLDUP,
},
InstructionOperationInfo {
name: "movlhps",
value: InstructionOperation::MOVLHPS,
},
InstructionOperationInfo {
name: "movlpd",
value: InstructionOperation::MOVLPD,
},
InstructionOperationInfo {
name: "movlps",
value: InstructionOperation::MOVLPS,
},
InstructionOperationInfo {
name: "movmskpd",
value: InstructionOperation::MOVMSKPD,
},
InstructionOperationInfo {
name: "movmskps",
value: InstructionOperation::MOVMSKPS,
},
InstructionOperationInfo {
name: "movntdq",
value: InstructionOperation::MOVNTDQ,
},
InstructionOperationInfo {
name: "movntdqa",
value: InstructionOperation::MOVNTDQA,
},
InstructionOperationInfo {
name: "movntpd",
value: InstructionOperation::MOVNTPD,
},
InstructionOperationInfo {
name: "movntps",
value: InstructionOperation::MOVNTPS,
},
InstructionOperationInfo {
name: "movntq",
value: InstructionOperation::MOVNTQ,
},
InstructionOperationInfo {
name: "movq2dq",
value: InstructionOperation::MOVQ2DQ,
},
InstructionOperationInfo {
name: "mwait",
value: InstructionOperation::MWAIT,
},
InstructionOperationInfo {
name: "pinsrb",
value: InstructionOperation::PINSRB,
},
InstructionOperationInfo {
name: "pinsrw",
value: InstructionOperation::PINSRW,
},
InstructionOperationInfo {
name: "pextrb",
value: InstructionOperation::PEXTRB,
},
InstructionOperationInfo {
name: "pextrw",
value: InstructionOperation::PEXTRW,
},
InstructionOperationInfo {
name: "pmovmskb",
value: InstructionOperation::PMOVMSKB,
},
InstructionOperationInfo {
name: "pmovsxbd",
value: InstructionOperation::PMOVSXBD,
},
InstructionOperationInfo {
name: "pmovsxbq",
value: InstructionOperation::PMOVSXBQ,
},
InstructionOperationInfo {
name: "pmovsxdq",
value: InstructionOperation::PMOVSXDQ,
},
InstructionOperationInfo {
name: "pmovsxbw",
value: InstructionOperation::PMOVSXBW,
},
InstructionOperationInfo {
name: "pmovsxwd",
value: InstructionOperation::PMOVSXWD,
},
InstructionOperationInfo {
name: "pmovsxwq",
value: InstructionOperation::PMOVSXWQ,
},
InstructionOperationInfo {
name: "pmovzxbd",
value: InstructionOperation::PMOVZXBD,
},
InstructionOperationInfo {
name: "pmovzxbq",
value: InstructionOperation::PMOVZXBQ,
},
InstructionOperationInfo {
name: "pmovzxdq",
value: InstructionOperation::PMOVZXDQ,
},
InstructionOperationInfo {
name: "pmovzxbw",
value: InstructionOperation::PMOVZXBW,
},
InstructionOperationInfo {
name: "pmovzxwd",
value: InstructionOperation::PMOVZXWD,
},
InstructionOperationInfo {
name: "pmovzxwq",
value: InstructionOperation::PMOVZXWQ,
},
InstructionOperationInfo {
name: "prefetch",
value: InstructionOperation::PREFETCH,
},
InstructionOperationInfo {
name: "prefetchnta",
value: InstructionOperation::PREFETCHNTA,
},
InstructionOperationInfo {
name: "prefetcht0",
value: InstructionOperation::PREFETCHT0,
},
InstructionOperationInfo {
name: "prefetcht1",
value: InstructionOperation::PREFETCHT1,
},
InstructionOperationInfo {
name: "prefetcht2",
value: InstructionOperation::PREFETCHT2,
},
InstructionOperationInfo {
name: "prefetchw",
value: InstructionOperation::PREFETCHW,
},
InstructionOperationInfo {
name: "pshufd",
value: InstructionOperation::PSHUFD,
},
InstructionOperationInfo {
name: "pshufhw",
value: InstructionOperation::PSHUFHW,
},
InstructionOperationInfo {
name: "pshuflw",
value: InstructionOperation::PSHUFLW,
},
InstructionOperationInfo {
name: "pshufw",
value: InstructionOperation::PSHUFW,
},
InstructionOperationInfo {
name: "punpcklbw",
value: InstructionOperation::PUNPCKLBW,
},
InstructionOperationInfo {
name: "punpckldq",
value: InstructionOperation::PUNPCKLDQ,
},
InstructionOperationInfo {
name: "punpcklwd",
value: InstructionOperation::PUNPCKLWD,
},
InstructionOperationInfo {
name: "roundsd",
value: InstructionOperation::ROUNDSD,
},
InstructionOperationInfo {
name: "roundss",
value: InstructionOperation::ROUNDSS,
},
InstructionOperationInfo {
name: "sgdt",
value: InstructionOperation::SGDT,
},
InstructionOperationInfo {
name: "sidt",
value: InstructionOperation::SIDT,
},
InstructionOperationInfo {
name: "sldt",
value: InstructionOperation::SLDT,
},
InstructionOperationInfo {
name: "shufpd",
value: InstructionOperation::SHUFPD,
},
InstructionOperationInfo {
name: "shufps",
value: InstructionOperation::SHUFPS,
},
InstructionOperationInfo {
name: "smsw",
value: InstructionOperation::SMSW,
},
InstructionOperationInfo {
name: "str",
value: InstructionOperation::STR,
},
InstructionOperationInfo {
name: "swapgs",
value: InstructionOperation::SWAPGS,
},
InstructionOperationInfo {
name: "ucomisd",
value: InstructionOperation::UCOMISD,
},
InstructionOperationInfo {
name: "ucomiss",
value: InstructionOperation::UCOMISS,
},
InstructionOperationInfo {
name: "unpckhpd",
value: InstructionOperation::UNPCKHPD,
},
InstructionOperationInfo {
name: "unpckhps",
value: InstructionOperation::UNPCKHPS,
},
InstructionOperationInfo {
name: "unpcklpd",
value: InstructionOperation::UNPCKLPD,
},
InstructionOperationInfo {
name: "unpcklps",
value: InstructionOperation::UNPCKLPS,
},
InstructionOperationInfo {
name: "verr",
value: InstructionOperation::VERR,
},
InstructionOperationInfo {
name: "verw",
value: InstructionOperation::VERW,
},
InstructionOperationInfo {
name: "vmcall",
value: InstructionOperation::VMCALL,
},
InstructionOperationInfo {
name: "vmclear",
value: InstructionOperation::VMCLEAR,
},
InstructionOperationInfo {
name: "vmlaunch",
value: InstructionOperation::VMLAUNCH,
},
InstructionOperationInfo {
name: "vmptrld",
value: InstructionOperation::VMPTRLD,
},
InstructionOperationInfo {
name: "vmptrst",
value: InstructionOperation::VMPTRST,
},
InstructionOperationInfo {
name: "vmresume",
value: InstructionOperation::VMRESUME,
},
InstructionOperationInfo {
name: "vmxoff",
value: InstructionOperation::VMXOFF,
},
InstructionOperationInfo {
name: "vmxon",
value: InstructionOperation::VMXON,
},
InstructionOperationInfo {
name: "xgetbv",
value: InstructionOperation::XGETBV,
},
InstructionOperationInfo {
name: "xsetbv",
value: InstructionOperation::XSETBV,
},
];
|
pub mod opa;
|
extern crate rustbox;
use std::io::{BufReader, BufRead, Write, Error, ErrorKind, Result};
use std::net::TcpStream;
use std::process::exit;
use rustbox::{Color, RustBox, Event, Key};
/// A single MPD search filter: a metadata field plus the text to match.
struct Constraint {
    // Which metadata field this filter applies to ("title", "artist", ...).
    search_type: String,
    // The user-typed text to search for within that field.
    search_term: String,
}
impl Constraint {
    /// Map a single keystroke to a new, empty filter on the corresponding
    /// metadata field. Returns `None` for keys with no assigned field.
    pub fn new(metadata_type: char) -> Option<Constraint> {
        let field = match metadata_type {
            ' ' => "any",
            't' => "title",
            'T' => "track",
            'd' => "disc",
            'b' => "album",
            'a' => "artist",
            'A' => "albumartist",
            _ => return None,
        };
        Some(Constraint {
            search_type: field.to_string(),
            search_term: String::new(),
        })
    }
    /// Render the filter as a `TYPE "TERM" ` fragment of an MPD query
    /// (note the trailing space so fragments can be concatenated).
    fn to_mpd_string(&self) -> String {
        format!("{} \"{}\" ", self.search_type, self.search_term)
    }
    /// Render the filter as `TYPE: TERM` for on-screen display.
    fn to_display_string(&self) -> String {
        format!("{}: {}", self.search_type, self.search_term)
    }
}
/// A minimal client connection to a local MPD (Music Player Daemon) server.
struct MPC {
    // Write half of the TCP connection: commands are sent through here.
    connection: TcpStream,
    // Buffered reader over a clone of the same stream: responses are read here.
    reader: BufReader<TcpStream>,
}
impl MPC {
    /// Connect to MPD on localhost:6600 and consume the greeting line.
    ///
    /// The stream is cloned so that writes (commands) and buffered reads
    /// (responses) can use independent handles over the same socket.
    /// Returns an error if the connection fails or the greeting does not
    /// start with "OK".
    pub fn new() -> Result<MPC> {
        let mut buf = String::new();
        let write_conn = try!(TcpStream::connect("localhost:6600"));
        let read_conn = try!(write_conn.try_clone());
        //intentionally transferring ownership
        let mut reader = BufReader::new(read_conn);
        try!(reader.read_line(&mut buf));
        if !buf.starts_with("OK") {
            return Err(Error::new(ErrorKind::Other, "Mpd did not return OK"));
        }
        Ok(MPC {
            connection: write_conn,
            reader: reader
        })
    }
    /// Send one protocol line to MPD and collect response lines until the
    /// terminating "OK". Returns an error if MPD answers with "ACK".
    fn send_command(&mut self, command: &str) -> Result<Vec<String>> {
        // BUGFIX: use write_all instead of write — `write` may perform a
        // short write on a socket, silently truncating the command.
        try!(self.connection.write_all(command.as_bytes()));
        try!(self.connection.write_all("\n".as_bytes()));
        let mut results: Vec<String> = Vec::new();
        let mut buf = String::new();
        loop {
            buf.clear();
            try!(self.reader.read_line(&mut buf));
            if buf == "OK\n" {
                return Ok(results);
            } else if buf.starts_with("ACK") {
                return Err(Error::new(ErrorKind::Other, "MPD Ack'd instead of OK'd"));
            } else {
                // Accumulate every non-terminal line, trimmed of the newline.
                results.push(String::from(buf.trim()));
            }
        }
    }
}
/// Redraw the whole screen: a "Filters:" header, the active constraints,
/// then the matching file lines, clipped to fit above the last terminal row.
fn print_screen(constraints: &Vec<Constraint>, files: &Vec<String>, rustbox: &RustBox) {
    let height = rustbox.height();
    rustbox.clear();
    rustbox.print(0, 0, rustbox::RB_BOLD, Color::Default, Color::Default, "Filters:");
    // Constraints render first, files after, as one continuous sequence.
    // The original duplicated this loop for each list; a single chained
    // iterator removes the duplication.
    let lines = constraints.iter().map(|c| c.to_display_string())
        .chain(files.iter().cloned());
    let mut y = 1;
    for line in lines {
        // Stop before the final row. `y + 1 >= height` cannot underflow for
        // height == 0 and, unlike the original `y == height - 1`, still
        // terminates when the terminal is only one or two rows tall.
        if y + 1 >= height {
            break;
        }
        rustbox.print(0, y, rustbox::RB_BOLD, Color::Default, Color::Default, &line);
        y += 1;
    }
    rustbox.present();
}
/// Input-handling states for the interactive filter editor.
enum State {
    /// Waiting for a keystroke that selects a metadata field.
    NeedType,
    /// Collecting the search text for the most recent constraint.
    NeedString,
    /// User asked to quit without queueing anything.
    ShouldExit,
    /// User asked to queue the matches and start playback.
    ShouldCommit
}
impl State {
    /// True for the two terminal states that end the input loop.
    fn is_exit_state(&self) -> bool {
        match *self {
            State::ShouldExit | State::ShouldCommit => true,
            State::NeedType | State::NeedString => false,
        }
    }
}
/// Interactive MPD search client: build up metadata filters one keystroke at
/// a time, preview the matching files live, then queue and play the results.
fn main() {
    // Connect to MPD before taking over the terminal, so a failure here is
    // still printable.
    let mut mpc = match MPC::new() {
        Ok(m) => m,
        Err(_) => panic!("Panicing here isn't too bad is it?")
    };
    let mut constraints: Vec<Constraint> = Vec::new();
    let mut state = State::NeedType;
    {
        //using rustbox now
        let rustbox = RustBox::init(Default::default()).ok().expect("Error initializing rustbox");
        while !state.is_exit_state() {
            //get update from mpd
            // Build an MPD `search` query out of the current constraints; an
            // empty constraint list means there is nothing to match yet.
            let matched_files = match constraints.is_empty() {
                true => Vec::new(),
                false => {
                    let mut query = String::from("search ");
                    for constraint in &constraints {
                        query.push_str(&constraint.to_mpd_string());
                    };
                    // Keep only the "file..." lines of the response.
                    mpc.send_command(&query).unwrap().into_iter().filter(|x| x.starts_with("file")).collect()
                }
            };
            //update display
            print_screen(&constraints, &matched_files, &rustbox);
            //get input, discard everything but key events
            let key = match rustbox.poll_event(false) {
                Ok(Event::KeyEvent(key)) => key,
                Ok(_) => None,
                Err(e) => panic!("Error with rustbox {}",e)
            };
            //invariant: You can only be in State::NeedString if constraints is non-empty
            // Two-phase input machine: NeedType picks a metadata field via a
            // single key, NeedString collects the text for that constraint.
            state = match state {
                State::ShouldExit => panic!("Unreachable code reached!"),
                State::ShouldCommit => panic!("Unreachable code reached!"),
                State::NeedType => match key {
                    None => State::NeedType,
                    Some(k) => match k {
                        Key::Esc => State::ShouldExit,
                        Key::Enter => State::ShouldCommit,
                        // Backspace at the field prompt re-opens the most
                        // recent constraint for editing (if there is one).
                        Key::Backspace => match constraints.pop() {
                            None => State::NeedType,
                            Some(_) => State::NeedString,
                        },
                        Key::Char(x) => match Constraint::new(x) {
                            Some(c) => {
                                constraints.push(c);
                                State::NeedString
                            },
                            None => State::NeedType
                        },
                        _ => State::NeedType
                    }
                },
                State::NeedString => match key {
                    None => State::NeedString,
                    Some(k) => match k {
                        Key::Esc => State::ShouldExit,
                        Key::Enter => State::NeedType,
                        // Backspace on an already-empty term deletes the
                        // whole constraint and returns to the field prompt.
                        Key::Backspace => match constraints.last_mut().unwrap().search_term.pop() {
                            None => {
                                constraints.pop();
                                State::NeedType
                            },
                            Some(_) => State::NeedString
                        },
                        // Only alphanumerics and whitespace may enter a
                        // search term (keeps the quoted MPD query intact).
                        Key::Char(x) if x.is_alphanumeric() || x.is_whitespace() => {
                            constraints.last_mut().unwrap().search_term.push(x);
                            State::NeedString
                        },
                        _ => State::NeedString
                    }
                }
            }
        }
    }//end using rustbox, needed if we want to print errors since rustbox hijacks the term
    match state {
        State::ShouldExit => { exit(0) },
        _ => {}
    }
    //get the current playlist length
    // "status" reports "playlistlength: N"; the matches are added after the
    // existing playlist, so N is where playback should start (presumably the
    // first newly-added song — confirm against MPD's searchadd semantics).
    let last_pos: String = mpc.send_command("status").ok()
        .map(Vec::into_iter)
        .and_then(|mut x| x.find(|i| i.starts_with("playlistlength: ")))
        .as_ref() //so we can later call into()
        .and_then(|x| x.split_whitespace().last())
        .map(|x| x.into())
        .expect("Failed to get playlist length, aborting");
    if constraints.is_empty() { //no songs, nothing to do
        return ;
    }
    // Queue every match, then start playback at the recorded position.
    let mut query = String::from("searchadd ");
    for constraint in &constraints {
        query.push_str(&constraint.to_mpd_string());
    };
    mpc.send_command(&query).ok().expect("failed to submit new songs to play, aborting");
    query = format!("play {}", last_pos);
    mpc.send_command(&query).ok().expect("failed to submit play request to mpd. aborting");
}
|
use std::fmt::{Display, Debug};
/// Print any `Display`-able value on its own line.
fn printer<T: Display>(t: T) {
    let rendered = t.to_string();
    println!("{}", rendered);
}
// Tuple struct with a `Display` bound on the definition itself. Normally
// bounds belong on impl blocks, but this example exists to demonstrate the
// resulting error for the commented-out `S(vec![1])` in the tests below.
struct S<T: Display>(T);
/// Types that have a computable 2-D area.
trait HasArea {
    /// The area of the shape as an `f64`.
    fn area(&self) -> f64;
}
/// An axis-aligned rectangle described by its two side lengths.
#[derive(Debug)]
struct Rectangle {
    length: f64,
    height: f64,
}
impl HasArea for Rectangle {
    /// Area of a rectangle: the product of its side lengths.
    fn area(&self) -> f64 {
        let Rectangle { length, height } = self;
        length * height
    }
}
// Deliberately implements neither `Debug` nor `HasArea`: the commented-out
// lines in the tests below demonstrate the resulting compile errors.
#[allow(dead_code)]
struct Triangle {
    length: f64,
    height: f64,
}
/// Print any `Debug`-able value (taken by reference) on its own line.
fn print_debug<T: Debug>(t: &T) {
    let rendered = format!("{:?}", t);
    println!("{}", rendered);
}
fn area<T: HasArea>(t: &T) -> f64 {
t.area()
}
// Exercises the bounded generics above; the commented lines show which
// calls fail to compile and why.
#[cfg(test)]
mod tests {
    use crate::bounds::{Rectangle, Triangle, print_debug, area};
    #[test]
    fn main() {
        // Error: Vec<T> doesn't implement `Display`
        // let s = S(vec![1]);
        let rectangle = Rectangle { length: 3.0, height: 4.0 };
        let _triangle = Triangle { length: 3.0, height: 4.0 };
        print_debug(&rectangle);
        println!("Area: {}", area(&rectangle));
        // Error: Triangle doesn't implement `Debug` or `HasArea`
        // print_debug(&_triangle);
        // println!("Area: {}", area(&_triangle));
    }
}
/// Demonstrates "marker" traits: empty trait bodies used purely as
/// compile-time labels for bounding generic functions.
#[cfg(test)]
mod empty_bounds {
    struct Cardinal;
    struct BlueJay;
    struct Turkey;
    // No methods — the traits exist only to tag types.
    trait Red {}
    trait Blue {}
    impl Red for Cardinal {}
    impl Blue for BlueJay {}
    // Each function accepts any type carrying the matching marker.
    fn red<M: Red>(_: &M) -> &'static str { "red" }
    fn blue<M: Blue>(_: &M) -> &'static str { "blue" }
    #[test]
    fn main() {
        let cardinal = Cardinal;
        let blue_jay = BlueJay;
        let _turkey = Turkey;
        println!("A cardinal is {}", red(&cardinal));
        println!("A blue jay is {}", blue(&blue_jay));
        // Error: `Turkey` implements neither marker trait.
        // println!("A turkey is {}", red(&_turkey));
    }
}
|
/*
* Slack Web API
*
* One way to interact with the Slack platform is its HTTP RPC-based Web API, a collection of methods requiring OAuth 2.0-based user, bot, or workspace tokens blessed with related OAuth scopes.
*
* The version of the OpenAPI document: 1.7.0
*
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
// NOTE(review): generated by openapi-generator — all five error enums share
// one shape. With `#[serde(untagged)]`, serde tries the variants in
// declaration order: `DefaultResponse` captures any JSON object, and
// `UnknownValue` is the catch-all for any other JSON payload.
/// struct for typed errors of method `dnd_end_dnd`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DndEndDndError {
    DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
    UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `dnd_end_snooze`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DndEndSnoozeError {
    DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
    UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `dnd_info`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DndInfoError {
    DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
    UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `dnd_set_snooze`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DndSetSnoozeError {
    DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
    UnknownValue(serde_json::Value),
}
/// struct for typed errors of method `dnd_team_info`
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DndTeamInfoError {
    DefaultResponse(::std::collections::HashMap<String, serde_json::Value>),
    UnknownValue(serde_json::Value),
}
/// Ends the current user's Do Not Disturb session immediately.
pub async fn dnd_end_dnd(configuration: &configuration::Configuration, token: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<DndEndDndError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/dnd.endDnd", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<DndEndDndError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Ends the current user's snooze mode immediately.
pub async fn dnd_end_snooze(configuration: &configuration::Configuration, token: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<DndEndSnoozeError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/dnd.endSnooze", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
local_var_req_builder = local_var_req_builder.header("token", token.to_string());
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<DndEndSnoozeError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Retrieves a user's current Do Not Disturb status.
pub async fn dnd_info(configuration: &configuration::Configuration, token: Option<&str>, user: Option<&str>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<DndInfoError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/dnd.info", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = token {
local_var_req_builder = local_var_req_builder.query(&[("token", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = user {
local_var_req_builder = local_var_req_builder.query(&[("user", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<DndInfoError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Turns on Do Not Disturb mode for the current user, or changes its duration.
pub async fn dnd_set_snooze(configuration: &configuration::Configuration, token: &str, num_minutes: &str) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<DndSetSnoozeError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/dnd.setSnooze", configuration.base_path);
let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let mut local_var_form_params = std::collections::HashMap::new();
local_var_form_params.insert("token", token.to_string());
local_var_form_params.insert("num_minutes", num_minutes.to_string());
local_var_req_builder = local_var_req_builder.form(&local_var_form_params);
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<DndSetSnoozeError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
/// Retrieves the Do Not Disturb status for up to 50 users on a team.
pub async fn dnd_team_info(configuration: &configuration::Configuration, token: Option<&str>, users: Option<&str>) -> Result<::std::collections::HashMap<String, serde_json::Value>, Error<DndTeamInfoError>> {
let local_var_client = &configuration.client;
let local_var_uri_str = format!("{}/dnd.teamInfo", configuration.base_path);
let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
if let Some(ref local_var_str) = token {
local_var_req_builder = local_var_req_builder.query(&[("token", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = users {
local_var_req_builder = local_var_req_builder.query(&[("users", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
if let Some(ref local_var_token) = configuration.oauth_access_token {
local_var_req_builder = local_var_req_builder.bearer_auth(local_var_token.to_owned());
};
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<DndTeamInfoError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
|
// Copyright 2022 The Tari Project
// SPDX-License-Identifier: BSD-3-Clause
//! The robotic innards of a Diffie-Hellman key exchange (DHKE) producing a shared secret.
//! Even though the result of a DHKE is the same type as a public key, it is typically treated as a secret value.
//! To make this work more safely, we ensure that a DHKE result is cleared after use (but beware of subsequent copies or
//! moves). Because a DHKE shared secret is intended to be used in further key derivation, the only visibility into it
//! is as a byte array; it's not possible to directly extract the underlying public key type, and you probably shouldn't
//! clone the byte array without a very good reason. If you need the underlying public key itself, you probably should
//! be using something else.
use core::ops::Mul;
use zeroize::Zeroize;
use crate::keys::PublicKey;
/// A type to hold a DH secret key.
///
/// The inner value is only exposed as a byte slice (`as_bytes`) and is
/// zeroized on drop; see the module docs for the security rationale.
pub struct DiffieHellmanSharedSecret<P>(P)
where P: Zeroize;
impl<P> DiffieHellmanSharedSecret<P>
where
    P: PublicKey + Zeroize,
    for<'a> &'a <P as PublicKey>::K: Mul<&'a P, Output = P>,
{
    /// Perform a Diffie-Hellman key exchange: the shared secret is `sk * pk`.
    pub fn new(sk: &P::K, pk: &P) -> Self {
        Self(sk * pk)
    }
    /// Get the shared secret as a byte array.
    /// Avoid copying these bytes; copies are not zeroized automatically.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_bytes()
    }
}
impl<P> Zeroize for DiffieHellmanSharedSecret<P>
where P: Zeroize
{
    /// Zeroize the shared secret's underlying public key
    fn zeroize(&mut self) {
        self.0.zeroize();
    }
}
impl<P> Drop for DiffieHellmanSharedSecret<P>
where P: Zeroize
{
    /// Zeroize the shared secret when out of scope or otherwise dropped
    fn drop(&mut self) {
        self.zeroize();
    }
}
|
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
use std::env;
use std::fs;
use std::io::{Write, Result};
use std::path::Path;
// An event notification as declared in the Chrome debugger protocol JSON.
#[derive(Deserialize, Debug)]
struct ChromeDbgEvent {
    name: String,
    description: Option<String>,
    parameters: Option<Vec<ChromeDbgTypeDecl>>, // event payload fields, if any
    experimental: Option<bool>,
}
// A callable command as declared in the Chrome debugger protocol JSON.
#[derive(Deserialize, Debug)]
struct ChromeDbgCommand {
    name: String,
    description: Option<String>,
    parameters: Option<Vec<ChromeDbgTypeDecl>>, // request arguments, if any
    returns: Option<Vec<ChromeDbgTypeDecl>>,    // response fields, if any
    experimental: Option<bool>,
}
// A type declaration from the protocol JSON: either a primitive (`type`),
// a reference to another declaration (`$ref`), an enum, or an object.
// Field names are prefixed with `_` where they would clash with keywords.
#[derive(Deserialize, Debug)]
struct ChromeDbgTypeDecl {
    id: Option<String>,
    #[serde(rename = "type")]
    _type: Option<String>,
    optional: Option<bool>,
    #[serde(rename = "$ref")]
    _ref: Option<String>,
    items: Option<Box<ChromeDbgTypeDecl>>, // element type when `_type == "array"`
    #[serde(rename = "enum")]
    _enum: Option<Vec<String>>,
    description: Option<String>,
    name: Option<String>,
    properties: Option<Vec<ChromeDbgTypeDecl>>, // struct fields when this is an object
}
impl ChromeDbgTypeDecl {
    /// Map this declaration to a Rust type name, or `None` when it cannot
    /// be mapped (unknown primitive, array without item type, no info).
    fn type_id(&self, absprefix: &str, relprefix: &str) -> Option<String> {
        self.type_id_with_box(None, absprefix, relprefix)
    }
    /// Like `type_id`, but wraps the result in `Box<...>` when it refers back
    /// to `parent_type` (to keep recursive structs finite-sized), and in
    /// `Option<...>` when the declaration is marked optional.
    fn type_id_with_box(&self, parent_type: Option<&str>, absprefix: &str, relprefix: &str) -> Option<String> {
        let mut s = String::new();
        if let Some(ref t) = self._type {
            match t.as_str() {
                "boolean" => s.push_str("bool"),
                "string" => s.push_str("String"),
                "integer" => s.push_str("i64"),
                "number" => s.push_str("f64"),
                "array" => {
                    if let Some(t) = self.items.as_ref().and_then(|t| t.type_id(absprefix, relprefix)) {
                        s = format!("Vec<{}>", t);
                    } else {
                        return None;
                    }
                }
                "any" => s.push_str("JsonValue"),
                "object" => s.push_str("JsonValue"),
                _ => return None,
            }
        } else if let Some(ref r) = self._ref {
            // char pattern instead of &str (clippy: single_char_pattern)
            if r.contains('.') {
                // r is an absolute reference (Domain.Type)
                s = format!("{}{}", absprefix, &r.replace('.', "::"));
            } else {
                // r is a relative reference within the current domain
                s = format!("{}{}", relprefix, r);
            }
        } else {
            return None;
        }
        if Some(s.as_str()) == parent_type {
            // Self-referential field: box it so the parent struct has a size.
            s = format!("Box<{}>", s);
        }
        if self.optional.unwrap_or(false) {
            Some(format!("Option<{}>", s))
        } else {
            Some(s)
        }
    }
}
// A protocol domain: a named group of commands, events and type declarations.
#[derive(Deserialize)]
struct ChromeDbgDomain {
    domain: String,
    experimental: Option<bool>,
    commands: Vec<ChromeDbgCommand>,
    events: Option<Vec<ChromeDbgEvent>>,
    types: Option<Vec<ChromeDbgTypeDecl>>,
}
impl ChromeDbgDomain {
    /// Generate the Rust module for this protocol domain: domain types,
    /// per-command request/return structs, and a `<Domain>Api` trait plus
    /// its implementation for `DebugClient`.
    fn genrust(&self, w: &mut Write) -> Result<()> {
        writeln!(w, "pub mod {} {{", self.domain)?;
        // These three previously used `.unwrap()`, inconsistent with the rest
        // of this function; use `?` so write errors propagate instead of panicking.
        writeln!(w, " #[allow(unused_imports)] use serde_json::Value as JsonValue;")?;
        writeln!(w, " #[allow(unused_imports)] use super::Nothing;")?;
        writeln!(w, " use super::*;")?;
        if let Some(ref types) = self.types {
            for dtype in types {
                if let Some(ref variants) = dtype._enum {
                    // TODO Sadly serde does not support a fallback variant for enums
                    // see https://github.com/serde-rs/serde/issues/912 for a workaround
                    if let Some(ref d) = dtype.description {
                        writeln!(w, " /// {}", d.trim())?;
                    }
                    writeln!(w, " #[derive(Deserialize, Debug, Serialize)]")?;
                    writeln!(w, " pub enum {} {{", dtype.id.as_ref().expect("Domain type has no id"))?;
                    for var in variants {
                        writeln!(w, r#" #[serde(rename = "{}")]"#, var)?;
                        writeln!(w, " _{},", var.replace('-', "_"))?;
                    }
                    writeln!(w, " }}" )?;
                } else if let Some(ref properties) = dtype.properties {
                    if let Some(ref d) = dtype.description {
                        writeln!(w, " /// {}", d.trim())?;
                    }
                    let dtype_id = dtype.id.as_ref().expect("Domain type has no id");
                    writeln!(w, " #[derive(Deserialize, Debug, Serialize)]")?;
                    writeln!(w, " pub struct {} {{", dtype_id)?;
                    for prop in properties {
                        let name = prop.name.as_ref().expect("Type property has no name");
                        if let Some(t) = prop.type_id_with_box(Some(&dtype_id), "super::", "") {
                            writeln!(w, r#" #[serde(rename = "{}")]"#, name)?;
                            writeln!(w, " pub _{}: {},", name, t)?;
                        }
                    }
                    writeln!(w, " }}" )?;
                } else if let Some(t) = dtype.type_id("super::", "") {
                    writeln!(w, " pub type {} = {};", dtype.id.as_ref().expect("Domain type has no id"), t)?;
                } else {
                    // Fall back to a raw JSON value for undeclarable types.
                    writeln!(w, " pub type {} = JsonValue;", dtype.id.as_ref().expect("Domain type has no id"))?;
                }
            }
        }
        let mut cmd_type_info = Vec::new();
        // commands
        for cmd in &self.commands {
            // Create a return type for this command, or use Nothing
            let return_type_name = match cmd.returns.as_ref().map(Vec::as_slice).unwrap_or(&[]) {
                [] => "Nothing".to_string(),
                v => {
                    writeln!(w, " #[derive(Deserialize, Debug)]")?;
                    writeln!(w, " pub struct ReturnType_{} {{", cmd.name)?;
                    for r in v {
                        let name = r.name.as_ref().expect("Return type attr has no name");
                        writeln!(w, r#" #[serde(rename = "{}")]"#, name)?;
                        writeln!(w, " pub {}: {},",
                            name,
                            r.type_id("super::", "").as_ref().expect("Cannot determine return type"))?;
                    }
                    writeln!(w, " }}")?;
                    format!("ReturnType_{}", cmd.name)
                }
            };
            // Create a request type for this command, this can be private since it is
            // only used later in the generated api functions.
            let request_type = if let Some(ref types) = cmd.parameters {
                writeln!(w, r#" #[derive(Serialize, Debug)]
struct Request_{} {{"#, cmd.name)?;
                for ty in types {
                    if let Some(ref s) = ty.description {
                        writeln!(w, " /// {}", s)?;
                    }
                    let ty_name = ty.name.as_ref().expect("Argument has no name");
                    writeln!(w, r#" #[serde(rename = "{}")]"#, ty_name)?;
                    if ty.optional.unwrap_or(false) {
                        // Dont serialize optional arguments. By default serde uses null.
                        writeln!(w, r#" #[serde(skip_serializing_if = "Option::is_none")]"#)?;
                    }
                    writeln!(w, " _{}: {},",
                        ty_name,
                        ty.type_id("super::", "").expect("Cannot determine type for argument"))?;
                }
                writeln!(w, " }}")?;
                format!("Request_{}", cmd.name)
            } else {
                "Nothing".to_string()
            };
            cmd_type_info.push((cmd, request_type, return_type_name));
        }
        // a domain trait for the sync api
        writeln!(w, r#" pub trait {}Api {{"#, &self.domain)?;
        for (cmd, _, return_type_name) in &cmd_type_info {
            if let Some(ref d) = cmd.description {
                writeln!(w, " /// {}", d.trim())?;
            }
            write!(w, r#" fn {}(&mut self"#, cmd.name)?;
            if let Some(ref types) = cmd.parameters {
                for ty in types {
                    write!(w, ", _{}: {}",
                        ty.name.as_ref().expect("Argument has no name"),
                        ty.type_id("super::", "").expect("Cannot determine type for argument"))?;
                }
            }
            writeln!(w, r#") -> Result<{}, ClientError>;"#, return_type_name)?;
        }
        writeln!(w, r#" }}"#)?;
        // trait implementation that forwards every command to DebugClient::call
        writeln!(w, r#" impl {}Api for DebugClient {{"#, &self.domain)?;
        for (cmd, request_type, return_type_name) in &cmd_type_info {
            write!(w, r#" fn {}(&mut self"#, cmd.name)?;
            if let Some(ref types) = cmd.parameters {
                for ty in types {
                    write!(w, ", _{}: {}",
                        ty.name.as_ref().expect("Argument has no name"),
                        ty.type_id("super::", "").expect("Cannot determine type for argument"))?;
                }
            }
            let fullname = format!("{}.{}", &self.domain, cmd.name);
            writeln!(w, r#") -> Result<{}, ClientError> {{"#, return_type_name)?;
            write!(w, r#" self.call("{}", {} {{"#, fullname, request_type)?;
            if let Some(ref types) = cmd.parameters {
                for (idx, ty) in types.iter().enumerate() {
                    if idx != 0 {
                        write!(w, ",")?;
                    }
                    write!(w, "_{}", ty.name.as_ref().expect("Argument type is missing a name"))?;
                }
            }
            writeln!(w, r#"}})"#)?;
            writeln!(w, r#" }}"#)?;
        }
        writeln!(w, r#" }}"#)?;
        writeln!(w, "}} // {}", self.domain)?;
        Ok(())
    }
}
// Root of the protocol description file: the list of all domains.
#[derive(Deserialize)]
struct ChromeDbgProto {
    domains: Vec<ChromeDbgDomain>,
}
impl ChromeDbgProto {
    /// Emit the complete generated protocol source: a shared preamble, one
    /// module per domain, per-domain API trait re-exports, and the `Event`
    /// enum covering every event of every domain.
    fn genrust(&self, f: &mut Write) -> Result<()> {
        writeln!(f, r#"
use DebugClient;
use Error as ClientError;
use serde;
use serde_json::Value as JsonValue;
/// A dummy type for commands that return nothing
#[derive(Serialize, Deserialize, Debug)]
pub struct Nothing {{
}}
fn deserialize_unit_enum<'de, D: serde::Deserializer<'de>>(_d: D) -> Result<(), D::Error> {{
Ok(())
}}
"#)?;
        for domain in &self.domains {
            // Propagate write errors with `?` (previously `.expect(...)`,
            // which panicked and was inconsistent with this Result-returning fn).
            domain.genrust(f)?;
            writeln!(f, "pub use self::{}::{}Api;", &domain.domain, &domain.domain)?;
        }
        // One big tagged enum over every event in every domain.
        writeln!(f, "#[derive(Deserialize, Debug)]")?;
        writeln!(f, r#"#[serde(tag = "method", content = "params")]"#)?;
        writeln!(f, "pub enum Event {{")?;
        for domain in &self.domains {
            if let Some(ref events) = domain.events {
                for ev in events {
                    let fullname = format!("{}.{}", &domain.domain, ev.name);
                    if let Some(ref s) = ev.description {
                        writeln!(f, " /// {}", s)?;
                    }
                    writeln!(f, r#" #[serde(rename = "{}")]"#, fullname)?;
                    if let Some(ref types) = ev.parameters {
                        writeln!(f, r#" {}_{} {{"#, &domain.domain, ev.name)?;
                        for ty in types {
                            let name = ty.name.as_ref().expect("Argument has no name");
                            // `type` is a keyword; the generated field is renamed.
                            let fixed_name = match name.as_str() {
                                "type" => "_type",
                                _ => name,
                            };
                            writeln!(f, r#" #[serde(rename = "{}")]
{}: {},"#,
                                name,
                                fixed_name,
                                ty.type_id("", &format!("{}::", &domain.domain)).expect("Argument has no type"))?;
                        }
                        writeln!(f, r#" }},"#)?;
                    } else {
                        // when serde decodes these unit variants it does not accept a map, but
                        // that is what chrome hands us
                        writeln!(f, r#" #[serde(deserialize_with="deserialize_unit_enum")]"#)?;
                        writeln!(f, r#" {}_{},"#, &domain.domain, ev.name)?;
                    }
                }
            }
        }
        writeln!(f, "}}" )
    }
}
/// Path to the Chrome DevTools protocol description bundled with the crate.
const SOURCE: &'static str = "src/chrome_protocol.json";
/// Build script: parse the protocol JSON and generate `$OUT_DIR/proto.rs`.
fn main() {
    let out_dir = env::var("OUT_DIR").unwrap();
    let dest_path = Path::new(&out_dir).join("proto.rs");
    // BUG FIX: build-script directives must be prefixed with `cargo:`;
    // without it Cargo ignores the line and never re-runs this script
    // when the protocol JSON changes.
    println!("cargo:rerun-if-changed={}", SOURCE);
    let mut f = fs::File::open(SOURCE).expect("Failed to open protocol json");
    let p: ChromeDbgProto = serde_json::from_reader(&mut f)
        .expect("Error parsing protocol json");
    let mut f = fs::OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(dest_path)
        // Message corrected: the file written is OUT_DIR/proto.rs, not src/proto.rs.
        .expect("Unable to open OUT_DIR/proto.rs for writing");
    p.genrust(&mut f).unwrap();
}
|
use optimized_square_free;
/// C ABI wrapper: returns 1 when `base^exponent - sub` is square-free,
/// 0 otherwise, delegating all work to the `optimized_square_free` crate.
#[no_mangle]
pub extern "C" fn is_square_free(base: i32, exponent: i32, sub: i32) -> i32 {
    let parsed = optimized_square_free::convert_input(base, exponent, sub);
    // C-friendly boolean: true -> 1, false -> 0.
    i32::from(optimized_square_free::is_square_free(parsed))
}
#[cfg(test)]
mod tests {
    use super::is_square_free;
    // The FFI wrapper reports "not square-free" as 0.
    #[test]
    fn not_square_free() {
        assert_eq!(0, is_square_free(2, 4, 0));
    }
    // The FFI wrapper reports "square-free" as 1.
    #[test]
    fn square_free() {
        assert_eq!(1, is_square_free(3, 2, 2));
    }
}
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::ast::ExprKind;
use crate::ast::Stmt;
use crate::ast::StmtKind;
use crate::lexer::Lexer;
use crate::lexer::SourcePosition;
use crate::lexer::TokenPosition;
use crate::lexer::TokenType;
use crate::parser::Parser;
use crate::parser::Program;
use lsp_types::Position;
use lsp_types::Range;
use lsp_types::TextEdit;
use std::collections::HashMap;
/// Parse `source`, collect all identifier occurrences, and produce the text
/// edits that rename the identifier under `pos` to `new_name`.
pub fn rename(source: &str, pos: Position, new_name: &str) -> Result<Vec<TextEdit>, ()> {
    let mut parser = Parser::new(Lexer::new(source));
    let program = parser.parse();
    let mut op = Rename::new();
    op.visit(&program, &parser);
    op.rename(&parser, pos, new_name)
}
/// Convert a lexer token span into an LSP `Range`.
fn token_position_to_range(position: &TokenPosition) -> Range {
    let start = source_position_to_position(&position.start);
    let end = source_position_to_position(&position.end);
    Range { start, end }
}
/// Convert a lexer source position into an LSP `Position`, widening the
/// integer fields to the `u64` the LSP types expect.
fn source_position_to_position(position: &SourcePosition) -> Position {
    let (line, character) = (position.line as u64, position.character as u64);
    Position { line, character }
}
// Rename operation state: every position at which each identifier occurs,
// keyed by the identifier's name.
struct Rename {
    token_to_positions: HashMap<String, Vec<TokenPosition>>,
}
impl Rename {
    /// Create an empty rename operation with no collected positions.
    fn new() -> Rename {
        Rename {
            token_to_positions: HashMap::new(),
        }
    }
    /// Walk every top-level statement, filling `token_to_positions`.
    fn visit(&mut self, program: &Program, parser: &Parser) {
        for stmt in &program.statements {
            self.visit_statement(&stmt, parser);
        }
    }
    fn visit_statement(&mut self, stmt: &Stmt, parser: &Parser) {
        match &stmt.kind {
            // TODO: `let` declarations are not collected yet, so the
            // declaration site is currently not renamed:
            // Statement::Let(stmt) => {
            //     let positions = self.token_to_positions.entry(stmt.name().to_string()).or_insert(Vec::new());
            //     positions.push(parser.resolve_location(stmt.name_location().clone()));
            // }
            StmtKind::Call(stmt) => {
                for expr in &stmt.arguments {
                    self.visit_expression(&expr.kind, parser)
                }
            }
            _ => {}
        }
    }
    fn visit_expression(&mut self, expr: &ExprKind, parser: &Parser) {
        match expr {
            ExprKind::Identifier(expr) => {
                let positions = self
                    .token_to_positions
                    .entry(expr.name().to_string())
                    .or_default();
                positions.push(parser.resolve_location(expr.name_location().clone()));
            }
            _ => {}
        }
    }
    /// Compute the edits renaming the identifier at `pos` to `new_name`.
    /// Returns `Err(())` when `pos` is not on an identifier token or when
    /// the identifier was never collected during `visit`.
    pub fn rename(
        &self,
        parser: &Parser,
        pos: Position,
        new_name: &str,
    ) -> Result<Vec<TextEdit>, ()> {
        let token = parser.find_token(SourcePosition {
            line: pos.line as i32,
            character: pos.character as i32,
        })?;
        if token.token_type != TokenType::Ident {
            return Err(());
        }
        let val = parser.identifier_name(&token);
        // BUG FIX: indexing a HashMap panics on a missing key; an unknown
        // identifier must yield Err(()) instead.
        let positions = self.token_to_positions.get(&val).ok_or(())?;
        let mut edits = Vec::new();
        for pos in positions {
            edits.push(TextEdit {
                new_text: new_name.to_string(),
                range: token_position_to_range(pos),
            });
        }
        Ok(edits)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Range;
    use pretty_assertions::assert_eq;
    // This is still WIP.
    #[test]
    fn test() {
        // Rename the use of `l:a` (cursor at line 0, column 5) to `l:b`.
        let res = rename(
            "let l:a = 5\ncall echo(l:a)",
            Position {
                line: 0,
                character: 5,
            },
            "l:b",
        )
        .unwrap();
        assert_eq!(
            res,
            &[
                // The declaration-site edit is not produced yet because
                // `visit_statement` skips `let` statements.
                // TextEdit {
                //     range: Range {
                //         start: Position {
                //             line: 0,
                //             character: 4,
                //         },
                //         end: Position {
                //             line: 0,
                //             character: 7,
                //         },
                //     },
                //     new_text: "l:b".to_string(),
                // },
                TextEdit {
                    range: Range {
                        start: Position {
                            line: 1,
                            character: 10,
                        },
                        end: Position {
                            line: 1,
                            character: 13,
                        },
                    },
                    new_text: "l:b".to_string(),
                }
            ]
        );
    }
}
|
//! Convenience re-export of common members
//!
//! Like the standard library's prelude, this module simplifies importing of
//! common items such as traits. Unlike the standard prelude, the contents of
//! this module must be imported manually.
//!
//! # Examples
//! ```rust
//! use chbs::{config::BasicConfig, prelude::*};
//!
//! let config = BasicConfig::default();
//!
//! // This method requires the ToScheme trait, imported through prelude
//! let scheme = config.to_scheme();
//! ```
pub use crate::component::traits::*;
pub use crate::entropy::HasEntropy;
pub use crate::scheme::ToScheme;
|
/// EditGitHookOption options when modifying one Git hook
#[derive(Debug, Default, Clone, Serialize, Deserialize)]
pub struct EditGitHookOption {
    // New content for the hook script, if it should be replaced.
    pub content: Option<String>,
}
impl EditGitHookOption {
    /// Create a builder for this object.
    #[inline]
    pub fn builder() -> EditGitHookOptionBuilder {
        let body = Default::default();
        EditGitHookOptionBuilder { body }
    }
    /// Start a `PATCH` request builder for editing one Git hook; `owner`,
    /// `repo` and `id` must all be supplied before the request can be sent.
    #[inline]
    pub fn repo_edit_git_hook() -> EditGitHookOptionPatchBuilder<crate::generics::MissingOwner, crate::generics::MissingRepo, crate::generics::MissingId> {
        EditGitHookOptionPatchBuilder {
            inner: Default::default(),
            _param_id: core::marker::PhantomData,
            _param_repo: core::marker::PhantomData,
            _param_owner: core::marker::PhantomData,
        }
    }
}
// Implement `From` rather than a manual `Into` (clippy: from_over_into);
// the blanket impl still provides `Into<EditGitHookOption>` for callers.
impl From<EditGitHookOptionBuilder> for EditGitHookOption {
    /// Extract the built object from the builder.
    fn from(builder: EditGitHookOptionBuilder) -> Self {
        builder.body
    }
}
// Implement `From` rather than a manual `Into` (clippy: from_over_into);
// the blanket impl still provides `Into<EditGitHookOption>` for callers.
impl From<EditGitHookOptionPatchBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IdExists>> for EditGitHookOption {
    /// Extract the request body from a fully-parameterized patch builder.
    fn from(builder: EditGitHookOptionPatchBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IdExists>) -> Self {
        builder.inner.body
    }
}
/// Builder for [`EditGitHookOption`](./struct.EditGitHookOption.html) object.
#[derive(Debug, Clone)]
pub struct EditGitHookOptionBuilder {
    // The object being built; returned by the `From`/`Into` conversion.
    body: self::EditGitHookOption,
}
impl EditGitHookOptionBuilder {
    /// Set the new hook content.
    #[inline]
    pub fn content(mut self, value: impl Into<String>) -> Self {
        let content: String = value.into();
        self.body.content = Some(content);
        self
    }
}
/// Builder created by [`EditGitHookOption::repo_edit_git_hook`](./struct.EditGitHookOption.html#method.repo_edit_git_hook) method for a `PATCH` operation associated with `EditGitHookOption`.
// The three marker parameters track at the type level which path parameters
// have been supplied; `Sendable` is only implemented once all three exist.
#[repr(transparent)]
#[derive(Debug, Clone)]
pub struct EditGitHookOptionPatchBuilder<Owner, Repo, Id> {
    inner: EditGitHookOptionPatchBuilderContainer,
    _param_owner: core::marker::PhantomData<Owner>,
    _param_repo: core::marker::PhantomData<Repo>,
    _param_id: core::marker::PhantomData<Id>,
}
// Marker-free storage shared by every type-state of the patch builder.
#[derive(Debug, Default, Clone)]
struct EditGitHookOptionPatchBuilderContainer {
    body: self::EditGitHookOption,
    param_owner: Option<String>,
    param_repo: Option<String>,
    param_id: Option<String>,
}
impl<Owner, Repo, Id> EditGitHookOptionPatchBuilder<Owner, Repo, Id> {
/// owner of the repo
#[inline]
pub fn owner(mut self, value: impl Into<String>) -> EditGitHookOptionPatchBuilder<crate::generics::OwnerExists, Repo, Id> {
self.inner.param_owner = Some(value.into());
unsafe { std::mem::transmute(self) }
}
/// name of the repo
#[inline]
pub fn repo(mut self, value: impl Into<String>) -> EditGitHookOptionPatchBuilder<Owner, crate::generics::RepoExists, Id> {
self.inner.param_repo = Some(value.into());
unsafe { std::mem::transmute(self) }
}
/// id of the hook to get
#[inline]
pub fn id(mut self, value: impl Into<String>) -> EditGitHookOptionPatchBuilder<Owner, Repo, crate::generics::IdExists> {
self.inner.param_id = Some(value.into());
unsafe { std::mem::transmute(self) }
}
#[inline]
pub fn content(mut self, value: impl Into<String>) -> Self {
self.inner.body.content = Some(value.into());
self
}
}
// The request is only sendable once all three path parameters exist at the
// type level, so the `expect`s in `rel_path` are unreachable in practice.
impl<Client: crate::client::ApiClient + Sync + 'static> crate::client::Sendable<Client> for EditGitHookOptionPatchBuilder<crate::generics::OwnerExists, crate::generics::RepoExists, crate::generics::IdExists> {
    type Output = crate::git_hook::GitHook;
    const METHOD: http::Method = http::Method::PATCH;
    fn rel_path(&self) -> std::borrow::Cow<'static, str> {
        format!("/repos/{owner}/{repo}/hooks/git/{id}", owner=self.inner.param_owner.as_ref().expect("missing parameter owner?"), repo=self.inner.param_repo.as_ref().expect("missing parameter repo?"), id=self.inner.param_id.as_ref().expect("missing parameter id?")).into()
    }
    fn modify(&self, req: Client::Request) -> Result<Client::Request, crate::client::ApiError<Client::Response>> {
        use crate::client::Request;
        // Attach the edit payload as the JSON request body.
        Ok(req
            .json(&self.inner.body))
    }
}
|
pub mod font;
pub mod fullscreen_scroller;
pub mod vertical_scroller;
use ssd1963::{Bounds, Display};
use self::{font::MonoFont, fullscreen_scroller::FullscreenVerticalScroller, vertical_scroller::Scroller};
use core::{
convert::{TryFrom, TryInto},
ops::RangeBounds,
};
// pub fn text_to_pixels<'a, 'font: 'a, Font: font::MonoFont>(_font: &'font Font, text: &'a str) -> impl Iterator<Item = bool> + 'a {
// text.chars().flat_map(move |ch| get_bits(_font, ch))
// }
/// Iterate over the pixels of `ch`'s glyph in the order the bits are stored
/// in the font bitmap. Characters outside printable ASCII (32..=127) fall
/// back to the glyph at code 127.
pub fn get_bits<'font, Font: font::MonoFont>(_font: &'font Font, ch: char) -> impl Iterator<Item = bool> {
    let mut ch = u32::from(ch);
    // Range check written with `contains` (clippy: manual_range_contains).
    if !(32..=127).contains(&ch) {
        ch = 127
    }
    let ch = ch as u8;
    let bits_per_char = u16::from(Font::CHAR_HEIGHT) * u16::from(Font::CHAR_WIDTH);
    // The glyph table starts at ASCII 32 (space).
    let bit_offset = (u16::from(ch) - 32) * bits_per_char;
    let byte_offset = bit_offset / 8;
    let bit_offset = (bit_offset % 8) as u8;
    CharPixelIter {
        data: &Font::data()[usize::from(byte_offset)..],
        bit_offset,
        count: bits_per_char,
    }
}
/// Like [`get_bits`], but yields the glyph's pixels transposed (see
/// `CharPixelTransIter`). Characters outside printable ASCII fall back to
/// the glyph at code 127.
pub fn get_bits_transposed<'font, Font: font::MonoFont>(_font: &'font Font, ch: char) -> impl Iterator<Item = bool> + 'font {
    let mut ch = u32::from(ch);
    // Range check written with `contains` (clippy: manual_range_contains).
    if !(32..=127).contains(&ch) {
        ch = 127
    }
    let ch = ch as u8;
    let bits_per_char = u16::from(Font::CHAR_HEIGHT) * u16::from(Font::CHAR_WIDTH);
    // Bit index of the glyph's first pixel within the font bitmap.
    let bit_offset = (u16::from(ch) - 32) * bits_per_char;
    CharPixelTransIter {
        data: &Font::data(),
        bit_offset,
        row: 0,
        col: 0,
        _font,
    }
}
/// Iterator over the pixels of one glyph, reading bits LSB-first from a
/// static font bitmap.
pub struct CharPixelIter {
    data: &'static [u8], // remaining font bytes; data[0] is the current byte
    bit_offset: u8,      // next bit to read within data[0] (0..=7)
    count: u16,          // pixels still to emit
}
impl Iterator for CharPixelIter {
    type Item = bool;
    fn next(&mut self) -> Option<Self::Item> {
        if self.count == 0 {
            return None;
        }
        self.count -= 1;
        let bit = self.data[0] & (1 << self.bit_offset);
        if self.bit_offset == 7 {
            // Current byte exhausted; advance to the next one.
            self.bit_offset = 0;
            self.data = &self.data[1..];
        } else {
            self.bit_offset += 1;
        }
        // `bit != 0` replaces the redundant `if bit == 0 { false } else { true }`
        // (clippy: needless_bool).
        Some(bit != 0)
    }
}
/// Transposed glyph pixel iterator: walks the glyph so that consecutive
/// items step through `CHAR_WIDTH` positions before advancing `col`,
/// reading directly from the font's bitmap.
pub struct CharPixelTransIter<'font, Font> {
    data: &'font [u8],
    bit_offset: u16, // bit index of the glyph's first pixel within `data`
    // NOTE(review): `row` counts 0..CHAR_WIDTH and `col` 0..CHAR_HEIGHT, so
    // the names look swapped relative to their ranges — confirm intent.
    row: u8,
    col: u8,
    _font: &'font Font,
}
impl<'font, Font: MonoFont> Iterator for CharPixelTransIter<'font, Font> {
    type Item = bool;
    fn next(&mut self) -> Option<Self::Item> {
        if self.row == Font::CHAR_WIDTH {
            // One sweep finished; advance `col` and stop after the last one.
            self.col += 1;
            if self.col == Font::CHAR_HEIGHT {
                return None;
            }
            self.row = 0;
        }
        let bit_offset = self.bit_offset + u16::from(self.row) * u16::from(Font::CHAR_WIDTH) + u16::from(self.col);
        self.row += 1;
        let byte_offset = bit_offset / 8;
        let bit_offset = bit_offset % 8;
        let bit = self.data[usize::from(byte_offset)] & (1 << bit_offset);
        // `bit != 0` replaces the redundant `if bit == 0 { false } else { true }`
        // (clippy: needless_bool).
        Some(bit != 0)
    }
}
// Full-screen bounds for the display type; end coordinates are inclusive
// (x_end = WIDTH - 1, y_end = HEIGHT - 1).
fn display_size<Disp: Display>(_display: &Disp) -> Bounds {
    Bounds {
        x_start: 0,
        x_end: Disp::WIDTH - 1,
        y_start: 0,
        y_end: Disp::HEIGHT - 1,
    }
}
/// A scrolling text terminal rendered onto a display with a monospace font.
pub struct Term<'me, Disp: Display, Font, Scroller> {
    display: &'me mut Disp,
    font: &'me Font,
    scroller: Scroller,
    bgcolor: Disp::Color,
    fgcolor: Disp::Color,
    bounds: Bounds,           // drawing area within the display
    line_offset: u16,         // vertical pixel offset of the current text line
    column_offset: u16,       // horizontal pixel offset of the next glyph
    start_with_newline: bool, // pending line break, applied before the next glyph
}
impl<'me, Disp, Font, Scroll> Term<'me, Disp, Font, Scroll>
where
    Disp: Display<Color = u16>, // TODO: properly implement Color and remove the Color = u16 constrain
    Font: MonoFont,
    Scroll: Scroller<Disp>,
{
    /// Create a terminal covering the whole display, drawing with
    /// all-bits-set foreground on a zero background.
    pub fn new(display: &'me mut Disp, font: &'me Font, scroller: Scroll) -> Self {
        Self {
            font,
            scroller,
            bgcolor: 0u16,
            fgcolor: 0b1111111111111111u16,
            bounds: display_size(display),
            display,
            line_offset: 0,
            column_offset: 0,
            start_with_newline: false,
        }
    }
    // panics if requested dimensions are greater than display size
    pub fn dimensions<X, Y>(mut self, x: X, y: Y) -> Self
    where
        X: RangeBounds<u16>,
        Y: RangeBounds<u16>,
    {
        self.bounds = Bounds::new_within(x, y, &display_size(self.display)).unwrap();
        self
    }
    /// Scroll the terminal area up by `by` pixels (passed to the scroller as
    /// a negative vertical offset).
    fn scroll_up(&mut self, by: u16) -> Result<(), Disp::Error> {
        let by = -i16::try_from(by).unwrap();
        self.scroller
            .scroll_area(self.display, self.bounds.range_horiz(), self.bounds.range_vert(), 0, by)
    }
    /// Draw `text`, handling wrapping and scrolling. Display errors are
    /// deliberately ignored (`.ok()`), so drawing is best-effort.
    pub fn write(&mut self, text: &str) {
        let line_len = (Disp::WIDTH / u16::from(Font::CHAR_WIDTH)).try_into().unwrap();
        let mut chars = SplitByLenOrNewline::new(text, line_len);
        loop {
            match chars.next() {
                None => return,
                // Defer the line break until the next printable character so
                // a trailing newline does not clear/scroll prematurely.
                Some(CharOrNewline::NewLine) => self.start_with_newline = true,
                Some(CharOrNewline::Char(c)) => {
                    if self.start_with_newline {
                        // Blank out the rest of the just-finished line.
                        let mut remaininig_area = self.bounds.clone();
                        remaininig_area.x_start += self.column_offset;
                        remaininig_area.y_start += self.line_offset;
                        remaininig_area.set_height(u16::from(Font::CHAR_HEIGHT));
                        self.display
                            .fill_area(
                                remaininig_area.range_horiz(),
                                remaininig_area.range_vert(),
                                &mut core::iter::repeat(self.bgcolor),
                            )
                            .ok();
                        // is there space for another line after this one?
                        let remaining_height = self.bounds.height() - self.line_offset - u16::from(Font::CHAR_HEIGHT);
                        self.line_offset = if remaining_height < u16::from(Font::CHAR_HEIGHT) {
                            self.scroll_up(u16::from(Font::CHAR_HEIGHT) - remaining_height).ok();
                            self.bounds.height() - u16::from(Font::CHAR_HEIGHT)
                        } else {
                            self.line_offset + u16::from(Font::CHAR_HEIGHT)
                        };
                        self.start_with_newline = false;
                        self.column_offset = 0;
                    }
                    // Map glyph bits onto fg/bg colors and blit one cell.
                    let (fg, bg) = (self.fgcolor, self.bgcolor);
                    let mut bits = get_bits_transposed(self.font, c).map(move |b| if b { fg } else { bg });
                    let end_column_offset = self.column_offset + u16::from(Font::CHAR_WIDTH);
                    // NOTE(review): end_line_offset is computed but never used.
                    let end_line_offset = self.line_offset + u16::from(Font::CHAR_HEIGHT);
                    let mut abc = self.bounds.clone();
                    abc.x_start += self.column_offset;
                    abc.y_start += self.line_offset;
                    abc.set_height(u16::from(Font::CHAR_HEIGHT));
                    abc.set_width(u16::from(Font::CHAR_WIDTH));
                    self.display.fill_area(abc.range_horiz(), abc.range_vert(), &mut bits).ok();
                    // NOTE(review): advances by CHAR_WIDTH - 1, making adjacent
                    // glyph cells overlap by one pixel column — confirm intended.
                    self.column_offset = end_column_offset - 1;
                }
            }
        }
    }
}
impl<'a, Disp, Font, Scroll> core::fmt::Write for Term<'a, Disp, Font, Scroll>
where
    Disp: Display<Color = u16>, // TODO: properly implement Color and remove the Color = u16 constrain
    Font: MonoFont,
    Scroll: Scroller<Disp>,
{
    /// Route formatted output through `Term::write`; drawing errors are
    /// swallowed there, so this never reports failure.
    fn write_str(&mut self, s: &str) -> core::fmt::Result {
        self.write(s);
        Ok(())
    }
}
/// Splits text into characters, emitting a `NewLine` marker either at an
/// explicit '\n'/'\r' or after `line_len` consecutive characters (forced
/// line wrap).
struct SplitByLenOrNewline<'a> {
    chars: core::str::Chars<'a>,
    line_len: u8,
    line_offset: u8, // characters emitted on the current line so far
}
/// One element of the split stream: a printable character or a line break.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CharOrNewline {
    Char(char),
    NewLine,
}
impl<'a> SplitByLenOrNewline<'a> {
    pub fn new(text: &'a str, line_len: u8) -> Self {
        Self {
            chars: text.chars(),
            line_len,
            line_offset: 0,
        }
    }
    /// Number of characters emitted on the current line so far.
    pub fn line_offset(&self) -> u8 {
        self.line_offset
    }
}
impl<'a> Iterator for SplitByLenOrNewline<'a> {
    type Item = CharOrNewline;
    fn next(&mut self) -> Option<Self::Item> {
        if self.line_offset < self.line_len {
            match self.chars.next() {
                None => None,
                Some('\n') | Some('\r') => {
                    self.line_offset = 0;
                    Some(CharOrNewline::NewLine)
                }
                Some(ch) => {
                    // BUG FIX: the offset was never advanced, so the
                    // forced-wrap branch below was unreachable and long
                    // lines were never broken at `line_len`.
                    self.line_offset += 1;
                    Some(CharOrNewline::Char(ch))
                }
            }
        } else {
            // Line is full: synthesize a break before continuing.
            self.line_offset = 0;
            Some(CharOrNewline::NewLine)
        }
    }
}
|
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::io::prelude::*;
// typedef struct {
// char idlength;
// char colourmaptype;
// char datatypecode; // 2
// short int colourmaporigin;
// short int colourmaplength;
// char colourmapdepth;
// short int x_origin;
// short int y_origin;
// short width;
// short height;
// char bitsperpixel;
// char imagedescriptor;
// } HEADER;
/// Write `image` as an uncompressed 24-bit TGA file named "message.tga";
/// pixel value 0 becomes black, anything else white.
fn save_tgc(image: &[[u8; 25]; 6], height: usize, width: usize) -> std::io::Result<()> {
    let mut file = File::create("message.tga")?;
    // 18-byte TGA header: data type 2 (uncompressed true-color),
    // little-endian width/height, 24 bits per pixel, all else zero.
    let mut header = [0u8; 18];
    header[2] = 2;
    header[12] = (width & 0xFF) as u8;
    header[13] = (width >> 8) as u8;
    header[14] = (height & 0xFF) as u8;
    header[15] = (height >> 8) as u8;
    header[16] = 24;
    file.write_all(&header)?;
    // TGA stores rows bottom-up, hence the reversed row order.
    for row in image.iter().take(height).rev() {
        for &pixel in row.iter().take(width) {
            let color: u8 = if pixel == 0 { 0 } else { 0xff };
            file.write_all(&[color, color, color])?;
        }
    }
    Ok(())
}
/// Decode the layered image (Advent of Code day 8 style): for each pixel,
/// the first non-transparent layer value wins; then print it and save a TGA.
fn main() {
    // let filename = "src/input0";
    let filename = "../part1/src/input";
    // Open the input and read just the first line, which holds the whole
    // pixel stream; an empty file yields an empty image (all transparent).
    let file = File::open(filename).unwrap();
    let reader = BufReader::new(file);
    let pixels_info: Vec<u8> = reader
        .lines()
        .next()
        .map(|line| line.unwrap().into_bytes())
        .unwrap_or_default();
    // ASCII digits -> numeric values (b'0' instead of the `'0' as u8` cast).
    let pi: Vec<u8> = pixels_info.iter().map(|&x| x - b'0').collect();
    const ROW: usize = 6;
    const COL: usize = 25;
    const LAYER_SIZE: usize = ROW * COL;
    // 0 = black, 1 = white, 2 = transparent; scan layers front-to-back and
    // keep the first opaque value per pixel.
    let mut final_image = [[10u8; COL]; ROW];
    for j in 0..LAYER_SIZE {
        let mut actual_pixel = 2u8;
        for i in 0..(pi.len() / LAYER_SIZE) {
            let pixel = pi[i * LAYER_SIZE + j];
            if pixel == 0 || pixel == 1 {
                actual_pixel = pixel;
                break;
            }
        }
        final_image[j / COL][j % COL] = actual_pixel;
    }
    for r in 0..ROW {
        for c in 0..COL {
            print!("{}", final_image[r][c]);
        }
        println!();
    }
    println!();
    println!();
    for r in 0..ROW {
        for c in 0..COL {
            print!("{}", if final_image[r][c] == 0 { ' ' } else { '0' });
        }
        println!();
    }
    // BUG FIX: the io::Result returned by save_tgc was silently discarded
    // (and the `as usize` casts on already-usize constants were redundant).
    save_tgc(&final_image, ROW, COL).expect("failed to write message.tga");
}
|
fn main() {
    let i = 3; // `i`'s lifetime starts here and lasts until the end of main
    {
        let borrow1 = &i; // first shared borrow of `i` begins
        println!("borrow1: {}", borrow1);
    } // borrow1 ends with its enclosing scope
    {
        let borrow2 = &i; // a second, independent shared borrow begins
        println!("borrow2: {}", borrow2);
    } // borrow2 ends
} // `i` is dropped; its lifetime ends
// svd2rust-generated reader/writer proxies and per-field types for DMASBMR.
#[doc = "Register `DMASBMR` reader"]
pub type R = crate::R<DMASBMR_SPEC>;
#[doc = "Register `DMASBMR` writer"]
pub type W = crate::W<DMASBMR_SPEC>;
#[doc = "Field `FB` reader - Fixed Burst Length When this bit is set to 1, the AHB master will initiate burst transfers of specified length (INCRx or SINGLE). When this bit is set to 0, the AHB master will initiate transfers of unspecified length (INCR) or SINGLE transfers."]
pub type FB_R = crate::BitReader;
#[doc = "Field `FB` writer - Fixed Burst Length When this bit is set to 1, the AHB master will initiate burst transfers of specified length (INCRx or SINGLE). When this bit is set to 0, the AHB master will initiate transfers of unspecified length (INCR) or SINGLE transfers."]
pub type FB_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `AAL` reader - Address-Aligned Beats When this bit is set to 1, the master performs address-aligned burst transfers on Read and Write channels."]
pub type AAL_R = crate::BitReader;
#[doc = "Field `AAL` writer - Address-Aligned Beats When this bit is set to 1, the master performs address-aligned burst transfers on Read and Write channels."]
pub type AAL_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
// MB and RB are read-only: no writer types are generated for them.
#[doc = "Field `MB` reader - Mixed Burst When this bit is set high and the FB bit is low, the AHB master performs undefined bursts transfers (INCR) for burst length of 16 or more. For burst length of 16 or less, the AHB master performs fixed burst transfers (INCRx and SINGLE)."]
pub type MB_R = crate::BitReader;
#[doc = "Field `RB` reader - Rebuild INCRx Burst When this bit is set high and the AHB master gets SPLIT, RETRY, or Early Burst Termination (EBT) response, the AHB master interface rebuilds the pending beats of any initiated burst transfer with INCRx and SINGLE transfers. By default, the AHB master interface rebuilds pending beats of an EBT with an unspecified (INCR) burst."]
pub type RB_R = crate::BitReader;
// Typed read accessors; each extracts one documented bit from the raw value.
impl R {
    #[doc = "Bit 0 - Fixed Burst Length When this bit is set to 1, the AHB master will initiate burst transfers of specified length (INCRx or SINGLE). When this bit is set to 0, the AHB master will initiate transfers of unspecified length (INCR) or SINGLE transfers."]
    #[inline(always)]
    pub fn fb(&self) -> FB_R {
        FB_R::new((self.bits & 1) != 0)
    }
    #[doc = "Bit 12 - Address-Aligned Beats When this bit is set to 1, the master performs address-aligned burst transfers on Read and Write channels."]
    #[inline(always)]
    pub fn aal(&self) -> AAL_R {
        AAL_R::new(((self.bits >> 12) & 1) != 0)
    }
    #[doc = "Bit 14 - Mixed Burst When this bit is set high and the FB bit is low, the AHB master performs undefined bursts transfers (INCR) for burst length of 16 or more. For burst length of 16 or less, the AHB master performs fixed burst transfers (INCRx and SINGLE)."]
    #[inline(always)]
    pub fn mb(&self) -> MB_R {
        MB_R::new(((self.bits >> 14) & 1) != 0)
    }
    #[doc = "Bit 15 - Rebuild INCRx Burst When this bit is set high and the AHB master gets SPLIT, RETRY, or Early Burst Termination (EBT) response, the AHB master interface rebuilds the pending beats of any initiated burst transfer with INCRx and SINGLE transfers. By default, the AHB master interface rebuilds pending beats of an EBT with an unspecified (INCR) burst."]
    #[inline(always)]
    pub fn rb(&self) -> RB_R {
        RB_R::new(((self.bits >> 15) & 1) != 0)
    }
}
// Typed write accessors for the writable fields (FB, AAL).
impl W {
    #[doc = "Bit 0 - Fixed Burst Length When this bit is set to 1, the AHB master will initiate burst transfers of specified length (INCRx or SINGLE). When this bit is set to 0, the AHB master will initiate transfers of unspecified length (INCR) or SINGLE transfers."]
    #[inline(always)]
    #[must_use]
    pub fn fb(&mut self) -> FB_W<DMASBMR_SPEC, 0> {
        FB_W::new(self)
    }
    #[doc = "Bit 12 - Address-Aligned Beats When this bit is set to 1, the master performs address-aligned burst transfers on Read and Write channels."]
    #[inline(always)]
    #[must_use]
    pub fn aal(&mut self) -> AAL_W<DMASBMR_SPEC, 12> {
        AAL_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    // Marked unsafe by the generator: raw writes bypass the field writers,
    // so reserved bits can be set — presumably hardware-dependent; prefer
    // the typed `fb`/`aal` writers where possible.
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "System bus mode register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`dmasbmr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`dmasbmr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct DMASBMR_SPEC;
impl crate::RegisterSpec for DMASBMR_SPEC {
    // 32-bit wide register.
    type Ux = u32;
}
#[doc = "`read()` method returns [`dmasbmr::R`](R) reader structure"]
impl crate::Readable for DMASBMR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`dmasbmr::W`](W) writer structure"]
impl crate::Writable for DMASBMR_SPEC {
    // No fields are cleared/set by writing 1 or 0 during modify.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets DMASBMR to value 0"]
impl crate::Resettable for DMASBMR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::AtomicWaker;
use std::cell::RefCell;
use std::io::{Error, ErrorKind, Result};
use std::sync::atomic::{AtomicU8, AtomicUsize, Ordering};
use std::task::{Context, Poll};
use crate::ResponseData;
use ds::RingSlice;
use protocol::{Request, RequestId};
/// Lifecycle states of a request/response slot, stored as a raw `u8` so it
/// can live inside an `AtomicU8`.
#[repr(u8)]
#[derive(Clone, Copy)]
pub enum ItemStatus {
    /// Idle: waiting for a request to be placed.
    Init = 0u8,
    /// A request has been placed into the slot.
    RequestReceived,
    /// The request has been flushed to the backend.
    RequestSent,
    /// The response payload has been written into the slot.
    ResponseReceived,
}
use ItemStatus::*;
impl PartialEq<u8> for ItemStatus {
    /// A status equals the `u8` matching its discriminant.
    #[inline(always)]
    fn eq(&self, other: &u8) -> bool {
        (*self as u8) == *other
    }
}
impl PartialEq<ItemStatus> for u8 {
    /// Mirror of the comparison above with the operands swapped.
    #[inline(always)]
    fn eq(&self, other: &ItemStatus) -> bool {
        (*other as u8) == *self
    }
}
// NOTE(review): ItemStatus is a plain Copy enum, which is already Send;
// this unsafe impl looks redundant — confirm before removing.
unsafe impl Send for ItemStatus {}
/// One slot pairing an in-flight request with its eventual response.
/// Status transitions are atomic; the RefCell fields are only touched by
/// the side that currently "owns" the slot according to `status`.
#[derive(Default)]
pub struct Item {
_id: usize,
seq: AtomicUsize, // sequence number used to pair a request with its response
status: AtomicU8, // current ItemStatus (0 = waiting for a request)
rid: RefCell<RequestId>,
request: RefCell<Option<Request>>,
waker: AtomicWaker,
response: RefCell<RingSlice>,
}
// SAFETY is asserted manually here: Item contains RefCell fields, so it is
// not auto-Send. NOTE(review): soundness relies on the status state machine
// ensuring only one side touches the RefCells at a time — confirm.
unsafe impl Send for Item {}
// State machine driving one request/response slot. All transitions go
// through CAS so that an unexpected state panics loudly instead of
// silently corrupting the request/response pairing.
impl Item {
/// Creates an idle slot bound to channel id `cid`.
pub fn new(cid: usize) -> Self {
Self {
_id: cid,
status: AtomicU8::new(Init as u8),
..Default::default()
}
}
// Stores a clone of the request (keeping its buffer handle alive) in this
// slot. NOTE(review): the original comment marked this assumption as
// still needing verification.
#[inline(always)]
pub fn place_request(&self, req: &Request) {
debug_assert_eq!(self.status.load(Ordering::Acquire), ItemStatus::Init as u8);
// Init -> RequestReceived; panics if the slot was not idle.
self.status_cas(ItemStatus::Init as u8, ItemStatus::RequestReceived as u8);
*self.request.borrow_mut() = Some(req.clone());
log::debug!(
"item status: place:{:?}",
self.rid.replace(req.id().clone())
);
}
/// Takes the stored request out of the slot and records `seq` so the
/// eventual response can be matched back to this request.
#[inline(always)]
pub fn take_request(&self, seq: usize) -> Request {
let req = self.request.borrow_mut().take().expect("take request");
log::debug!(
"item status: take request. {:?} seq:{} noreply:{}",
self.rid,
seq,
req.noreply()
);
// NOTE(review): the original comment said a no-reply request reverts to
// Init immediately after being taken, but the code below always moves
// to RequestSent — confirm the intended behavior.
self.status_cas(RequestReceived as u8, RequestSent as u8);
// No-reply requests don't need a seq to coordinate request and response.
if !req.noreply() {
self.seq_cas(0, seq);
}
req
}
#[inline(always)]
pub fn seq(&self) -> usize {
self.seq.load(Ordering::Acquire)
}
#[inline(always)]
fn status(&self) -> u8 {
self.status.load(Ordering::Acquire)
}
// CAS on `seq`; any mismatch indicates a pairing bug, so it panics.
#[inline(always)]
fn seq_cas(&self, old: usize, new: usize) {
match self
.seq
.compare_exchange(old, new, Ordering::AcqRel, Ordering::Acquire)
{
Ok(_) => {}
Err(cur) => panic!("item status seq cas. {} => {} but found {}", old, new, cur),
}
}
// Two expected situations:
// - ResponseReceived: data has arrived and is returned immediately;
// - otherwise: register the waker and stay pending.
// The caller guarantees the current status is not "shutdown".
pub fn poll_read(&self, cx: &mut Context) -> Poll<Result<ResponseData>> {
let status = self.status();
if status == ResponseReceived {
let response = self.response.take();
let rid = self.rid.take();
log::debug!("item status: read {:?}", response.location());
Poll::Ready(Ok(ResponseData::from(response, rid, self.seq())))
} else if status == Init {
// The stream/io receiver guarantees the request is sent before the
// response is read, so Init is never a legitimate state here. If we
// see it, the stream was reset; return an error so the client can
// run its fault-tolerance path.
return Poll::Ready(Err(Error::new(
ErrorKind::ConnectionReset,
"read in init status",
)));
} else {
self.waker.register_by_ref(&cx.waker());
Poll::Pending
}
}
/// Stores the response for sequence `seq` and wakes the pending reader.
pub fn place_response(&self, response: RingSlice, seq: usize) {
debug_assert_eq!(seq, self.seq());
log::debug!("item status: write :{:?} ", response.location());
self.response.replace(response);
// 1. Normally the status is RequestSent (request already flushed) — the
//    common case.
// 2. req_handler sends the request first and only then updates the
//    status to RequestSent, so the response may arrive in between while
//    the status is still RequestReceived.
// NOTE(review): this CAS only accepts RequestSent as the old value, so
// case 2 would panic — confirm whether that race can actually occur.
self.status_cas(RequestSent as u8, ResponseReceived as u8);
self.waker.wake();
}
#[inline(always)]
fn status_cas(&self, old: u8, new: u8) {
match self
.status
.compare_exchange(old, new, Ordering::AcqRel, Ordering::Acquire)
{
Ok(_) => {}
Err(cur) => panic!(
"item status: cas {} => {}, {} found. rid:{:?}",
old, new, cur, self.rid
),
}
}
// Called from Drop after the sender has delivered the response to the
// client; returns the slot to Init so it can be reused.
#[inline]
pub fn response_done(&self, seq: usize) {
// Reset the status back to Init.
let status = self.status();
debug_assert_eq!(status, ItemStatus::ResponseReceived as u8);
self.status_cas(status, ItemStatus::Init as u8);
// seq == 0 means the connection was reset earlier even though the
// response had already been obtained; nothing left to clear then.
if self.seq() > 0 {
self.seq_cas(seq, 0);
}
}
#[inline(always)]
pub(crate) fn status_init(&self) -> bool {
self.status() == ItemStatus::Init as u8
}
// reset only moves the slot from shutdown back to init and must be
// called after shutdown.
pub(crate) fn reset(&self) {
//self.status_cas(ItemStatus::Shutdown as u8, ItemStatus::Init as u8);
self.status.store(Init as u8, Ordering::Release);
self.seq.store(0, Ordering::Release);
self.waker.wake();
}
}
|
use std;
use rustc;
// -*- rust -*-
import core::{option, str, vec, result};
import result::{ok, err};
import std::{io, getopts};
import io::writer_util;
import option::{some, none};
import getopts::{opt_present};
import rustc::driver::driver::*;
import rustc::syntax::codemap;
import rustc::driver::diagnostic;
// Prints the compiler version (from CFG_VERSION at build time, falling back
// to "unknown version") and the host triple to stdout.
// NOTE(review): this is pre-1.0 Rust — `#env`/`#fmt` are old syntax
// extensions, and `vers` is reassigned without `mut`; confirm this was
// legal in this dialect rather than a latent bug.
fn version(argv0: str) {
let vers = "unknown version";
let env_vers = #env["CFG_VERSION"];
if str::byte_len(env_vers) != 0u { vers = env_vers; }
io::stdout().write_str(#fmt["%s %s\n", argv0, vers]);
io::stdout().write_str(#fmt["host: %s\n", host_triple()]);
}
// Writes the command-line help (usage header plus the option table) to
// stdout. The option list is one literal string appended to the formatted
// usage line.
fn usage(argv0: str) {
io::stdout().write_str(#fmt["usage: %s [options] <input>\n", argv0] +
"
options:

    -h --help          display this message
    -v --version       print version info and exit
    -o <filename>      write output to <filename>
    --out-dir <dir>    write output to compiler-chosen filename in <dir>
    --lib              compile a library crate
    --bin              compile an executable crate (default)
    --static           use or produce static libraries
    --no-core          omit the 'core' library (used and imported by default)
    --pretty [type]    pretty-print the input instead of compiling
    --ls               list the symbols defined by a crate file
    -L <path>          add a directory to the library search path
    --no-verify        suppress LLVM verification step (slight speedup)
    --parse-only       parse only; do not compile, assemble, or link
    --no-trans         run all passes except translation; no output
    -g                 produce debug info
    --opt-level <lvl>  optimize with possible levels 0-3
    -O                 equivalent to --opt-level=2
    -S                 compile only; do not assemble or link
    --no-asm-comments  do not add comments into the assembly source
    -c                 compile and assemble, but do not link
    --emit-llvm        produce an LLVM bitcode file
    --save-temps       write intermediate files in addition to normal output
    --stats            gather and report various compilation statistics
    --cfg <cfgspec>    configure the compilation environment
    --time-passes      time the individual phases of the compiler
    --time-llvm-passes time the individual phases of the LLVM backend
    --sysroot <path>   override the system root
    --target <triple>  target to compile for (default: host triple)
    --test             build test harness
    --gc               garbage collect shared data (experimental/temporary)
    --warn-unused-imports
                       warn about unnecessary imports
    --no-lint-ctypes   suppress lint-style ctypes usage check

");
}
// Parses the command line, handles -h/-v early exits, then drives the
// compiler: option parsing -> session setup -> (pretty-print | ls |
// compile). All diagnostics flow through `demitter`.
// NOTE(review): pre-1.0 Rust (`alt`, `ret`, `bind`); `match` is used here
// as an ordinary variable name, which this dialect allowed.
fn run_compiler(args: [str], demitter: diagnostic::emitter) {
// Don't display log spew by default. Can override with RUST_LOG.
logging::console_off();
// Split the binary name off the front of the argument vector.
let args = args, binary = vec::shift(args);
if vec::len(args) == 0u { usage(binary); ret; }
let match =
alt getopts::getopts(args, opts()) {
ok(m) { m }
err(f) {
early_error(demitter, getopts::fail_str(f))
}
};
if opt_present(match, "h") || opt_present(match, "help") {
usage(binary);
ret;
}
if opt_present(match, "v") || opt_present(match, "version") {
version(binary);
ret;
}
// Exactly one free argument (the input file) is required.
let ifile = alt vec::len(match.free) {
0u { early_error(demitter, "No input filename given.") }
1u { match.free[0] }
_ { early_error(demitter, "Multiple input filenames provided.") }
};
let sopts = build_session_options(match, demitter);
let sess = build_session(sopts, ifile, demitter);
let odir = getopts::opt_maybe_str(match, "out-dir");
let ofile = getopts::opt_maybe_str(match, "o");
let cfg = build_configuration(sess, binary, ifile);
// --pretty [type]: pretty-print the input instead of compiling, then exit.
let pretty =
option::map(getopts::opt_default(match, "pretty",
"normal"),
bind parse_pretty(sess, _));
alt pretty {
some::<pp_mode>(ppm) { pretty_print_input(sess, cfg, ifile, ppm); ret; }
none::<pp_mode> {/* continue */ }
}
// --ls: list crate metadata instead of compiling.
let ls = opt_present(match, "ls");
if ls {
list_metadata(sess, ifile, io::stdout());
ret;
}
compile_input(sess, cfg, ifile, odir, ofile);
}
/*
This is a sanity check that any failure of the compiler is performed
through the diagnostic module and reported properly - we shouldn't be calling
plain-old-fail on any execution path that might be taken. Since we have
console logging off by default, hitting a plain fail statement would make the
compiler silently exit, which would be terrible.
This method wraps the compiler in a subtask and injects a function into the
diagnostic emitter which records when we hit a fatal error. If the task
fails without recording a fatal error then we've encountered a compiler
bug and need to present an error.
*/
// Runs `f` in an unsupervised subtask with a diagnostic emitter that
// reports any fatal diagnostic over a channel. If the task fails and the
// first message received is `done` (i.e. no fatal diagnostic was ever
// emitted), that is an un-diagnosed failure — a compiler bug — and an ICE
// message is printed. Always re-fails so the process exits non-zero.
fn monitor(f: fn~(diagnostic::emitter)) {
enum monitor_msg {
fatal,
done,
};
let p = comm::port();
let ch = comm::chan(p);
alt task::try {||
task::unsupervise();
// The 'diagnostics emitter'. Every error, warning, etc. should
// go through this function.
let demitter = fn@(cmsp: option<(codemap::codemap, codemap::span)>,
msg: str, lvl: diagnostic::level) {
if lvl == diagnostic::fatal {
comm::send(ch, fatal);
}
diagnostic::emit(cmsp, msg, lvl);
};
// Destructor-like resource: always sends `done` when the task ends,
// letting the receiver distinguish "failed after a fatal diagnostic"
// from "failed silently".
resource finally(ch: comm::chan<monitor_msg>) {
comm::send(ch, done);
}
let _finally = finally(ch);
f(demitter)
} {
result::ok(_) { /* fallthrough */ }
result::err(_) {
// Task failed without emitting a fatal diagnostic
if comm::recv(p) == done {
diagnostic::emit(
none,
diagnostic::ice_msg("unexpected failure"),
diagnostic::error);
let note = "The compiler hit an unexpected failure path. \
This is a bug. Try running with \
RUST_LOG=rustc=0,::rt::backtrace \
to get further details and report the results \
to github.com/mozilla/rust/issues";
diagnostic::emit(none, note, diagnostic::note);
}
// Fail so the process returns a failure code
fail;
}
}
}
// Entry point: wraps the whole compiler run in `monitor` so unexpected
// failures surface as internal compiler errors instead of silent exits.
fn main(args: [str]) {
monitor {|demitter|
run_compiler(args, demitter);
}
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::{models, API_VERSION};
/// Aggregated error for every `file_system` operation in this module; each
/// variant transparently wraps the error type of one generated operation.
/// Generated by AutoRust.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
#[error(transparent)]
FileSystem_Mkdirs(#[from] file_system::mkdirs::Error),
#[error(transparent)]
FileSystem_Concat(#[from] file_system::concat::Error),
#[error(transparent)]
FileSystem_MsConcat(#[from] file_system::ms_concat::Error),
#[error(transparent)]
FileSystem_ListFileStatus(#[from] file_system::list_file_status::Error),
#[error(transparent)]
FileSystem_GetContentSummary(#[from] file_system::get_content_summary::Error),
#[error(transparent)]
FileSystem_GetFileStatus(#[from] file_system::get_file_status::Error),
#[error(transparent)]
FileSystem_Open(#[from] file_system::open::Error),
#[error(transparent)]
FileSystem_Append(#[from] file_system::append::Error),
#[error(transparent)]
FileSystem_Create(#[from] file_system::create::Error),
#[error(transparent)]
FileSystem_SetAcl(#[from] file_system::set_acl::Error),
#[error(transparent)]
FileSystem_ModifyAclEntries(#[from] file_system::modify_acl_entries::Error),
#[error(transparent)]
FileSystem_RemoveAclEntries(#[from] file_system::remove_acl_entries::Error),
#[error(transparent)]
FileSystem_RemoveDefaultAcl(#[from] file_system::remove_default_acl::Error),
#[error(transparent)]
FileSystem_RemoveAcl(#[from] file_system::remove_acl::Error),
#[error(transparent)]
FileSystem_GetAclStatus(#[from] file_system::get_acl_status::Error),
#[error(transparent)]
FileSystem_Delete(#[from] file_system::delete::Error),
#[error(transparent)]
FileSystem_Rename(#[from] file_system::rename::Error),
#[error(transparent)]
FileSystem_SetOwner(#[from] file_system::set_owner::Error),
#[error(transparent)]
FileSystem_SetPermission(#[from] file_system::set_permission::Error),
}
pub mod file_system {
use super::{models, API_VERSION};
/// Calls the WebHDFS MKDIRS operation to create a directory at `path`,
/// optionally with `permission`. HTTP 200 is decoded as
/// `FileOperationResult`; any other status is decoded as `AdlsError`.
pub async fn mkdirs(
operation_config: &crate::OperationConfig,
path: &str,
permission: Option<i32>,
op: &str,
) -> std::result::Result<models::FileOperationResult, mkdirs::Error> {
let http_client = operation_config.http_client();
// NOTE(review): the base URL already hard-codes `?op=MKDIRS`, and `op` is
// appended again below, producing a duplicate query key — AutoRust
// codegen quirk; confirm against the service before changing.
let url_str = &format!("{}/webhdfs/v1/{}?op=MKDIRS", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(mkdirs::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(mkdirs::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(permission) = permission {
url.query_pairs_mut().append_pair("permission", permission.to_string().as_str());
}
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(mkdirs::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(mkdirs::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| mkdirs::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| mkdirs::Error::DeserializeError(source, rsp_body.clone()))?;
Err(mkdirs::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `mkdirs` operation (AutoRust-generated).
pub mod mkdirs {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
pub async fn concat(
operation_config: &crate::OperationConfig,
path: &str,
sources: &[&str],
op: &str,
) -> std::result::Result<(), concat::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=CONCAT", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(concat::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(concat::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(concat::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(concat::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| concat::Error::DeserializeError(source, rsp_body.clone()))?;
Err(concat::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `concat` operation (AutoRust-generated).
pub mod concat {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Calls the MSCONCAT WebHDFS operation: concatenates the files listed in
/// the JSON `stream_contents` body into `path`, optionally deleting the
/// emptied source directory. `Ok(())` on HTTP 200.
pub async fn ms_concat(
operation_config: &crate::OperationConfig,
path: &str,
delete_source_directory: Option<bool>,
stream_contents: &serde_json::Value,
op: &str,
) -> std::result::Result<(), ms_concat::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=MSCONCAT", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(ms_concat::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(ms_concat::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(delete_source_directory) = delete_source_directory {
url.query_pairs_mut()
.append_pair("deleteSourceDirectory", delete_source_directory.to_string().as_str());
}
// The source-file list travels as a JSON request body.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(stream_contents).map_err(ms_concat::Error::SerializeError)?;
url.query_pairs_mut().append_pair("op", op);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(ms_concat::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(ms_concat::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| ms_concat::Error::DeserializeError(source, rsp_body.clone()))?;
Err(ms_concat::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `ms_concat` operation (AutoRust-generated).
pub mod ms_concat {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Calls the LISTSTATUS WebHDFS operation: lists directory entries under
/// `path` with optional paging (`list_size`, `list_after`, `list_before`)
/// and the `tooId` flag. HTTP 200 is decoded as `FileStatusesResult`.
pub async fn list_file_status(
operation_config: &crate::OperationConfig,
path: &str,
list_size: Option<i32>,
list_after: Option<&str>,
list_before: Option<&str>,
too_id: Option<bool>,
op: &str,
) -> std::result::Result<models::FileStatusesResult, list_file_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=LISTSTATUS", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(list_file_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(list_file_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(list_size) = list_size {
url.query_pairs_mut().append_pair("listSize", list_size.to_string().as_str());
}
if let Some(list_after) = list_after {
url.query_pairs_mut().append_pair("listAfter", list_after);
}
if let Some(list_before) = list_before {
url.query_pairs_mut().append_pair("listBefore", list_before);
}
if let Some(too_id) = too_id {
url.query_pairs_mut().append_pair("tooId", too_id.to_string().as_str());
}
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(list_file_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(list_file_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileStatusesResult = serde_json::from_slice(rsp_body)
.map_err(|source| list_file_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError = serde_json::from_slice(rsp_body)
.map_err(|source| list_file_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(list_file_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `list_file_status` operation (AutoRust-generated).
pub mod list_file_status {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Calls the GETCONTENTSUMMARY WebHDFS operation for `path`. HTTP 200 is
/// decoded as `ContentSummaryResult`; other statuses as `AdlsError`.
pub async fn get_content_summary(
operation_config: &crate::OperationConfig,
path: &str,
op: &str,
) -> std::result::Result<models::ContentSummaryResult, get_content_summary::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=GETCONTENTSUMMARY", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(get_content_summary::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_content_summary::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_content_summary::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_content_summary::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::ContentSummaryResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_content_summary::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError = serde_json::from_slice(rsp_body)
.map_err(|source| get_content_summary::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_content_summary::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `get_content_summary` operation (AutoRust-generated).
pub mod get_content_summary {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Calls the GETFILESTATUS WebHDFS operation for `path`, optionally with
/// the `tooId` flag. HTTP 200 is decoded as `FileStatusResult`.
pub async fn get_file_status(
operation_config: &crate::OperationConfig,
path: &str,
too_id: Option<bool>,
op: &str,
) -> std::result::Result<models::FileStatusResult, get_file_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=GETFILESTATUS", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(get_file_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_file_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(too_id) = too_id {
url.query_pairs_mut().append_pair("tooId", too_id.to_string().as_str());
}
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_file_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_file_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileStatusResult = serde_json::from_slice(rsp_body)
.map_err(|source| get_file_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError = serde_json::from_slice(rsp_body)
.map_err(|source| get_file_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_file_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `get_file_status` operation (AutoRust-generated).
pub mod get_file_status {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Calls the OPEN WebHDFS operation to read file data from `path`, with
/// optional `offset`/`length` windowing and a session id. On HTTP 200 the
/// raw response body is returned as `bytes::Bytes` (no JSON decoding).
pub async fn open(
operation_config: &crate::OperationConfig,
path: &str,
length: Option<i64>,
offset: Option<i64>,
file_session_id: Option<&str>,
read: &str,
op: &str,
) -> std::result::Result<bytes::Bytes, open::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=OPEN", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(open::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(open::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(length) = length {
url.query_pairs_mut().append_pair("length", length.to_string().as_str());
}
if let Some(offset) = offset {
url.query_pairs_mut().append_pair("offset", offset.to_string().as_str());
}
if let Some(file_session_id) = file_session_id {
url.query_pairs_mut().append_pair("fileSessionId", file_session_id);
}
url.query_pairs_mut().append_pair("read", read);
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(open::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(open::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value = rsp_body.clone();
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| open::Error::DeserializeError(source, rsp_body.clone()))?;
Err(open::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
// Error type for the `open` operation (AutoRust-generated).
pub mod open {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Calls the APPEND WebHDFS operation: posts the JSON `stream_contents`
/// body to `path` with optional `offset`, `syncFlag`, `leaseId` and
/// `fileSessionId` query parameters. `Ok(())` on HTTP 200.
pub async fn append(
operation_config: &crate::OperationConfig,
path: &str,
stream_contents: &serde_json::Value,
offset: Option<i64>,
sync_flag: Option<&str>,
lease_id: Option<&str>,
file_session_id: Option<&str>,
append: &str,
op: &str,
) -> std::result::Result<(), append::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=APPEND", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(append::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
// Attach a bearer token when the operation config carries a credential.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(append::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The appended data travels as a JSON request body.
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(stream_contents).map_err(append::Error::SerializeError)?;
if let Some(offset) = offset {
url.query_pairs_mut().append_pair("offset", offset.to_string().as_str());
}
if let Some(sync_flag) = sync_flag {
url.query_pairs_mut().append_pair("syncFlag", sync_flag);
}
if let Some(lease_id) = lease_id {
url.query_pairs_mut().append_pair("leaseId", lease_id);
}
if let Some(file_session_id) = file_session_id {
url.query_pairs_mut().append_pair("fileSessionId", file_session_id);
}
url.query_pairs_mut().append_pair("append", append);
url.query_pairs_mut().append_pair("op", op);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(append::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(append::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| append::Error::DeserializeError(source, rsp_body.clone()))?;
Err(append::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
pub mod append {
use super::{models, API_VERSION};
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `CREATE` operation to create a file at `path` via HTTP PUT.
///
/// When `stream_contents` is `Some`, it is serialized as a JSON body and the
/// `content-type` header is set; otherwise an empty body is sent. Optional
/// `overwrite`, `sync_flag`, `lease_id` and `permission` become query
/// parameters when present. Returns `Ok(())` on HTTP 201 (Created); any other
/// status is decoded as [`models::AdlsError`].
pub async fn create(
operation_config: &crate::OperationConfig,
path: &str,
stream_contents: Option<&serde_json::Value>,
overwrite: Option<bool>,
sync_flag: Option<&str>,
lease_id: Option<&str>,
permission: Option<i32>,
write: &str,
op: &str,
) -> std::result::Result<(), create::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=CREATE", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(create::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(create::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// The content-type header is only set when an actual JSON body is present.
let req_body = if let Some(stream_contents) = stream_contents {
req_builder = req_builder.header("content-type", "application/json");
azure_core::to_json(stream_contents).map_err(create::Error::SerializeError)?
} else {
bytes::Bytes::from_static(azure_core::EMPTY_BODY)
};
if let Some(overwrite) = overwrite {
url.query_pairs_mut().append_pair("overwrite", overwrite.to_string().as_str());
}
if let Some(sync_flag) = sync_flag {
url.query_pairs_mut().append_pair("syncFlag", sync_flag);
}
if let Some(lease_id) = lease_id {
url.query_pairs_mut().append_pair("leaseId", lease_id);
}
if let Some(permission) = permission {
url.query_pairs_mut().append_pair("permission", permission.to_string().as_str());
}
url.query_pairs_mut().append_pair("write", write);
// NOTE(review): `?op=CREATE` is already in the URL template; this adds a duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(create::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(create::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::CREATED => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| create::Error::DeserializeError(source, rsp_body.clone()))?;
Err(create::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `create` operation.
pub mod create {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `SETACL` operation via HTTP PUT, replacing the ACL of
/// the entry at `path` with `aclspec`.
///
/// Sends an empty body; returns `Ok(())` on HTTP 200, otherwise decodes the
/// response as [`models::AdlsError`].
pub async fn set_acl(
operation_config: &crate::OperationConfig,
path: &str,
aclspec: &str,
op: &str,
) -> std::result::Result<(), set_acl::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=SETACL", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(set_acl::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_acl::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("aclspec", aclspec);
// NOTE(review): `?op=SETACL` is already in the URL template; this adds a duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_acl::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_acl::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| set_acl::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_acl::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `set_acl` operation.
pub mod set_acl {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `MODIFYACLENTRIES` operation via HTTP PUT, merging the
/// entries in `aclspec` into the ACL of the entry at `path`.
///
/// Sends an empty body; returns `Ok(())` on HTTP 200, otherwise decodes the
/// response as [`models::AdlsError`].
pub async fn modify_acl_entries(
operation_config: &crate::OperationConfig,
path: &str,
aclspec: &str,
op: &str,
) -> std::result::Result<(), modify_acl_entries::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=MODIFYACLENTRIES", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(modify_acl_entries::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(modify_acl_entries::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("aclspec", aclspec);
// NOTE(review): `?op=MODIFYACLENTRIES` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(modify_acl_entries::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(modify_acl_entries::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError = serde_json::from_slice(rsp_body)
.map_err(|source| modify_acl_entries::Error::DeserializeError(source, rsp_body.clone()))?;
Err(modify_acl_entries::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `modify_acl_entries` operation.
pub mod modify_acl_entries {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `REMOVEACLENTRIES` operation via HTTP PUT, removing the
/// entries listed in `aclspec` from the ACL of the entry at `path`.
///
/// Sends an empty body; returns `Ok(())` on HTTP 200, otherwise decodes the
/// response as [`models::AdlsError`].
pub async fn remove_acl_entries(
operation_config: &crate::OperationConfig,
path: &str,
aclspec: &str,
op: &str,
) -> std::result::Result<(), remove_acl_entries::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=REMOVEACLENTRIES", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(remove_acl_entries::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_acl_entries::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("aclspec", aclspec);
// NOTE(review): `?op=REMOVEACLENTRIES` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove_acl_entries::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_acl_entries::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError = serde_json::from_slice(rsp_body)
.map_err(|source| remove_acl_entries::Error::DeserializeError(source, rsp_body.clone()))?;
Err(remove_acl_entries::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `remove_acl_entries` operation.
pub mod remove_acl_entries {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `REMOVEDEFAULTACL` operation via HTTP PUT, stripping the
/// default ACL from the entry at `path`.
///
/// Sends an empty body; returns `Ok(())` on HTTP 200, otherwise decodes the
/// response as [`models::AdlsError`].
pub async fn remove_default_acl(
operation_config: &crate::OperationConfig,
path: &str,
op: &str,
) -> std::result::Result<(), remove_default_acl::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=REMOVEDEFAULTACL", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(remove_default_acl::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_default_acl::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// NOTE(review): `?op=REMOVEDEFAULTACL` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove_default_acl::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_default_acl::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError = serde_json::from_slice(rsp_body)
.map_err(|source| remove_default_acl::Error::DeserializeError(source, rsp_body.clone()))?;
Err(remove_default_acl::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `remove_default_acl` operation.
pub mod remove_default_acl {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `REMOVEACL` operation via HTTP PUT, removing the whole
/// ACL from the entry at `path`.
///
/// Sends an empty body; returns `Ok(())` on HTTP 200, otherwise decodes the
/// response as [`models::AdlsError`].
pub async fn remove_acl(operation_config: &crate::OperationConfig, path: &str, op: &str) -> std::result::Result<(), remove_acl::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=REMOVEACL", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(remove_acl::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(remove_acl::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
// NOTE(review): `?op=REMOVEACL` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(remove_acl::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(remove_acl::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| remove_acl::Error::DeserializeError(source, rsp_body.clone()))?;
Err(remove_acl::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `remove_acl` operation.
pub mod remove_acl {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `GETACLSTATUS` operation via HTTP GET and returns the
/// parsed [`models::AclStatusResult`] for the entry at `path`.
///
/// `too_id` is forwarded as the `tooId` query parameter when present. On any
/// non-200 status, the body is decoded as [`models::AdlsError`].
pub async fn get_acl_status(
operation_config: &crate::OperationConfig,
path: &str,
too_id: Option<bool>,
op: &str,
) -> std::result::Result<models::AclStatusResult, get_acl_status::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=GETACLSTATUS", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(get_acl_status::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(get_acl_status::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(too_id) = too_id {
url.query_pairs_mut().append_pair("tooId", too_id.to_string().as_str());
}
// NOTE(review): `?op=GETACLSTATUS` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(get_acl_status::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(get_acl_status::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::AclStatusResult =
serde_json::from_slice(rsp_body).map_err(|source| get_acl_status::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| get_acl_status::Error::DeserializeError(source, rsp_body.clone()))?;
Err(get_acl_status::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `get_acl_status` operation.
pub mod get_acl_status {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `DELETE` operation on the entry at `path` and returns
/// the parsed [`models::FileOperationResult`].
///
/// `recursive` is forwarded as a query parameter when present. On any non-200
/// status, the body is decoded as [`models::AdlsError`].
pub async fn delete(
operation_config: &crate::OperationConfig,
path: &str,
recursive: Option<bool>,
op: &str,
) -> std::result::Result<models::FileOperationResult, delete::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=DELETE", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(delete::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(delete::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(recursive) = recursive {
url.query_pairs_mut().append_pair("recursive", recursive.to_string().as_str());
}
// NOTE(review): `?op=DELETE` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(delete::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(delete::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| delete::Error::DeserializeError(source, rsp_body.clone()))?;
Err(delete::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `delete` operation.
pub mod delete {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `RENAME` operation via HTTP PUT, moving the entry at
/// `path` to `destination`, and returns the parsed
/// [`models::FileOperationResult`].
///
/// On any non-200 status, the body is decoded as [`models::AdlsError`].
pub async fn rename(
operation_config: &crate::OperationConfig,
path: &str,
destination: &str,
op: &str,
) -> std::result::Result<models::FileOperationResult, rename::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=RENAME", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(rename::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(rename::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
url.query_pairs_mut().append_pair("destination", destination);
// NOTE(review): `?op=RENAME` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(rename::Error::BuildRequestError)?;
let rsp = http_client.execute_request(req).await.map_err(rename::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => {
let rsp_body = rsp.body();
let rsp_value: models::FileOperationResult =
serde_json::from_slice(rsp_body).map_err(|source| rename::Error::DeserializeError(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| rename::Error::DeserializeError(source, rsp_body.clone()))?;
Err(rename::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `rename` operation.
pub mod rename {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `SETOWNER` operation via HTTP PUT on the entry at `path`.
///
/// `owner` and `group` are each forwarded as query parameters only when
/// supplied. Sends an empty body; returns `Ok(())` on HTTP 200, otherwise
/// decodes the response as [`models::AdlsError`].
pub async fn set_owner(
operation_config: &crate::OperationConfig,
path: &str,
owner: Option<&str>,
group: Option<&str>,
op: &str,
) -> std::result::Result<(), set_owner::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=SETOWNER", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(set_owner::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_owner::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(owner) = owner {
url.query_pairs_mut().append_pair("owner", owner);
}
if let Some(group) = group {
url.query_pairs_mut().append_pair("group", group);
}
// NOTE(review): `?op=SETOWNER` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_owner::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_owner::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| set_owner::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_owner::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `set_owner` operation.
pub mod set_owner {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
/// Invokes the WebHDFS `SETPERMISSION` operation via HTTP PUT on the entry at
/// `path`.
///
/// `permission` is forwarded as a query parameter only when supplied. Sends an
/// empty body; returns `Ok(())` on HTTP 200, otherwise decodes the response as
/// [`models::AdlsError`].
pub async fn set_permission(
operation_config: &crate::OperationConfig,
path: &str,
permission: Option<&str>,
op: &str,
) -> std::result::Result<(), set_permission::Error> {
let http_client = operation_config.http_client();
let url_str = &format!("{}/webhdfs/v1/{}?op=SETPERMISSION", operation_config.base_path(), path);
let mut url = url::Url::parse(url_str).map_err(set_permission::Error::ParseUrlError)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
// Attach a bearer token only when a credential is configured.
if let Some(token_credential) = operation_config.token_credential() {
let token_response = token_credential
.get_token(operation_config.token_credential_resource())
.await
.map_err(set_permission::Error::GetTokenError)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
}
url.query_pairs_mut().append_pair("api-version", super::API_VERSION);
if let Some(permission) = permission {
url.query_pairs_mut().append_pair("permission", permission);
}
// NOTE(review): `?op=SETPERMISSION` is already in the URL template; duplicate `op`.
url.query_pairs_mut().append_pair("op", op);
let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(set_permission::Error::BuildRequestError)?;
let rsp = http_client
.execute_request(req)
.await
.map_err(set_permission::Error::ExecuteRequestError)?;
match rsp.status() {
http::StatusCode::OK => Ok(()),
status_code => {
let rsp_body = rsp.body();
let rsp_value: models::AdlsError =
serde_json::from_slice(rsp_body).map_err(|source| set_permission::Error::DeserializeError(source, rsp_body.clone()))?;
Err(set_permission::Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
}
/// Error types for the `set_permission` operation.
pub mod set_permission {
use super::{models, API_VERSION};
/// Failure modes covering URL construction, auth, serialization, transport,
/// and response decoding.
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::AdlsError,
},
#[error("Failed to parse request URL: {0}")]
ParseUrlError(url::ParseError),
#[error("Failed to build request: {0}")]
BuildRequestError(http::Error),
#[error("Failed to execute request: {0}")]
ExecuteRequestError(azure_core::HttpError),
#[error("Failed to serialize request body: {0}")]
SerializeError(serde_json::Error),
#[error("Failed to deserialize response: {0}, body: {1:?}")]
DeserializeError(serde_json::Error, bytes::Bytes),
#[error("Failed to get access token: {0}")]
GetTokenError(azure_core::Error),
}
}
}
|
//! # 376. Wiggle Subsequence
//! https://leetcode-cn.com/problems/wiggle-subsequence/
//! A sequence is a wiggle sequence when the differences between successive numbers strictly
//! alternate between positive and negative. The first difference (if one exists) may be either
//! positive or negative; a sequence with fewer than two elements is trivially a wiggle sequence.
//! For example, [1,7,4,9,2,5] is a wiggle sequence because the differences (6,-3,5,-7,3)
//! alternate in sign. By contrast, [1,4,7,2,5] is not (its first two differences are both
//! positive) and [1,7,4,5,5] is not (its last difference is zero).
//! Given an integer sequence, return the length of its longest wiggle subsequence, obtained by
//! deleting zero or more elements while keeping the remaining elements in their original order.
//! # Approach
//! Split the DP state by the sign of a[n] - a[n-1] into `down` and `up`:
//! a[n] > a[n-1]: up[i] = max(up[i-1], down[i-1] + 1), down[i] = down[i-1]
//! a[n] < a[n-1]: down[i] = max(down[i-1], up[i-1] + 1), up[i] = up[i-1]
//! a[n] = a[n-1]: down[i] = down[i-1], up[i] = up[i-1]
//!
pub struct Solution;
impl Solution {
    /// Returns the length of the longest wiggle subsequence of `nums`.
    ///
    /// Runs the classic up/down dynamic program, but since each step only
    /// reads the previous step's values, the original O(n) `up`/`down`
    /// vectors are collapsed into two rolling counters (O(1) space).
    ///
    /// Sequences with fewer than two elements are wiggle sequences by
    /// definition, so their own length is returned.
    pub fn wiggle_max_length(nums: Vec<i32>) -> i32 {
        if nums.len() < 2 {
            return nums.len() as i32;
        }
        // `up`/`down`: length of the longest wiggle subsequence ending so far
        // whose final difference is positive / negative, respectively.
        let (mut up, mut down) = (1i32, 1i32);
        for w in nums.windows(2) {
            if w[1] > w[0] {
                up = up.max(down + 1);
            } else if w[1] < w[0] {
                down = down.max(up + 1);
            }
            // Equal neighbours leave both counters unchanged.
        }
        up.max(down)
    }
}
#[cfg(test)]
mod tests {
    use super::Solution;

    /// Checks a genuinely wiggly input and an all-equal input.
    #[test]
    fn it_works() {
        let wiggly = vec![1, 17, 5, 10, 13, 15, 10, 5, 16, 8];
        assert_eq!(Solution::wiggle_max_length(wiggly), 7);

        let flat = vec![1, 1, 1, 1, 1, 1, 1];
        assert_eq!(Solution::wiggle_max_length(flat), 1);
    }
}
|
pub mod random;
pub mod metrics;
pub mod cli; |
mod repository;
mod statistics;
pub use repository::*;
pub use statistics::*;
use common::model::StringId;
use common::result::Result;
pub type PublicationId = StringId;
use crate::domain::user::User;
/// A publication written by a [`User`], paired with its accumulated statistics.
#[derive(Debug, Clone)]
pub struct Publication {
    id: PublicationId,
    author: User,
    statistics: Statistics,
}

impl Publication {
    /// Assembles a publication from its parts.
    ///
    /// Currently infallible; the `Result` return keeps the constructor
    /// signature uniform with the rest of the domain layer.
    pub fn new(id: PublicationId, author: User, statistics: Statistics) -> Result<Self> {
        Ok(Self { id, author, statistics })
    }

    /// Identifier of this publication.
    pub fn id(&self) -> &PublicationId {
        &self.id
    }

    /// The user who authored this publication.
    pub fn author(&self) -> &User {
        &self.author
    }

    /// Statistics gathered for this publication.
    pub fn statistics(&self) -> &Statistics {
        &self.statistics
    }
}
|
//! A simple Driver for the [Waveshare](https://github.com/waveshare/e-Paper) E-Ink Displays via SPI
//!
//! - Built using [`embedded-hal`] traits.
//! - Graphics support is added through [`embedded-graphics`]
//!
//! [`embedded-graphics`]: https://docs.rs/embedded-graphics/
//! [`embedded-hal`]: https://docs.rs/embedded-hal
//!
//!
//! # Example
//!
//!```rust, no_run
//!# use embedded_hal_mock::*;
//!# fn main() -> Result<(), MockError> {
//!use embedded_graphics::{
//! pixelcolor::BinaryColor::On as Black, prelude::*, primitives::{Line, PrimitiveStyle},
//!};
//!use epd_waveshare::{epd1in54::*, prelude::*};
//!#
//!# let expectations = [];
//!# let mut spi = spi::Mock::new(&expectations);
//!# let expectations = [];
//!# let cs_pin = pin::Mock::new(&expectations);
//!# let busy_in = pin::Mock::new(&expectations);
//!# let dc = pin::Mock::new(&expectations);
//!# let rst = pin::Mock::new(&expectations);
//!# let mut delay = delay::MockNoop::new();
//!
//!// Setup EPD
//!let mut epd = Epd1in54::new(&mut spi, cs_pin, busy_in, dc, rst, &mut delay, None)?;
//!
//!// Use display graphics from embedded-graphics
//!let mut display = Display1in54::default();
//!
//!// Use embedded graphics for drawing a line
//!
//!let _ = Line::new(Point::new(0, 120), Point::new(0, 295))
//! .into_styled(PrimitiveStyle::with_stroke(Color::Black, 1))
//! .draw(&mut display);
//!
//! // Display updated frame
//!epd.update_frame(&mut spi, &display.buffer(), &mut delay)?;
//!epd.display_frame(&mut spi, &mut delay)?;
//!
//!// Set the EPD to sleep
//!epd.sleep(&mut spi, &mut delay)?;
//!# Ok(())
//!# }
//!```
//!
//! # Other information and requirements
//!
//! - Buffersize: Wherever a buffer is used it always needs to be of the size: `width / 8 * length`,
//! where width and length being either the full e-ink size or the partial update window size
//!
//! ### SPI
//!
//! MISO is not connected/available. SPI_MODE_0 is used (CPHL = 0, CPOL = 0) with 8 bits per word, MSB first.
//!
//! Maximum speed tested by myself was 8Mhz but more should be possible (Ben Krasnow used 18Mhz with his implemenation)
//!
#![no_std]
#![deny(missing_docs)]
#[cfg(feature = "graphics")]
pub mod graphics;
mod traits;
pub mod color;
/// Interface for the physical connection between display and the controlling device
mod interface;
pub mod epd1in54;
pub mod epd1in54_v2;
pub mod epd1in54b;
pub mod epd1in54c;
pub mod epd2in13_v2;
pub mod epd2in13bc;
pub mod epd2in7b;
pub mod epd2in9;
pub mod epd2in9_v2;
pub mod epd2in9bc;
pub mod epd3in7;
pub mod epd4in2;
pub mod epd5in65f;
pub mod epd5in83b_v2;
pub mod epd7in5;
pub mod epd7in5_hd;
pub mod epd7in5_v2;
pub mod epd7in5_v3;
pub mod epd7in5b_v2;
pub(crate) mod type_a;
/// Includes everything important besides the chosen Display
pub mod prelude {
    // Color types shared by all displays (mono, tri-color, 7-color).
    pub use crate::color::{Color, OctColor, TriColor};
    // The driver traits a user interacts with.
    pub use crate::traits::{
        QuickRefresh, RefreshLut, WaveshareDisplay, WaveshareThreeColorDisplay,
    };
    pub use crate::SPI_MODE;
    // Framebuffer helpers are only compiled with the `graphics` feature.
    #[cfg(feature = "graphics")]
    pub use crate::graphics::{Display, DisplayRotation};
}
/// Computes the needed buffer length. Takes care of rounding up in case width
/// is not divisible by 8.
///
/// Pixels are packed one per bit, so every display row occupies
/// `ceil(width / 8)` bytes — a row whose width is not a multiple of 8 still
/// needs a whole trailing byte — and the buffer holds `height` such rows.
///
/// unused
/// bits width
/// <----><------------------------>
/// \[XXXXX210\]\[76543210\]...\[76543210\] ^
/// \[XXXXX210\]\[76543210\]...\[76543210\] | height
/// \[XXXXX210\]\[76543210\]...\[76543210\] v
pub const fn buffer_len(width: usize, height: usize) -> usize {
    let bytes_per_row = (width + 7) / 8;
    bytes_per_row * height
}
use embedded_hal::spi::{Mode, Phase, Polarity};
/// SPI mode -
/// For more infos see [Requirements: SPI](index.html#spi)
///
/// Idle-low clock sampling on the first (rising) edge, i.e. SPI mode 0
/// (CPOL = 0, CPHA = 0) as stated in the crate-level SPI requirements above.
pub const SPI_MODE: Mode = Mode {
    phase: Phase::CaptureOnFirstTransition,
    polarity: Polarity::IdleLow,
};
|
use itertools::Itertools;
use std::collections::HashMap;
use std::str::FromStr;
fn main() -> std::io::Result<()> {
let input = std::fs::read_to_string("examples/14/input.txt")?;
let lines = input.lines().map(|line| line.parse::<Line>().unwrap());
let mut mask = vec![];
let mut memory: HashMap<usize, u64> = HashMap::new();
for line in lines {
match line {
Line::Mask(v) => {
mask = v;
}
Line::Write(w) => {
let mut value = w.value;
for bit in &mask {
match bit {
(i, Some(true)) => {
value |= 1 << i;
}
(i, Some(false)) => {
value &= !(1 << i);
}
_ => {}
}
}
memory.insert(w.address, value);
}
}
}
println!("{}", memory.iter().map(|(_, v)| v).sum::<u64>());
// Part 2
let lines = input.lines().map(|line| line.parse::<Line>().unwrap());
let mut mask = vec![];
let mut memory: HashMap<usize, u64> = HashMap::new();
for line in lines {
match line {
Line::Mask(v) => {
mask = v;
}
Line::Write(w) => {
let mut address = w.address;
for bit in &mask {
match bit {
(i, Some(true)) => {
address |= 1 << i;
}
_ => {}
}
}
let floating = mask.iter().filter_map(|x| match x {
(i, None) => Some(i),
_ => None,
});
let bits_it = floating
.map(|x| vec![(*x, true), (*x, false)])
.multi_cartesian_product();
for bits in bits_it {
let mut _address = address;
for bit in bits {
match bit {
(i, true) => {
_address |= 1 << i;
}
(i, false) => {
_address &= !(1 << i);
}
}
}
memory.insert(_address, w.value);
}
}
}
}
println!("{}", memory.iter().map(|(_, v)| v).sum::<u64>());
Ok(())
}
/// A single `mem[addr] = value` assignment from the program.
struct Write {
    address: usize,
    value: u64,
}
/// A bitmask stored as `(bit index, setting)` pairs: `Some(true)` /
/// `Some(false)` force the bit, `None` marks a floating `X` bit.
type Mask = Vec<(usize, Option<bool>)>;
/// One parsed input line: either a new mask or a memory write.
enum Line {
    Mask(Mask),
    Write(Write),
}
/// Error type required by `FromStr`; malformed input currently panics, so
/// this value is never actually produced.
#[derive(Debug)]
struct ParseError;
impl FromStr for Line {
    type Err = ParseError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut parts = s.split('=').map(str::trim);
        let lhs = parts.next().unwrap();
        // Distinguish "mask" from "mem[...]" by their second character.
        match lhs.chars().nth(1).unwrap() {
            'a' => {
                // "mask = X1X0..." — 36 characters, most significant bit
                // first, so the character at index i describes bit 35 - i.
                let bits = parts
                    .next()
                    .unwrap()
                    .chars()
                    .enumerate()
                    .map(|(i, c)| {
                        let setting = match c {
                            'X' => None,
                            '1' => Some(true),
                            '0' => Some(false),
                            _ => panic!(),
                        };
                        (35 - i, setting)
                    })
                    .collect();
                Ok(Line::Mask(bits))
            }
            'e' => {
                // "mem[ADDR] = VALUE": the address sits between '[' and ']'.
                let address = lhs
                    .chars()
                    .skip(4)
                    .take_while(|c| *c != ']')
                    .collect::<String>()
                    .parse::<usize>()
                    .unwrap();
                let value = parts.next().unwrap().parse::<u64>().unwrap();
                Ok(Line::Write(Write { value, address }))
            }
            _ => panic!(),
        }
    }
}
|
#![feature(panic_info_message)]
#![feature(int_to_from_bytes)]
extern crate chacha;
extern crate rand;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate sha3;
extern crate toml;
pub mod config;
pub mod connector;
pub mod encryption;
pub mod keyboard_and_clicks;
pub mod keyboard_reset;
pub mod mouse;
pub mod pretty_panic;
pub mod utils;
use std::time::Duration;
/// Delay between main-loop iterations. One nanosecond is effectively "no
/// sleep" — the loop runs as a busy spin. NOTE(review): confirm this is
/// intentional rather than a placeholder value.
pub const MAIN_LOOP_INTERVAL: Duration = Duration::from_nanos(1);
|
#![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// Error payload returned by the service (generated model).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Error {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    // Omitted from JSON entirely when there are no nested details.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub details: Vec<ErrorDetail>,
}
/// One nested entry of `Error::details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorDetail {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
}
/// Response wrapper: a list of workspace collections.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceCollectionList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<WorkspaceCollection>,
}
/// Response wrapper: a list of workspaces.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Workspace>,
}
/// Response wrapper: a list of provider operations.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationList {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
}
/// SKU of a workspace collection. This API version defines exactly one
/// name/tier combination (`S1` / `Standard`).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AzureSku {
    pub name: azure_sku::Name,
    pub tier: azure_sku::Tier,
}
/// Enum namespace for `AzureSku` fields (generated naming scheme).
pub mod azure_sku {
    use super::*;
    /// The only SKU name defined by this API version.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Name {
        S1,
    }
    /// The only SKU tier defined by this API version.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Tier {
        Standard,
    }
}
/// The pair of access keys of a workspace collection.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceCollectionAccessKeys {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key1: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub key2: Option<String>,
}
/// Identifies which of the two access keys an operation targets.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceCollectionAccessKey {
    #[serde(rename = "keyName", default, skip_serializing_if = "Option::is_none")]
    pub key_name: Option<workspace_collection_access_key::KeyName>,
}
/// Enum namespace for `WorkspaceCollectionAccessKey` (generated naming scheme).
pub mod workspace_collection_access_key {
    use super::*;
    /// Wire values are lowercase ("key1"/"key2"); variants are renamed for serde.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum KeyName {
        #[serde(rename = "key1")]
        Key1,
        #[serde(rename = "key2")]
        Key2,
    }
}
/// A workspace resource. `properties` is an untyped JSON bag in this API
/// version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Workspace {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the trailing underscore + rename.
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// A workspace collection resource (tracked ARM resource: location, tags,
/// SKU). `tags` and `properties` are untyped JSON bags in this API version.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct WorkspaceCollection {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<AzureSku>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<serde_json::Value>,
}
/// Request body for creating a workspace collection (location, tags, SKU).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CreateWorkspaceCollectionRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<AzureSku>,
}
/// Request body for updating a workspace collection. Only tags and SKU are
/// present — NOTE(review): presumably location is immutable; confirm against
/// the service API documentation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct UpdateWorkspaceCollectionRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tags: Option<serde_json::Value>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sku: Option<AzureSku>,
}
/// Request body for a name-availability check.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameRequest {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
/// Result of a name-availability check; `reason`/`message` explain a
/// negative answer.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct CheckNameResponse {
    #[serde(rename = "nameAvailable", default, skip_serializing_if = "Option::is_none")]
    pub name_available: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reason: Option<check_name_response::Reason>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
/// Enum namespace for `CheckNameResponse` (generated naming scheme).
pub mod check_name_response {
    use super::*;
    /// Why a name is not available: already taken, or syntactically invalid.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Reason {
        Unavailable,
        Invalid,
    }
}
/// Request body for migrating workspace collections to another resource group.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MigrateWorkspaceCollectionRequest {
    #[serde(rename = "targetResourceGroup", default, skip_serializing_if = "Option::is_none")]
    pub target_resource_group: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub resources: Vec<String>,
}
/// A provider operation, as listed by the operations endpoint.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<Display>,
}
/// Human-readable metadata of an `Operation`. NOTE(review): this shadows
/// `std::fmt::Display` within this module — unambiguous here, but mind it
/// when adding `use std::fmt::*`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Display {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub provider: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub resource: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub operation: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub origin: Option<String>,
}
|
use op;
use regex;
/// A transformation applied to each op as it flows through.
pub trait OpTransform {
    /// Maps `op` to the op that should actually be applied downstream.
    fn transform(&self, op: op::Op) -> op::Op;
}
/// Filters ops by namespace: ops whose namespace matches the pattern pass
/// through unchanged; everything else is turned into a no-op.
pub struct NsFilterTransform {
    allowed_pattern: regex::Regex,
}
impl NsFilterTransform {
    /// Creates a filter that allows only namespaces matching `pattern`.
    /// (Parameter renamed from the misspelled `patten`.)
    pub fn new(pattern: regex::Regex) -> NsFilterTransform {
        NsFilterTransform { allowed_pattern: pattern }
    }
}
impl OpTransform for NsFilterTransform {
    /// Passes `op` through when its namespace matches the allowed pattern;
    /// otherwise replaces it with a `NoOp` carrying the same ts/h metadata.
    fn transform(&self, op: op::Op) -> op::Op {
        let allowed = match op.get_ns() {
            Some(ns) => self.allowed_pattern.is_match(ns),
            None => false,
        };
        if allowed {
            op
        } else {
            op::Op::NoOp {
                ts: op.get_ts().clone(),
                h: op.get_h().clone(),
            }
        }
    }
}
|
extern crate parallel_iterator;
use parallel_iterator::ParallelIterator;
/// Stand-in for an expensive per-item computation: maps `i` to `i + 1`.
fn do_some_work(i: u32) -> u32 {
    let result = i + 1;
    result
}
/// Runs `do_some_work` over 0..100 via `ParallelIterator` and prints each
/// result as it arrives. NOTE(review): with parallel workers the output
/// order is presumably not deterministic — confirm against the crate docs.
fn main() {
    // First closure produces the input iterator; the second produces the
    // per-item worker function (here the plain fn item `do_some_work`).
    for i in ParallelIterator::new(|| (0u32..100), || do_some_work) {
        println!("Got a number: {}!", i);
    }
}
|
use crate::glsl::{Glsl, GlslFragment, GlslLine};
use std::{
collections::HashMap,
convert::{TryFrom, TryInto},
};
use syn::{spanned::Spanned, Block, Error, ExprBlock, Result};
use crate::{
yasl_ident::YaslIdent,
yasl_stmt::YaslStmt,
yasl_type::{Typed, YaslType},
};
/// A `{ ... }` block of YASL statements, kept together with its original
/// brace token so emitted GLSL lines can point back at the source span.
#[derive(Debug)]
pub struct YaslBlock {
    brace_token: syn::token::Brace,
    stmts: Vec<YaslStmt>,
}
impl YaslBlock {
    /// Propagates type annotations through the block's statements.
    ///
    /// Starts from a copy of `global_idents` and threads a growing scope
    /// through the statements in order: idents introduced by a statement
    /// become visible to the statements that follow it.
    ///
    /// NOTE(review): `get_type().unwrap()` panics if a statement introduces
    /// an ident that is still untyped — confirm `update_idents` only yields
    /// typed idents. The method name keeps the existing "anotation" spelling
    /// because external callers depend on it.
    pub fn attempt_type_anotation(&mut self, global_idents: &HashMap<String, YaslType>) {
        let mut idents = global_idents.clone();
        for stmt in self.stmts.iter_mut() {
            for ident in stmt.update_idents() {
                idents.insert(ident.to_string(), ident.get_type().unwrap().clone());
            }
            stmt.attempt_type_anotation(&idents);
        }
    }
}
impl From<&YaslBlock> for Glsl {
    /// Renders the block as GLSL: an opening `{`, every statement in order,
    /// then a closing `}` — all brace lines spanned to the original brace
    /// token so errors map back to the source.
    fn from(block: &YaslBlock) -> Glsl {
        let brace = |text: &str| {
            Glsl::Line(GlslLine {
                span: Some(block.brace_token.span),
                ends_with_semi: false,
                glsl_string: text.into(),
            })
        };
        let mut elements = vec![brace("{")];
        elements.extend(block.stmts.iter().map(|stmt| stmt.into()));
        elements.push(brace("}"));
        Glsl::Fragment(GlslFragment { elements })
    }
}
impl TryFrom<Block> for YaslBlock {
type Error = Error;
fn try_from(block: Block) -> Result<Self> {
let mut stmts = Vec::new();
for s in block.stmts.into_iter() {
stmts.push(s.try_into()?);
}
let brace_token = block.brace_token;
Ok(Self { brace_token, stmts })
}
}
impl TryFrom<ExprBlock> for YaslBlock {
type Error = Error;
fn try_from(block: ExprBlock) -> Result<Self> {
block.block.try_into()
}
}
|
use ::libc;
use std::convert::TryInto;
extern "C" {
#[no_mangle]
fn memcpy(_: *mut libc::c_void, _: *const libc::c_void, _: libc::c_ulong)
-> *mut libc::c_void;
#[no_mangle]
fn memmove(_: *mut libc::c_void, _: *const libc::c_void, _: libc::c_ulong)
-> *mut libc::c_void;
}
// Fixed-width aliases emitted by c2rust to mirror the C <stdint.h> types.
pub type uint8_t = __uint8_t;
pub type __uint8_t = libc::c_uchar;
pub type uint32_t = __uint32_t;
pub type __uint32_t = libc::c_uint;
pub type uint64_t = __uint64_t;
pub type __uint64_t = libc::c_ulong;
pub type uint16_t = __uint16_t;
pub type __uint16_t = libc::c_ushort;
/*
FastLZ - Byte-aligned LZ77 compression library
Copyright (C) 2005-2020 Ariya Hidayat <ariya.hidayat@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/*
* Always check for bound when decompressing.
* Generally it is best to leave it defined.
*/
/*
* Give hints to the compiler for branch prediction optimization.
*/
/*
* Specialize custom 64-bit implementation for speed improvements.
*/
/// Overlap-tolerant copy used by the decompressor. When the ranges are
/// disjoint (dest begins at or after src + count) and the run is longer than
/// 4 bytes it defers to libc `memmove`; otherwise it copies forward one byte
/// at a time so self-overlapping LZ back-references replicate earlier output.
/// The `current_block_7` state machine is c2rust's encoding of the original
/// C switch fall-through (counts 3/2/1 share the tail copies).
unsafe extern "C" fn fastlz_memmove(mut dest: *mut uint8_t,
                                    mut src: *const uint8_t,
                                    mut count: uint32_t) {
    if count > 4 as libc::c_int as libc::c_uint &&
           dest >= src.offset(count as isize) as *mut uint8_t {
        memmove(dest as *mut libc::c_void, src as *const libc::c_void,
                count as libc::c_ulong);
    } else {
        let mut current_block_7: u64;
        match count {
            3 => {
                let fresh2 = src;
                src = src.offset(1);
                let fresh3 = dest;
                dest = dest.offset(1);
                *fresh3 = *fresh2;
                current_block_7 = 10826308906768316228;
            }
            2 => { current_block_7 = 10826308906768316228; }
            1 => { current_block_7 = 14043750191174823896; }
            0 => { current_block_7 = 2968425633554183086; }
            _ => {
                // General case: plain forward byte-by-byte copy.
                loop  {
                    let fresh0 = src;
                    src = src.offset(1);
                    let fresh1 = dest;
                    dest = dest.offset(1);
                    *fresh1 = *fresh0;
                    count = count.wrapping_sub(1);
                    if !(count != 0) { break ; }
                }
                current_block_7 = 2968425633554183086;
            }
        }
        match current_block_7 {
            10826308906768316228 => {
                let fresh4 = src;
                src = src.offset(1);
                let fresh5 = dest;
                dest = dest.offset(1);
                *fresh5 = *fresh4;
                current_block_7 = 14043750191174823896;
            }
            _ => { }
        }
        match current_block_7 {
            14043750191174823896 => {
                let fresh6 = src;
                src = src.offset(1);
                let fresh7 = dest;
                dest = dest.offset(1);
                *fresh7 = *fresh6
            }
            _ => { }
        }
    };
}
/// Thin wrapper over libc `memcpy` for non-overlapping ranges.
unsafe extern "C" fn fastlz_memcpy(mut dest: *mut uint8_t,
                                   mut src: *const uint8_t,
                                   mut count: uint32_t) {
    memcpy(dest as *mut libc::c_void, src as *const libc::c_void,
           count as libc::c_ulong);
}
/// Direct 32-bit load through a pointer cast, as translated from C.
/// NOTE(review): undefined behavior for misaligned pointers in strict Rust —
/// kept as-is to match the c2rust translation.
unsafe extern "C" fn flz_readu32(mut ptr: *const libc::c_void) -> uint32_t {
    return *(ptr as *const uint32_t);
}
/// Direct 64-bit load through a pointer cast; same alignment caveat as
/// `flz_readu32`.
unsafe extern "C" fn flz_readu64(mut ptr: *const libc::c_void) -> uint64_t {
    return *(ptr as *const uint64_t);
}
/// Compares the byte runs at `p` and `q` (with `q` bounded by `r`) and
/// returns the match length measured from `p`, probing 8 and then 4 bytes at
/// a time before falling back to single bytes. Note the byte loop advances
/// past the first mismatching byte before breaking, so the returned count
/// includes that byte.
unsafe extern "C" fn flz_cmp(mut p: *const uint8_t, mut q: *const uint8_t,
                             mut r: *const uint8_t) -> uint32_t {
    let mut start: *const uint8_t = p;
    if flz_readu64(p as *const libc::c_void) ==
           flz_readu64(q as *const libc::c_void) {
        p = p.offset(8 as libc::c_int as isize);
        q = q.offset(8 as libc::c_int as isize)
    }
    if flz_readu32(p as *const libc::c_void) ==
           flz_readu32(q as *const libc::c_void) {
        p = p.offset(4 as libc::c_int as isize);
        q = q.offset(4 as libc::c_int as isize)
    }
    while q < r {
        let fresh8 = p;
        p = p.offset(1);
        let fresh9 = q;
        q = q.offset(1);
        if *fresh8 as libc::c_int != *fresh9 as libc::c_int { break ; }
    }
    return p.wrapping_offset_from(start) as libc::c_long as uint32_t;
}
/// Copies `count` bytes as 8-byte words, rounding the write up to whole
/// words: the `< 16` path writes 8 or 16 bytes, the other path writes 32.
/// It can therefore write past `count` — callers must guarantee output slack.
unsafe extern "C" fn flz_copy64(mut dest: *mut uint8_t,
                                mut src: *const uint8_t,
                                mut count: uint32_t) {
    let mut p: *const uint64_t = src as *const uint64_t;
    let mut q: *mut uint64_t = dest as *mut uint64_t;
    if count < 16 as libc::c_int as libc::c_uint {
        if count >= 8 as libc::c_int as libc::c_uint {
            let fresh10 = p;
            p = p.offset(1);
            let fresh11 = q;
            q = q.offset(1);
            *fresh11 = *fresh10
        }
        let fresh12 = p;
        p = p.offset(1);
        let fresh13 = q;
        q = q.offset(1);
        *fresh13 = *fresh12
    } else {
        // 16 <= count: copy four words (32 bytes) unconditionally.
        let fresh14 = p;
        p = p.offset(1);
        let fresh15 = q;
        q = q.offset(1);
        *fresh15 = *fresh14;
        let fresh16 = p;
        p = p.offset(1);
        let fresh17 = q;
        q = q.offset(1);
        *fresh17 = *fresh16;
        let fresh18 = p;
        p = p.offset(1);
        let fresh19 = q;
        q = q.offset(1);
        *fresh19 = *fresh18;
        let fresh20 = p;
        p = p.offset(1);
        let fresh21 = q;
        q = q.offset(1);
        *fresh21 = *fresh20
    };
}
/// Copies exactly 32 bytes (four 8-byte words) despite the "256" in the name
/// — that is 256 *bits*. Used for full literal chunks.
unsafe extern "C" fn flz_copy256(mut dest: *mut libc::c_void,
                                 mut src: *const libc::c_void) {
    let mut p: *const uint64_t = src as *const uint64_t;
    let mut q: *mut uint64_t = dest as *mut uint64_t;
    let fresh22 = p;
    p = p.offset(1);
    let fresh23 = q;
    q = q.offset(1);
    *fresh23 = *fresh22;
    let fresh24 = p;
    p = p.offset(1);
    let fresh25 = q;
    q = q.offset(1);
    *fresh25 = *fresh24;
    let fresh26 = p;
    p = p.offset(1);
    let fresh27 = q;
    q = q.offset(1);
    *fresh27 = *fresh26;
    let fresh28 = p;
    p = p.offset(1);
    let fresh29 = q;
    q = q.offset(1);
    *fresh29 = *fresh28;
}
/// Multiplicative hash of a 24-bit sequence into a 14-bit table index:
/// (v * 2654435769) >> (32 - 14), masked to HASH_LOG = 14 bits. The constant
/// is 2^32 / phi (Knuth's golden-ratio multiplier).
unsafe extern "C" fn flz_hash(mut v: uint32_t) -> uint16_t {
    let mut h: uint32_t =
        (v as libc::c_longlong * 2654435769 as libc::c_longlong >>
             32 as libc::c_int - 14 as libc::c_int) as uint32_t;
    return (h &
                (((1 as libc::c_int) << 14 as libc::c_int) - 1 as libc::c_int)
                    as libc::c_uint) as uint16_t;
}
/// Emits a literal run: chunks of up to 32 bytes, each preceded by an opcode
/// byte holding (chunk_length - 1). Uses the wide 32-byte copies, which may
/// write past the end of a short final chunk — output slack required.
/// Returns the advanced output pointer.
unsafe extern "C" fn flz_literals(mut runs: uint32_t, mut src: *const uint8_t,
                                  mut dest: *mut uint8_t) -> *mut uint8_t {
    while runs >= 32 as libc::c_int as libc::c_uint {
        let fresh30 = dest;
        dest = dest.offset(1);
        *fresh30 = (32 as libc::c_int - 1 as libc::c_int) as uint8_t;
        flz_copy256(dest as *mut libc::c_void, src as *const libc::c_void);
        src = src.offset(32 as libc::c_int as isize);
        dest = dest.offset(32 as libc::c_int as isize);
        runs =
            (runs as
                 libc::c_uint).wrapping_sub(32 as libc::c_int as libc::c_uint)
                as uint32_t as uint32_t
    }
    if runs > 0 as libc::c_int as libc::c_uint {
        let fresh31 = dest;
        dest = dest.offset(1);
        *fresh31 =
            runs.wrapping_sub(1 as libc::c_int as libc::c_uint) as uint8_t;
        flz_copy64(dest, src, runs);
        dest = dest.offset(runs as isize)
    }
    return dest;
}
/* special case of memcpy: at most 32 bytes */
/// Copies `count` (<= 32) bytes: whole 8-byte words first, then a final
/// `memcpy` for the tail. Unlike `flz_copy64` this never writes past
/// `count`, so it is safe at the very end of the output buffer.
unsafe extern "C" fn flz_smallcopy(mut dest: *mut uint8_t,
                                   mut src: *const uint8_t,
                                   mut count: uint32_t) {
    if count >= 8 as libc::c_int as libc::c_uint {
        let mut p: *const uint64_t =
            src as *const uint64_t; /* because readU32 */
        let mut q: *mut uint64_t = dest as *mut uint64_t;
        while count > 8 as libc::c_int as libc::c_uint {
            let fresh32 = p;
            p = p.offset(1);
            let fresh33 = q;
            q = q.offset(1);
            *fresh33 = *fresh32;
            count =
                (count as
                     libc::c_uint).wrapping_sub(8 as libc::c_int as
                                                    libc::c_uint) as uint32_t
                    as uint32_t;
            dest = dest.offset(8 as libc::c_int as isize);
            src = src.offset(8 as libc::c_int as isize)
        }
    }
    fastlz_memcpy(dest, src, count);
}
/// Same literal-run encoding as `flz_literals`, but copies with
/// `flz_smallcopy` (no overrun), making it safe for the final bytes of the
/// output buffer. Returns the advanced output pointer.
unsafe extern "C" fn flz_finalize(mut runs: uint32_t, mut src: *const uint8_t,
                                  mut dest: *mut uint8_t) -> *mut uint8_t {
    while runs >= 32 as libc::c_int as libc::c_uint {
        let fresh34 = dest;
        dest = dest.offset(1);
        *fresh34 = (32 as libc::c_int - 1 as libc::c_int) as uint8_t;
        flz_smallcopy(dest, src, 32 as libc::c_int as uint32_t);
        src = src.offset(32 as libc::c_int as isize);
        dest = dest.offset(32 as libc::c_int as isize);
        runs =
            (runs as
                 libc::c_uint).wrapping_sub(32 as libc::c_int as libc::c_uint)
                as uint32_t as uint32_t
    }
    if runs > 0 as libc::c_int as libc::c_uint {
        let fresh35 = dest;
        dest = dest.offset(1);
        *fresh35 =
            runs.wrapping_sub(1 as libc::c_int as libc::c_uint) as uint8_t;
        flz_smallcopy(dest, src, runs);
        dest = dest.offset(runs as isize)
    }
    return dest;
}
/// Encodes a level-1 back-reference at `op`: a 2-byte short form when
/// len < 7, a 3-byte long form otherwise; matches longer than 262 (= 264 - 2)
/// are split into several maximal long matches first. `distance` is
/// decremented once up front because the format stores distance - 1.
/// Returns the advanced output pointer.
unsafe extern "C" fn flz1_match(mut len: uint32_t, mut distance: uint32_t,
                                mut op: *mut uint8_t) -> *mut uint8_t {
    distance = distance.wrapping_sub(1);
    if (len > (264 as libc::c_int - 2 as libc::c_int) as libc::c_uint) as
           libc::c_int as libc::c_long != 0 {
        while len > (264 as libc::c_int - 2 as libc::c_int) as libc::c_uint {
            let fresh36 = op;
            op = op.offset(1);
            *fresh36 =
                (((7 as libc::c_int) << 5 as libc::c_int) as
                     libc::c_uint).wrapping_add(distance >> 8 as libc::c_int)
                    as uint8_t;
            let fresh37 = op;
            op = op.offset(1);
            *fresh37 =
                (264 as libc::c_int - 2 as libc::c_int - 7 as libc::c_int -
                     2 as libc::c_int) as uint8_t;
            let fresh38 = op;
            op = op.offset(1);
            *fresh38 =
                (distance & 255 as libc::c_int as libc::c_uint) as uint8_t;
            len =
                (len as
                     libc::c_uint).wrapping_sub((264 as libc::c_int -
                                                     2 as libc::c_int) as
                                                    libc::c_uint) as uint32_t
                    as uint32_t
        }
    }
    if len < 7 as libc::c_int as libc::c_uint {
        // Short form: 3-bit length + 13-bit distance in two bytes.
        let fresh39 = op;
        op = op.offset(1);
        *fresh39 =
            (len <<
                 5 as libc::c_int).wrapping_add(distance >> 8 as libc::c_int)
                as uint8_t;
        let fresh40 = op;
        op = op.offset(1);
        *fresh40 = (distance & 255 as libc::c_int as libc::c_uint) as uint8_t
    } else {
        // Long form: opcode 7, then (len - 7), then the low distance byte.
        let fresh41 = op;
        op = op.offset(1);
        *fresh41 =
            (((7 as libc::c_int) << 5 as libc::c_int) as
                 libc::c_uint).wrapping_add(distance >> 8 as libc::c_int) as
                uint8_t;
        let fresh42 = op;
        op = op.offset(1);
        *fresh42 =
            len.wrapping_sub(7 as libc::c_int as libc::c_uint) as uint8_t;
        let fresh43 = op;
        op = op.offset(1);
        *fresh43 = (distance & 255 as libc::c_int as libc::c_uint) as uint8_t
    }
    return op;
}
/// FastLZ level-1 compressor: hashes 24-bit sequences into a 16K-entry
/// table, emits literal runs and back-references within an 8192-byte window,
/// and flushes the tail with `flz_finalize`. Returns the number of bytes
/// written to `output`. NOTE(review): the wide copies can overrun — per the
/// upstream FastLZ contract the output buffer needs slack (at least 5%
/// larger than the input); confirm callers honor that.
#[no_mangle]
pub unsafe extern "C" fn fastlz1_compress(mut input: *const libc::c_void,
                                          mut length: libc::c_int,
                                          mut output: *mut libc::c_void)
 -> libc::c_int {
    let mut ip: *const uint8_t = input as *const uint8_t;
    let mut ip_start: *const uint8_t = ip;
    let mut ip_bound: *const uint8_t =
        ip.offset(length as isize).offset(-(4 as libc::c_int as isize));
    let mut ip_limit: *const uint8_t =
        ip.offset(length as
                      isize).offset(-(12 as libc::c_int as
                                          isize)).offset(-(1 as libc::c_int as
                                                               isize));
    let mut op: *mut uint8_t = output as *mut uint8_t;
    let mut htab: [uint32_t; 16384] = [0; 16384];
    let mut seq: uint32_t = 0;
    let mut hash: uint32_t = 0;
    /* initializes hash table */
    hash = 0 as libc::c_int as uint32_t;
    while hash < ((1 as libc::c_int) << 14 as libc::c_int) as libc::c_uint {
        htab[hash as usize] = 0 as libc::c_int as uint32_t;
        hash = hash.wrapping_add(1)
    }
    /* we start with literal copy */
    let mut anchor: *const uint8_t = ip;
    ip = ip.offset(2 as libc::c_int as isize);
    /* main loop */
    while (ip < ip_limit) as libc::c_int as libc::c_long != 0 {
        let mut ref_0: *const uint8_t = 0 as *const uint8_t;
        let mut distance: uint32_t = 0;
        let mut cmp: uint32_t = 0;
        loop
             /* find potential match */
             {
            seq =
                flz_readu32(ip as *const libc::c_void) &
                    0xffffff as libc::c_int as libc::c_uint;
            hash = flz_hash(seq) as uint32_t;
            ref_0 = ip_start.offset(htab[hash as usize] as isize);
            // Store current position; compare against the previous occupant.
            htab[hash as usize] =
                ip.wrapping_offset_from(ip_start) as libc::c_long as uint32_t;
            distance =
                ip.wrapping_offset_from(ref_0) as libc::c_long as uint32_t;
            cmp =
                if (distance < 8192 as libc::c_int as libc::c_uint) as
                       libc::c_int as libc::c_long != 0 {
                    (flz_readu32(ref_0 as *const libc::c_void)) &
                        0xffffff as libc::c_int as libc::c_uint
                } else { 0x1000000 as libc::c_int as libc::c_uint };
            if (ip >= ip_limit) as libc::c_int as libc::c_long != 0 {
                break ;
            }
            ip = ip.offset(1);
            if !(seq != cmp) { break ; }
        }
        if (ip >= ip_limit) as libc::c_int as libc::c_long != 0 { break ; }
        ip = ip.offset(-1);
        if (ip > anchor) as libc::c_int as libc::c_long != 0 {
            // Flush the pending literals before emitting the match.
            op =
                flz_literals(ip.wrapping_offset_from(anchor) as libc::c_long
                                 as uint32_t, anchor, op)
        }
        let mut len: uint32_t =
            flz_cmp(ref_0.offset(3 as libc::c_int as isize),
                    ip.offset(3 as libc::c_int as isize), ip_bound);
        op = flz1_match(len, distance, op);
        /* update the hash at match boundary */
        ip = ip.offset(len as isize);
        seq = flz_readu32(ip as *const libc::c_void);
        hash =
            flz_hash(seq & 0xffffff as libc::c_int as libc::c_uint) as
                uint32_t;
        let fresh44 = ip;
        ip = ip.offset(1);
        htab[hash as usize] =
            fresh44.wrapping_offset_from(ip_start) as libc::c_long as
                uint32_t;
        seq >>= 8 as libc::c_int;
        hash = flz_hash(seq) as uint32_t;
        let fresh45 = ip;
        ip = ip.offset(1);
        htab[hash as usize] =
            fresh45.wrapping_offset_from(ip_start) as libc::c_long as
                uint32_t;
        anchor = ip
    }
    // Whatever remains after the last match is emitted as literals.
    let mut copy: uint32_t =
        (input as
             *mut uint8_t).offset(length as
                                      isize).wrapping_offset_from(anchor) as
            libc::c_long as uint32_t;
    op = flz_finalize(copy, anchor, op);
    return op.wrapping_offset_from(output as *mut uint8_t) as libc::c_long as
               libc::c_int;
}
/// FastLZ level-1 decompressor. Each opcode's top 3 bits select the kind:
/// 0 means a literal run of (ctrl + 1) bytes; anything else is a
/// back-reference whose offset is ((ctrl & 31) << 8) | next byte, with one
/// length-extension byte when the 3-bit length field is 7. Returns the
/// number of bytes written, or 0 on any bounds violation (truncated input,
/// output overflow, or a reference before the start of the output buffer).
#[no_mangle]
pub unsafe extern "C" fn fastlz1_decompress(mut input: *const libc::c_void,
                                            mut length: libc::c_int,
                                            mut output: *mut libc::c_void,
                                            mut maxout: libc::c_int)
 -> libc::c_int {
    let mut ip: *const uint8_t = input as *const uint8_t;
    let mut ip_limit: *const uint8_t = ip.offset(length as isize);
    let mut ip_bound: *const uint8_t =
        ip_limit.offset(-(2 as libc::c_int as isize));
    let mut op: *mut uint8_t = output as *mut uint8_t;
    let mut op_limit: *mut uint8_t = op.offset(maxout as isize);
    let fresh46 = ip;
    ip = ip.offset(1);
    let mut ctrl: uint32_t =
        (*fresh46 as libc::c_int & 31 as libc::c_int) as uint32_t;
    loop  {
        if ctrl >= 32 as libc::c_int as libc::c_uint {
            // Back-reference.
            let mut len: uint32_t =
                (ctrl >>
                     5 as
                         libc::c_int).wrapping_sub(1 as libc::c_int as
                                                       libc::c_uint);
            let mut ofs: uint32_t =
                (ctrl & 31 as libc::c_int as libc::c_uint) <<
                    8 as libc::c_int;
            let mut ref_0: *const uint8_t =
                op.offset(-(ofs as
                                isize)).offset(-(1 as libc::c_int as isize));
            if len == (7 as libc::c_int - 1 as libc::c_int) as libc::c_uint {
                if !(ip <= ip_bound) as libc::c_int as libc::c_long != 0 {
                    return 0 as libc::c_int
                }
                let fresh47 = ip;
                ip = ip.offset(1);
                len =
                    (len as
                         libc::c_uint).wrapping_add(*fresh47 as libc::c_uint)
                        as uint32_t as uint32_t
            }
            let fresh48 = ip;
            ip = ip.offset(1);
            ref_0 = ref_0.offset(-(*fresh48 as libc::c_int as isize));
            len =
                (len as
                     libc::c_uint).wrapping_add(3 as libc::c_int as
                                                    libc::c_uint) as uint32_t
                    as uint32_t;
            if !(op.offset(len as isize) <= op_limit) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            if !(ref_0 >= output as *mut uint8_t) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            // memmove handles self-overlapping references correctly.
            fastlz_memmove(op, ref_0, len);
            op = op.offset(len as isize)
        } else {
            // Literal run of (ctrl + 1) bytes.
            ctrl = ctrl.wrapping_add(1);
            if !(op.offset(ctrl as isize) <= op_limit) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            if !(ip.offset(ctrl as isize) <= ip_limit) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            fastlz_memcpy(op, ip, ctrl);
            ip = ip.offset(ctrl as isize);
            op = op.offset(ctrl as isize)
        }
        if (ip > ip_bound) as libc::c_int as libc::c_long != 0 { break ; }
        let fresh49 = ip;
        ip = ip.offset(1);
        ctrl = *fresh49 as uint32_t
    }
    return op.wrapping_offset_from(output as *mut uint8_t) as libc::c_long as
               libc::c_int;
}
/// Encodes a level-2 back-reference. Distances that fit the 13-bit field
/// (< 8191 after the distance-1 adjustment) use the level-1 short/long forms,
/// with the long form extended by open-ended 255 length bytes. Larger
/// distances subtract 8191 and mark the escape by storing 31 in the low
/// distance bits followed by a 0xFF byte and a 16-bit extended offset.
/// Returns the advanced output pointer.
unsafe extern "C" fn flz2_match(mut len: uint32_t, mut distance: uint32_t,
                                mut op: *mut uint8_t) -> *mut uint8_t {
    distance = distance.wrapping_sub(1);
    if distance < 8191 as libc::c_int as libc::c_uint {
        if len < 7 as libc::c_int as libc::c_uint {
            // Short form: 3-bit length + 13-bit distance.
            let fresh50 = op;
            op = op.offset(1);
            *fresh50 =
                (len <<
                     5 as
                         libc::c_int).wrapping_add(distance >>
                                                       8 as libc::c_int) as
                    uint8_t;
            let fresh51 = op;
            op = op.offset(1);
            *fresh51 =
                (distance & 255 as libc::c_int as libc::c_uint) as uint8_t
        } else {
            // Long form: opcode 7, then 255-chunked length, then low byte.
            let fresh52 = op;
            op = op.offset(1);
            *fresh52 =
                (((7 as libc::c_int) << 5 as libc::c_int) as
                     libc::c_uint).wrapping_add(distance >> 8 as libc::c_int)
                    as uint8_t;
            len =
                (len as
                     libc::c_uint).wrapping_sub(7 as libc::c_int as
                                                    libc::c_uint) as uint32_t
                    as uint32_t;
            while len >= 255 as libc::c_int as libc::c_uint {
                let fresh53 = op;
                op = op.offset(1);
                *fresh53 = 255 as libc::c_int as uint8_t;
                len =
                    (len as
                         libc::c_uint).wrapping_sub(255 as libc::c_int as
                                                        libc::c_uint) as
                        uint32_t as uint32_t
            }
            let fresh54 = op;
            op = op.offset(1);
            *fresh54 = len as uint8_t;
            let fresh55 = op;
            op = op.offset(1);
            *fresh55 =
                (distance & 255 as libc::c_int as libc::c_uint) as uint8_t
        }
    } else if len < 7 as libc::c_int as libc::c_uint {
        // Far match, short length: 31 escape + 0xFF + 16-bit extra offset.
        distance =
            (distance as
                 libc::c_uint).wrapping_sub(8191 as libc::c_int as
                                                libc::c_uint) as uint32_t as
                uint32_t;
        let fresh56 = op;
        op = op.offset(1);
        *fresh56 =
            (len <<
                 5 as
                     libc::c_int).wrapping_add(31 as libc::c_int as
                                                   libc::c_uint) as uint8_t;
        let fresh57 = op;
        op = op.offset(1);
        *fresh57 = 255 as libc::c_int as uint8_t;
        let fresh58 = op;
        op = op.offset(1);
        *fresh58 = (distance >> 8 as libc::c_int) as uint8_t;
        let fresh59 = op;
        op = op.offset(1);
        *fresh59 = (distance & 255 as libc::c_int as libc::c_uint) as uint8_t
    } else {
        // Far match, long length: escape plus 255-chunked length bytes.
        distance =
            (distance as
                 libc::c_uint).wrapping_sub(8191 as libc::c_int as
                                                libc::c_uint) as uint32_t as
                uint32_t;
        let fresh60 = op;
        op = op.offset(1);
        *fresh60 =
            (((7 as libc::c_int) << 5 as libc::c_int) + 31 as libc::c_int) as
                uint8_t;
        len =
            (len as
                 libc::c_uint).wrapping_sub(7 as libc::c_int as libc::c_uint)
                as uint32_t as uint32_t;
        while len >= 255 as libc::c_int as libc::c_uint {
            let fresh61 = op;
            op = op.offset(1);
            *fresh61 = 255 as libc::c_int as uint8_t;
            len =
                (len as
                     libc::c_uint).wrapping_sub(255 as libc::c_int as
                                                    libc::c_uint) as uint32_t
                    as uint32_t
        }
        let fresh62 = op;
        op = op.offset(1);
        *fresh62 = len as uint8_t;
        let fresh63 = op;
        op = op.offset(1);
        *fresh63 = 255 as libc::c_int as uint8_t;
        let fresh64 = op;
        op = op.offset(1);
        *fresh64 = (distance >> 8 as libc::c_int) as uint8_t;
        let fresh65 = op;
        op = op.offset(1);
        *fresh65 = (distance & 255 as libc::c_int as libc::c_uint) as uint8_t
    }
    return op;
}
/* Level-2 FastLZ compressor: greedy matcher using a 16K-entry hash table
   over 3-byte sequences, emitting literal runs and level-2 match opcodes.
   Returns the compressed size in bytes. The first output byte is later
   OR-ed with the level-2 marker. */
#[no_mangle]
pub unsafe extern "C" fn fastlz2_compress(mut input: *const libc::c_void,
                                          mut length: libc::c_int,
                                          mut output: *mut libc::c_void)
 -> libc::c_int {
    let mut ip: *const uint8_t = input as *const uint8_t;
    let mut ip_start: *const uint8_t = ip;
    /* far away, but not yet in the another galaxy... */
    let mut ip_bound: *const uint8_t =
        ip.offset(length as isize).offset(-(4 as libc::c_int as isize)); /* because readU32 */
    let mut ip_limit: *const uint8_t =
        ip.offset(length as isize).offset(-(12 as libc::c_int as
                                               isize)).offset(-(1 as libc::c_int as isize));
    let mut op: *mut uint8_t = output as *mut uint8_t;
    // Hash table mapping 3-byte sequence hashes to offsets from ip_start.
    let mut htab: [uint32_t; 16384] = [0; 16384];
    let mut seq: uint32_t = 0;
    let mut hash: uint32_t = 0;
    /* initializes hash table */
    hash = 0 as libc::c_int as uint32_t;
    while hash < ((1 as libc::c_int) << 14 as libc::c_int) as libc::c_uint {
        htab[hash as usize] = 0 as libc::c_int as uint32_t;
        hash = hash.wrapping_add(1)
    }
    /* we start with literal copy */
    let mut anchor: *const uint8_t = ip;
    ip = ip.offset(2 as libc::c_int as isize);
    /* main loop */
    while (ip < ip_limit) as libc::c_int as libc::c_long != 0 {
        let mut ref_0: *const uint8_t = 0 as *const uint8_t;
        let mut distance: uint32_t = 0;
        let mut cmp: uint32_t = 0;
        loop
        /* find potential match */
        {
            seq =
                flz_readu32(ip as *const libc::c_void) &
                    0xffffff as libc::c_int as libc::c_uint;
            hash = flz_hash(seq) as uint32_t;
            ref_0 = ip_start.offset(htab[hash as usize] as isize);
            htab[hash as usize] =
                ip.wrapping_offset_from(ip_start) as libc::c_long as uint32_t;
            distance = ip.wrapping_offset_from(ref_0) as libc::c_long as uint32_t;
            // Level 2 can encode distances up to 65535 + 8191 - 1 via the
            // extended form; beyond that, force a mismatch (0x1000000 can
            // never equal a 24-bit masked sequence).
            cmp =
                if (distance <
                        (65535 as libc::c_int + 8191 as libc::c_int - 1 as libc::c_int) as
                            libc::c_uint) as libc::c_int as libc::c_long != 0 {
                    (flz_readu32(ref_0 as *const libc::c_void)) &
                        0xffffff as libc::c_int as libc::c_uint
                } else { 0x1000000 as libc::c_int as libc::c_uint };
            if (ip >= ip_limit) as libc::c_int as libc::c_long != 0 {
                break ;
            }
            ip = ip.offset(1);
            if !(seq != cmp) { break ; }
        }
        if (ip >= ip_limit) as libc::c_int as libc::c_long != 0 { break ; }
        ip = ip.offset(-1);
        /* far, needs at least 5-byte match */
        if distance >= 8191 as libc::c_int as libc::c_uint {
            if *ref_0.offset(3 as libc::c_int as isize) as libc::c_int !=
                   *ip.offset(3 as libc::c_int as isize) as libc::c_int ||
                   *ref_0.offset(4 as libc::c_int as isize) as libc::c_int !=
                       *ip.offset(4 as libc::c_int as isize) as libc::c_int {
                ip = ip.offset(1);
                continue ;
            }
        }
        if (ip > anchor) as libc::c_int as libc::c_long != 0 {
            // Flush the pending literal run before emitting the match.
            op =
                flz_literals(ip.wrapping_offset_from(anchor) as libc::c_long as uint32_t,
                             anchor, op)
        }
        let mut len: uint32_t =
            flz_cmp(ref_0.offset(3 as libc::c_int as isize),
                    ip.offset(3 as libc::c_int as isize), ip_bound);
        op = flz2_match(len, distance, op);
        /* update the hash at match boundary */
        ip = ip.offset(len as isize);
        seq = flz_readu32(ip as *const libc::c_void);
        hash = flz_hash(seq & 0xffffff as libc::c_int as libc::c_uint) as uint32_t;
        let fresh66 = ip;
        ip = ip.offset(1);
        htab[hash as usize] =
            fresh66.wrapping_offset_from(ip_start) as libc::c_long as uint32_t;
        seq >>= 8 as libc::c_int;
        hash = flz_hash(seq) as uint32_t;
        let fresh67 = ip;
        ip = ip.offset(1);
        htab[hash as usize] =
            fresh67.wrapping_offset_from(ip_start) as libc::c_long as uint32_t;
        anchor = ip
    }
    // Emit everything after the last match as a final literal run.
    let mut copy: uint32_t =
        (input as *mut uint8_t).offset(length as isize).wrapping_offset_from(anchor) as
            libc::c_long as uint32_t;
    op = flz_finalize(copy, anchor, op);
    /* marker for fastlz2 */
    let ref mut fresh68 = *(output as *mut uint8_t);
    *fresh68 =
        (*fresh68 as libc::c_int | (1 as libc::c_int) << 5 as libc::c_int) as uint8_t;
    return op.wrapping_offset_from(output as *mut uint8_t) as libc::c_long as libc::c_int;
}
/* Level-2 FastLZ decompressor. Returns the number of bytes written to
   `output`, or 0 if the stream is malformed or would overrun `maxout`. */
#[no_mangle]
pub unsafe extern "C" fn fastlz2_decompress(mut input: *const libc::c_void,
                                            mut length: libc::c_int,
                                            mut output: *mut libc::c_void,
                                            mut maxout: libc::c_int)
 -> libc::c_int {
    let mut ip: *const uint8_t = input as *const uint8_t;
    let mut ip_limit: *const uint8_t = ip.offset(length as isize);
    let mut ip_bound: *const uint8_t = ip_limit.offset(-(2 as libc::c_int as isize));
    let mut op: *mut uint8_t = output as *mut uint8_t;
    let mut op_limit: *mut uint8_t = op.offset(maxout as isize);
    // The first byte also carries the level marker in its top bits; keep
    // only the low 5 bits as the first opcode.
    let fresh69 = ip;
    ip = ip.offset(1);
    let mut ctrl: uint32_t = (*fresh69 as libc::c_int & 31 as libc::c_int) as uint32_t;
    loop {
        if ctrl >= 32 as libc::c_int as libc::c_uint {
            // Match opcode: length lives in bits 5-7 (stored minus one),
            // the distance's high bits in the low 5 bits.
            let mut len: uint32_t =
                (ctrl >> 5 as libc::c_int).wrapping_sub(1 as libc::c_int as libc::c_uint);
            let mut ofs: uint32_t =
                (ctrl & 31 as libc::c_int as libc::c_uint) << 8 as libc::c_int;
            let mut ref_0: *const uint8_t =
                op.offset(-(ofs as isize)).offset(-(1 as libc::c_int as isize));
            let mut code: uint8_t = 0;
            if len == (7 as libc::c_int - 1 as libc::c_int) as libc::c_uint {
                // Long match: accumulate 0xff extension bytes until a
                // non-0xff remainder byte terminates the run.
                loop {
                    if !(ip <= ip_bound) as libc::c_int as libc::c_long != 0 {
                        return 0 as libc::c_int
                    }
                    let fresh70 = ip;
                    ip = ip.offset(1);
                    code = *fresh70;
                    len =
                        (len as libc::c_uint).wrapping_add(code as libc::c_uint) as uint32_t
                            as uint32_t;
                    if !(code as libc::c_int == 255 as libc::c_int) {
                        break ;
                    }
                }
            }
            let fresh71 = ip;
            ip = ip.offset(1);
            code = *fresh71;
            ref_0 = ref_0.offset(-(code as libc::c_int as isize));
            // Minimum encodable match length is 3.
            len =
                (len as libc::c_uint).wrapping_add(3 as libc::c_int as libc::c_uint) as
                    uint32_t as uint32_t;
            /* match from 16-bit distance */
            if (code as libc::c_int == 255 as libc::c_int) as libc::c_int as
                   libc::c_long != 0 {
                if (ofs ==
                        ((31 as libc::c_int) << 8 as libc::c_int) as libc::c_uint) as
                       libc::c_int as libc::c_long != 0 {
                    if !(ip < ip_bound) as libc::c_int as libc::c_long != 0 {
                        return 0 as libc::c_int
                    }
                    // Extended distance: two explicit bytes, rebased past
                    // the near-distance range (8191).
                    let fresh72 = ip;
                    ip = ip.offset(1);
                    ofs = ((*fresh72 as libc::c_int) << 8 as libc::c_int) as uint32_t;
                    let fresh73 = ip;
                    ip = ip.offset(1);
                    ofs =
                        (ofs as libc::c_uint).wrapping_add(*fresh73 as libc::c_uint) as
                            uint32_t as uint32_t;
                    ref_0 =
                        op.offset(-(ofs as isize)).offset(-(8191 as libc::c_int as
                                                                isize)).offset(-(1 as
                                                                                     libc::c_int
                                                                                     as isize))
                }
            }
            // Reject writes past op_limit and back-references before the
            // start of the output buffer.
            if !(op.offset(len as isize) <= op_limit) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            if !(ref_0 >= output as *mut uint8_t) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            // memmove-style copy: source and destination may overlap.
            fastlz_memmove(op, ref_0, len);
            op = op.offset(len as isize)
        } else {
            // Literal run: ctrl+1 bytes copied straight from the input.
            ctrl = ctrl.wrapping_add(1);
            if !(op.offset(ctrl as isize) <= op_limit) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            if !(ip.offset(ctrl as isize) <= ip_limit) as libc::c_int as
                   libc::c_long != 0 {
                return 0 as libc::c_int
            }
            fastlz_memcpy(op, ip, ctrl);
            ip = ip.offset(ctrl as isize);
            op = op.offset(ctrl as isize)
        }
        if (ip >= ip_limit) as libc::c_int as libc::c_long != 0 { break ; }
        let fresh74 = ip;
        ip = ip.offset(1);
        ctrl = *fresh74 as uint32_t
    }
    return op.wrapping_offset_from(output as *mut uint8_t) as libc::c_long as
               libc::c_int;
}
/// DEPRECATED: compress with an automatically chosen level.
///
/// Similar to `fastlz_compress_level`, but the level is picked from the
/// input size: blocks shorter than 64 KiB use level 1, everything else
/// uses level 2. Kept for backward compatibility and slated for removal
/// in a future version.
#[no_mangle]
pub unsafe extern "C" fn fastlz_compress(input: *const libc::c_void,
                                         length: libc::c_int,
                                         output: *mut libc::c_void)
 -> libc::c_int {
    /* for short block, choose fastlz1 */
    if length < 65536 as libc::c_int {
        fastlz1_compress(input, length, output)
    } else {
        /* else... */
        fastlz2_compress(input, length, output)
    }
}
/// Decompress a block of compressed data and returns the size of the
/// decompressed block. If error occurs, e.g. the compressed data is
/// corrupted or the output buffer is not large enough, then 0 (zero)
/// will be returned instead.
///
/// The input buffer and the output buffer can not overlap.
///
/// Decompression is memory safe and guaranteed not to write the output buffer
/// more than what is specified in maxout.
///
/// Note that the decompression will always work, regardless of the
/// compression level specified in fastlz_compress_level above (when
/// producing the compressed block).
///
#[no_mangle]
pub fn fastlz_decompress(input: &[u8], output: &[u8]) -> i32 {
    // An empty input has no level byte to inspect; report failure instead
    // of panicking on `input[0]`.
    if input.is_empty() {
        return 0;
    }
    // NOTE(review): the decompressors write through a pointer derived from
    // the shared `&[u8]` borrow of `output`, which is undefined behavior in
    // Rust; the parameter should be `&mut [u8]`. Left unchanged here because
    // altering the signature would break every caller — needs a coordinated fix.
    let length: i32 = input.len().try_into().unwrap();
    let maxout: i32 = output.len().try_into().unwrap();
    // Magic identifier for the compression level: the top 3 bits of the
    // first byte (see the level marker set by `fastlz2_compress`).
    let level = (input[0] >> 5) + 1;
    match level {
        1 => unsafe {
            fastlz1_decompress(
                input.as_ptr() as *const libc::c_void,
                length,
                output.as_ptr() as *mut libc::c_void,
                maxout,
            )
        },
        2 => unsafe {
            fastlz2_decompress(
                input.as_ptr() as *const libc::c_void,
                length,
                output.as_ptr() as *mut libc::c_void,
                maxout,
            )
        },
        // Unknown level: signal an error.
        _ => 0,
    }
}
/// Compress a block of data in the input buffer and returns the size of
/// compressed block. The size of input buffer is specified by length. The
/// minimum input buffer size is 16.
///
/// The output buffer must be at least 5% larger than the input buffer
/// and can not be smaller than 66 bytes.
///
/// If the input is not compressible, the return value might be larger than
/// length (input buffer size).
///
/// The input buffer and the output buffer can not overlap.
///
/// Compression level can be specified in parameter level. At the moment,
/// only level 1 and level 2 are supported.
/// Level 1 is the fastest compression and generally useful for short data.
/// Level 2 is slightly slower but it gives better compression ratio.
///
/// Note that the compressed data, regardless of the level, can always be
/// decompressed using the function fastlz_decompress below.
///
#[no_mangle]
pub fn fastlz_compress_level(level: i32, input: &[u8], output: &[u8]) -> i32 {
    // NOTE(review): the compressors write through a pointer derived from the
    // shared `&[u8]` borrow of `output`, which is undefined behavior in Rust;
    // the parameter should be `&mut [u8]`. Left unchanged because the change
    // would break callers — needs a coordinated fix.
    let length: i32 = input.len().try_into().unwrap();
    match level {
        1 => unsafe {
            fastlz1_compress(
                input.as_ptr() as *const libc::c_void,
                length,
                output.as_ptr() as *mut libc::c_void,
            )
        },
        2 => unsafe {
            fastlz2_compress(
                input.as_ptr() as *const libc::c_void,
                length,
                output.as_ptr() as *mut libc::c_void,
            )
        },
        // Only levels 1 and 2 exist; anything else signals an error.
        _ => 0,
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Reference data: the original text plus the same text compressed at
    // each supported level (const names uppercased per Rust convention).
    const FIXTURE_ORIG: &[u8] = include_bytes!("../tests/data/sample.txt");
    const FIXTURE_COMP_LV1: &[u8] = include_bytes!("../tests/data/compressed-lvl1.lz");
    const FIXTURE_COMP_LV2: &[u8] = include_bytes!("../tests/data/compressed-lvl2.lz");

    #[test]
    fn test_fastlz_decompress_with_level1_input() {
        let output = vec![0u8; FIXTURE_ORIG.len()];
        // The return value is the decompressed size; 0 means failure.
        // Previously this value was ignored, silently masking a wrongly
        // reported size.
        let written = fastlz_decompress(FIXTURE_COMP_LV1, &output);
        assert_eq!(written as usize, FIXTURE_ORIG.len());
        assert_eq!(FIXTURE_ORIG, &output[..]);
    }

    #[test]
    fn test_fastlz_decompress_with_level2_input() {
        let output = vec![0u8; FIXTURE_ORIG.len()];
        let written = fastlz_decompress(FIXTURE_COMP_LV2, &output);
        assert_eq!(written as usize, FIXTURE_ORIG.len());
        assert_eq!(FIXTURE_ORIG, &output[..]);
    }
}
|
use cocoa::base::{id, nil};
use cocoa::foundation::NSUInteger;
use sys::MTLRenderPassDescriptor;
use {FromRaw, IntoRaw, RenderPassColorAttachmentDescriptor,
RenderPassColorAttachmentDescriptorArray, RenderPassDepthAttachmentDescriptor,
RenderPassStencilAttachmentDescriptor};
/// Wrapper around a raw Objective-C `MTLRenderPassDescriptor` object id.
pub struct RenderPassDescriptor(id);
impl RenderPassDescriptor {
    /// Creates a new descriptor via `MTLRenderPassDescriptor renderPassDescriptor`.
    pub fn new() -> Self {
        unsafe { FromRaw::from_raw(MTLRenderPassDescriptor::renderPassDescriptor(nil)).unwrap() }
    }

    /// Returns the wrapped color-attachment descriptor array.
    pub fn color_attachments(&self) -> RenderPassColorAttachmentDescriptorArray {
        unsafe { FromRaw::from_raw(self.0.colorAttachments()).unwrap() }
    }

    /// Replaces the color-attachment descriptor array.
    pub fn set_color_attachments(&mut self, attachments: RenderPassColorAttachmentDescriptorArray) {
        unsafe { self.0.setColorAttachments(attachments.into_raw()) }
    }

    /// Returns the depth-attachment descriptor.
    pub fn depth_attachment(&self) -> RenderPassDepthAttachmentDescriptor {
        unsafe { FromRaw::from_raw(self.0.depthAttachment()).unwrap() }
    }

    /// Replaces the depth-attachment descriptor.
    pub fn set_depth_attachment(&mut self, attachment: RenderPassDepthAttachmentDescriptor) {
        unsafe { self.0.setDepthAttachment(attachment.into_raw()) }
    }

    /// Returns the stencil-attachment descriptor.
    pub fn stencil_attachment(&self) -> RenderPassStencilAttachmentDescriptor {
        unsafe { FromRaw::from_raw(self.0.stencilAttachment()).unwrap() }
    }

    /// Replaces the stencil-attachment descriptor.
    pub fn set_stencil_attachment(&mut self, attachment: RenderPassStencilAttachmentDescriptor) {
        unsafe { self.0.setStencilAttachment(attachment.into_raw()) }
    }

    /// Reads `renderTargetArrayLength` from the descriptor (macOS only).
    #[cfg(target_os = "macos")]
    pub fn render_target_array_length(&self) -> usize {
        unsafe { self.0.renderTargetArrayLength() as usize }
    }

    /// Writes `renderTargetArrayLength` on the descriptor (macOS only).
    #[cfg(target_os = "macos")]
    pub fn set_render_target_array_length(&mut self, target_array_length: usize) {
        unsafe { self.0.setRenderTargetArrayLength(target_array_length as NSUInteger) }
    }

    /// Attempt to downcast this descriptor to a `RenderPassDepthAttachmentDescriptor`. If `self`
    /// is not a `RenderPassDepthAttachmentDescriptor`, then `self` will be returned in the `Err`
    /// branch.
    pub fn downcast_to_depth_descriptor(self) -> Result<RenderPassDepthAttachmentDescriptor, Self> {
        match FromRaw::from_raw(self.0) {
            Ok(descriptor) => Ok(descriptor),
            Err(_) => Err(self),
        }
    }

    /// Attempt to downcast this descriptor to a `RenderPassColorAttachmentDescriptor`. If `self`
    /// is not a `RenderPassColorAttachmentDescriptor`, then `self` will be returned in the `Err`
    /// branch.
    pub fn downcast_to_color_descriptor(self) -> Result<RenderPassColorAttachmentDescriptor, Self> {
        match FromRaw::from_raw(self.0) {
            Ok(descriptor) => Ok(descriptor),
            Err(_) => Err(self),
        }
    }

    /// Attempt to downcast this descriptor to a `RenderPassStencilAttachmentDescriptor`. If `self`
    /// is not a `RenderPassStencilAttachmentDescriptor`, then `self` will be returned in the `Err`
    /// branch.
    pub fn downcast_to_stencil_descriptor(
        self)
        -> Result<RenderPassStencilAttachmentDescriptor, Self> {
        match FromRaw::from_raw(self.0) {
            Ok(descriptor) => Ok(descriptor),
            Err(_) => Err(self),
        }
    }
}
impl Clone for RenderPassDescriptor {
    /// Clones by sending `copy` to the underlying Objective-C object and
    /// wrapping the returned descriptor.
    fn clone(&self) -> Self {
        let raw = unsafe { self.0.copy() };
        FromRaw::from_raw(raw).unwrap()
    }
}
// Generates the `FromRaw`/`IntoRaw` impls backed by the
// `MTLRenderPassDescriptor` Objective-C class.
impl_from_into_raw!(RenderPassDescriptor, of class "MTLRenderPassDescriptor");
|
extern crate vulkano;
extern crate vulkano_shaders;
extern crate vulkano_win;
extern crate winit;
use clap::App;
use clap::Arg;
use kikansha::engine::State;
use kikansha::figure::FigureMutation;
use kikansha::figure::FigureSet;
use kikansha::figure::RenderableMesh;
use kikansha::scene::camera::StickyRotatingCamera;
use kikansha::scene::gltf::load_figures;
use kikansha::scene::gltf::LoadingError;
use kikansha::scene::lights::PointLight;
use kikansha::scene::Scene;
use std::f32::consts::PI;
use std::process::exit;
use std::sync::Arc;
use std::sync::Mutex;
use std::time::{Duration, SystemTime};
/// Guard that sends `true` on `quit_send` when it is dropped — including
/// when the owning thread unwinds from a panic (see the `Drop` impl).
struct QuitOnScopeExit<'a> {
    // Channel used to notify the main loop that this scope has ended.
    quit_send: &'a std::sync::mpsc::Sender<bool>,
}
impl Drop for QuitOnScopeExit<'_> {
    /// Notifies the quit channel on scope exit; logs first when the drop
    /// happens during a panic unwind.
    fn drop(&mut self) {
        if std::thread::panicking() {
            log::error!("Panicking");
        }
        // The receiver may already be gone; a failed send is fine to ignore.
        self.quit_send.send(true).ok();
    }
}
fn main() {
log4rs::init_file(
"/home/michey/Projects/hello_vulkan/config/log4rs.yaml",
Default::default(),
)
.unwrap();
let matches = App::new("kikansha")
.version("1.0")
.author("")
.about("")
.arg(
Arg::with_name("debugger")
.short("d")
.long("debugger")
.help("Wait for debugger"),
)
.arg(
Arg::with_name("validation")
.short("v")
.long("validation")
.help("Run with validation layer"),
)
.arg(
Arg::with_name("color_l")
.short("c")
.long("color_l")
.takes_value(true)
.value_name("level")
.help("Set debug level for deferred shader"),
)
.get_matches();
if matches.is_present("debugger") {
let url = format!(
"vscode://vadimcn.vscode-lldb/launch/config?{{'request':'attach','pid':{}}}",
std::process::id()
);
log::info!("{}", &url);
std::process::Command::new("code")
.arg("--open-url")
.arg(url)
.output()
.unwrap();
std::thread::sleep_ms(10000); // Wait for debugger to attach
}
let color_debug_level: i32 = matches
.value_of("color_l")
.and_then(|s| s.parse::<i32>().ok())
.unwrap_or(0);
let run_with_validation = matches.is_present("validation");
let mut yaw = PI / 4.0;
let mut pitch = -PI / 4.0;
let yaw_loop = Duration::from_secs(6_u64);
let mut yaw_step = (PI * 2.0) / yaw_loop.as_millis() as f32;
let pitch_loop = Duration::from_secs(10_u64);
let mut pitch_step = PI / pitch_loop.as_millis() as f32;
let mut init_ts = SystemTime::now();
let p_camera = StickyRotatingCamera::new(5.5, yaw, pitch);
let camera = Arc::new(Mutex::new(p_camera));
let mut scene_sets: Vec<FigureSet> = Vec::new();
let teapot_scale = 1.0;
let teapot_mutations = vec![FigureMutation::new([0.0, 0.0, 0.0], teapot_scale)];
let sce2: Result<Vec<RenderableMesh>, LoadingError> =
// load_scene_from_file("/home/michey/Projects/hello_vulkan/data/models/teapot.gltf");
load_figures("/home/michey/Projects/hello_vulkan/data/models/teapot.gltf");
match sce2 {
Ok(meshes) => match meshes.first() {
Some(mesh) => {
let teapot_set = FigureSet::new(
mesh.clone(),
teapot_mutations,
"/home/michey/Projects/hello_vulkan/src/kikansha/frame/resources/tex.png"
.to_string(),
"/home/michey/Projects/hello_vulkan/src/kikansha/frame/resources/tex.png"
.to_string(),
);
scene_sets.push(teapot_set);
}
_ => {}
},
_ => {}
}
let scene = Scene::create(camera.clone(), scene_sets, PointLight::default_lights());
let sleep = Duration::from_millis(100);
let (event_send, _event_recv) = std::sync::mpsc::sync_channel(1);
let (quit_send, quit_recv) = std::sync::mpsc::channel();
std::thread::spawn(move || {
let _scoped_quit = QuitOnScopeExit {
quit_send: &quit_send,
};
log::info!("Thread created");
loop {
let current_ts = SystemTime::now();
let elapsed = current_ts.duration_since(init_ts).unwrap();
init_ts = current_ts;
let new_yaw = yaw + (elapsed.as_millis() as f32 * yaw_step);
yaw = new_yaw;
if new_yaw >= (PI * 2.0) {
yaw = PI * 2.0;
yaw_step = -yaw_step;
}
if new_yaw <= 0.0 {
yaw = 0.0;
yaw_step = -yaw_step;
}
let new_pitch = pitch + (elapsed.as_millis() as f32 * pitch_step);
pitch = new_pitch;
if new_pitch >= (PI / 2.0) {
pitch = PI / 2.0 - pitch_step.abs();
pitch_step = -pitch_step
}
if new_pitch <= -(PI / 2.0) {
pitch = -(PI / 2.0) + pitch_step.abs();
pitch_step = -pitch_step
}
{
camera.lock().unwrap().set_yaw(yaw);
}
// {
// camera.lock().unwrap().set_pitch(pitch);
// }
std::thread::sleep(sleep);
}
});
State::run_loop(
&scene,
event_send,
quit_recv,
run_with_validation,
color_debug_level,
);
}
|
use derive_more::{Deref, AsRef, From, Into};
use lazy_static::lazy_static;
use regex::Regex;
use serde::{Serialize, Deserialize};
use validator::Validate;
use validator_derive::Validate;
// Validation patterns shared by the newtypes below; all are anchored so
// they must match the whole string.
lazy_static! {
    // Case-insensitive letters, digits, underscore and hyphen.
    pub static ref USERNAME_REGEX: Regex = Regex::new(r"(?i)^[a-z\d_-]*$").unwrap();
    // Any run of non-whitespace characters.
    pub static ref PASSWORD_REGEX: Regex = Regex::new(r"^[^\s]*$").unwrap();
    // Same alphabet as usernames.
    pub static ref ROLE_REGEX: Regex = Regex::new(r"(?i)^[a-z\d_-]*$").unwrap();
    // Standard base64: groups of four, optionally ending in "==" / "=" padding.
    pub static ref BASE64_REGEX: Regex = Regex::new("^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$").unwrap();
}
/// A user password: 6–24 characters, no whitespace allowed.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Password {
    #[validate(regex(path = "PASSWORD_REGEX", message = "should contain only non-whitespace chars"))]
    #[validate(length(min = 6, max = 24, message = "should have 6 to 24 chars"))]
    inner: String,
}
/// A username: 3–24 characters matching `USERNAME_REGEX`.
/// NOTE(review): the regex also allows '-', which the error message does
/// not mention — confirm which of the two is intended.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Username {
    #[validate(regex(path = "USERNAME_REGEX", message = "should contain only alpha numeric and underscore chars"))]
    #[validate(length(min = 3, max = 24, message = "should have 3 to 24 chars"))]
    inner: String,
}
/// An email address, validated by the `validator` crate's email check.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Email {
    #[validate(email(message = "should be a valid email"))]
    inner: String,
}
/// A role name: 3–24 characters matching `ROLE_REGEX` (same alphabet as
/// usernames). NOTE(review): like `Username`, the regex also allows '-'
/// although the message does not mention it.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct RoleName {
    #[validate(regex(path = "ROLE_REGEX", message = "should contain only alpha numeric and underscore chars"))]
    #[validate(length(min = 3, max = 24, message = "should have 3 to 24 chars"))]
    pub inner: String,
}
/// A display nickname: 3–24 characters, no character-class restriction.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Nickname {
    #[validate(length(min = 3, max = 24, message = "should have 3 to 24 chars"))]
    inner: String,
}
/// An opaque string of exactly 24 characters.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Any24 {
    #[validate(length(equal = 24, message = "should have 24 chars"))]
    inner: String,
}
/// An opaque string of exactly 6 characters.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Any6 {
    #[validate(length(equal = 6, message = "should have 6 chars"))]
    inner: String,
}
/// A numeric identifier. Derives `Validate` for API uniformity even though
/// no field-level validation rules are attached.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Id {
    id: i32,
}
/// A page size for paginated queries, capped at 50 (defaults to 10 via
/// the `Default` impl below).
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct PaginationSize {
    #[validate(range(max=50, message = "should be less or equal than 50"))]
    inner: usize,
}
impl Default for PaginationSize {
fn default() -> Self {
PaginationSize { inner: 10 }
}
}
/// An opaque pagination cursor, validated as base64 text.
#[derive(Debug, Validate, Serialize, Deserialize, Deref, AsRef, From, Into, Clone)]
#[serde(transparent)]
pub struct Cursor {
    #[validate(regex(path = "BASE64_REGEX", message = "invalid cursor"))]
    inner: String,
}
|
mod patch;
mod repo;
mod tree;
use std::path::Path;
pub use repo::{GitCommit, GitRepo};
pub use tree::Tree;
/// Walks the history of the repository at `path` in topological order,
/// incrementally building an analyzed `Tree` per commit, and invokes
/// `stat` once per planned step.
///
/// * `verbose` — emit progress information on stderr.
/// * `commit_filter` — selects which commits participate in the sort.
/// * `stat` — callback receiving the repo, the commit, its analyzed tree,
///   the current step index and the total number of steps.
pub fn run_stat<P,F,S>(
    path: P,
    verbose: bool,
    commit_filter: F,
    mut stat: S,
)
where
    P: AsRef<Path>,
    F: Fn(&GitCommit) -> bool,
    S: FnMut(&GitRepo, &GitCommit, &Tree, usize, usize),
{
    let repo = GitRepo::open(path).unwrap();
    let commit = repo.find_commit(repo::VersionSpec::Head).unwrap();
    if verbose {
        eprintln!("Sorting commits (head = {})", commit.id().unwrap_or(git2::Oid::zero()));
    }
    let result = commit.topological_sort(&commit_filter).unwrap();
    if verbose {
        eprintln!("Found {} commits to process", result.len());
    }
    let plan = result.plan();
    // Trees kept alive only while some child still needs them; entries are
    // dropped as soon as the plan marks them expired.
    let mut trees = std::collections::BTreeMap::new();
    // Cached (commit, tree) of the first boundary commit, reused as the
    // diff base for every later boundary commit.
    let mut base_line_tree = None;
    for i in 0..plan.len() {
        let step = &plan[i];
        let commit = result.get_commit(step.processing).unwrap();
        let parents: Vec<_> = result
            .get_parent_idx(step.processing)
            .unwrap()
            .iter()
            .map(|pid| &trees[pid])
            .collect();
        // `is_empty` instead of `len() == 0` (idiomatic, same behavior).
        let tree = if parents.is_empty() {
            if commit.is_initial_commit() {
                // True root commit: diff against an empty tree.
                let empty = tree::Tree::empty();
                let parent_commits = result.get_parent_commits(step.processing);
                let patch = commit.diff_with(parent_commits.iter(), verbose).unwrap();
                if verbose {
                    eprintln!("Analyzing initial commit {}", commit.id().unwrap_or(git2::Oid::zero()));
                }
                tree::Tree::analyze_patch(&[&empty], patch.as_ref(), commit.author_id())
            } else {
                // Boundary commit (history was cut by the filter): attribute
                // its content to the synthetic "Older Code" author.
                if verbose {
                    eprintln!("Analyzing boundary commit {}", commit.id().unwrap_or(git2::Oid::zero()));
                }
                if base_line_tree.is_none() {
                    let tree = tree::Tree::from_commit(&commit, repo.query_author_id("Older Code"), verbose);
                    base_line_tree = Some((commit.clone(), tree.clone()));
                    tree
                } else {
                    let (bc, bt) = base_line_tree.as_ref().unwrap();
                    let patch = commit.diff_with([bc.clone()].iter(), verbose).unwrap();
                    tree::Tree::analyze_patch(&[bt], patch.as_ref(), repo.query_author_id("Older Code"))
                }
            }
        } else {
            // Ordinary (possibly merge) commit: diff against all parents.
            let parent_commits = result.get_parent_commits(step.processing);
            let patch = commit.diff_with(parent_commits.iter(),verbose).unwrap();
            if verbose {
                eprintln!("Analyzing commit {} (merge from {} parents)", result.get_commit(step.processing).unwrap().id().unwrap_or(git2::Oid::zero()), parent_commits.len());
            }
            tree::Tree::analyze_patch(parents.as_ref(), patch.as_ref(), commit.author_id())
        };
        stat(&repo, &commit, &tree, i, plan.len());
        trees.insert(step.processing, tree);
        // Release trees no longer needed by any pending step.
        for remove_idx in step.expired.iter() {
            trees.remove(remove_idx);
        }
    }
}
|
use super::*;
/// Fills `board` from a textual sudoku description: one line per row,
/// cell values separated by whitespace. Missing rows or cells fall back
/// to the empty string template.
pub fn fill(board: &mut Board, sudoku_content: String) {
    let sudoku_lines: Vec<_> = sudoku_content.lines().collect();
    for x in 0..BOARD_WIDTH as usize {
        // A missing line yields an empty row.
        let row_text = sudoku_lines.get(x).unwrap_or(&"");
        let numbers: Vec<_> = row_text.split_whitespace().collect();
        for y in 0..BOARD_WIDTH as usize {
            let coordinate = Coordinate::new(x as u8, y as u8);
            let cell = board.find_cell_mut(coordinate);
            // `unwrap_or` (not `unwrap_or_else`) — the default is a plain
            // literal, and this matches the row lookup above.
            let number = *numbers.get(y).unwrap_or(&"");
            cell.borrow_mut().set_num_template(number);
        }
    }
}
|
use std::rc::Rc;
use std::cell::RefCell;
use super::curio::Curio;
use super::hall::Hall;
/// A location connected to other rooms through halls.
pub struct Room {
    // Display name; also the sole identity used for equality (see PartialEq).
    pub name: String,
    // Curios currently located in this room.
    pub contents: Vec<Curio>,
    // Halls linking this room to its neighbors.
    pub halls: Vec<Rc<Hall>>,
    // Wumpus-presence flag for this room.
    pub wumpus: bool,
}
impl PartialEq for Room {
    /// Rooms are identified by name alone; contents and halls are ignored.
    fn eq(&self, other: &Self) -> bool {
        other.name == self.name
    }
}
impl Room {
    /// Every room reachable through one of this room's halls.
    pub fn neighbors(&self) -> Vec<Rc<RefCell<Room>>> {
        let mut rooms = Vec::with_capacity(self.halls.len());
        for hall in &self.halls {
            rooms.push(hall.other(&self));
        }
        rooms
    }

    /// Comma-separated list of the neighboring rooms' names.
    pub fn neighbors_string(&self) -> String {
        let names: Vec<String> = self
            .halls
            .iter()
            .map(|hall| hall.other(&self).borrow().name.clone())
            .collect();
        names.join(", ")
    }
}
|
use std::time::Duration;
use crate::{
bson::{doc, Bson},
cmap::StreamDescription,
coll::{options::DistinctOptions, Namespace},
error::ErrorKind,
operation::{
test::{self, handle_response_test},
Distinct,
Operation,
},
};
#[test]
fn build() {
    let key = "field_name".to_string();
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    // With no query and no options the command carries only the collection
    // name and the key.
    let mut op = Distinct::new(ns, key.clone(), None, None);
    let cmd = op
        .build(&StreamDescription::new_testing())
        .expect("error on build");
    assert_eq!(
        cmd.body,
        doc! {
            "distinct": "test_coll",
            "key": key
        }
    );
    assert_eq!(cmd.target_db, "test_db");
}
#[test]
fn build_with_query() {
    let key = "field_name".to_string();
    let query = doc! {"something" : "something else"};
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    // A provided filter must be forwarded as the command's "query" field.
    let mut op = Distinct::new(ns, key.clone(), Some(query.clone()), None);
    let cmd = op
        .build(&StreamDescription::new_testing())
        .expect("error on build");
    assert_eq!(
        cmd.body,
        doc! {
            "distinct": "test_coll",
            "key": key,
            "query": Bson::Document(query)
        }
    );
    assert_eq!(cmd.target_db, "test_db");
}
#[test]
fn build_with_options() {
    let key = "field_name".to_string();
    let max_time = Duration::new(2_u64, 0);
    let options: DistinctOptions = DistinctOptions::builder().max_time(max_time).build();
    let ns = Namespace {
        db: "test_db".to_string(),
        coll: "test_coll".to_string(),
    };
    // The max_time option must surface as the command's maxTimeMS field.
    let mut op = Distinct::new(ns, key.clone(), None, Some(options));
    let cmd = op
        .build(&StreamDescription::new_testing())
        .expect("error on build");
    assert_eq!(
        cmd.body,
        doc! {
            "distinct": "test_coll",
            "key": key,
            "maxTimeMS": max_time.as_millis() as i32
        }
    );
    assert_eq!(cmd.target_db, "test_db");
}
#[test]
fn op_selection_criteria() {
    // Delegates to the shared harness that checks the operation reports
    // the selection criteria it was constructed with.
    test::op_selection_criteria(|selection_criteria| {
        let options = DistinctOptions {
            selection_criteria,
            ..Default::default()
        };
        Distinct::new(Namespace::empty(), String::new(), None, Some(options))
    });
}
#[test]
fn handle_success() {
    let op = Distinct::empty();
    let expected: Vec<Bson> = ["A", "B"]
        .iter()
        .map(|s| Bson::String(s.to_string()))
        .collect();
    let response = doc! {
        "values" : expected.clone(),
        "ok" : 1
    };
    // A well-formed reply yields exactly the server-provided values.
    let actual = handle_response_test(&op, response).unwrap();
    assert_eq!(actual, expected);
}
#[test]
fn handle_response_with_empty_values() {
    let op = Distinct::empty();
    let response = doc! {
        "values" : [],
        "ok" : 1
    };
    // An empty "values" array is valid and maps to an empty Vec.
    let actual: Vec<Bson> = handle_response_test(&op, response).unwrap();
    assert_eq!(actual, Vec::new());
}
#[test]
fn handle_response_no_values() {
    let op = Distinct::empty();
    // A reply that lacks the mandatory "values" array must be rejected as
    // an invalid response.
    let result = handle_response_test(&op, doc! { "ok" : 1 });
    match result.map_err(|e| *e.kind) {
        Err(ErrorKind::InvalidResponse { .. }) => {}
        other => panic!("expected response error, but got {:?}", other),
    }
}
|
use async_trait::async_trait;
use uuid::Uuid;
use common::cache::Cache;
use common::error::Error;
use common::infrastructure::cache::InMemCache;
use common::result::Result;
use crate::domain::category::{Category, CategoryId, CategoryRepository};
use crate::mocks;
/// In-memory `CategoryRepository` backed by an `InMemCache`, intended for
/// tests and local development.
pub struct InMemCategoryRepository {
    // Keyed by CategoryId; owns cloned Category values.
    cache: InMemCache<CategoryId, Category>,
}
impl InMemCategoryRepository {
    /// Creates an empty repository.
    pub fn new() -> Self {
        Self {
            cache: InMemCache::new(),
        }
    }

    /// Creates a repository pre-loaded with the two mock categories.
    pub async fn populated() -> Self {
        let repo = Self::new();
        let mut first = mocks::category1();
        let mut second = mocks::category2();
        repo.save(&mut first).await.unwrap();
        repo.save(&mut second).await.unwrap();
        repo
    }
}
impl Default for InMemCategoryRepository {
    /// Equivalent to `InMemCategoryRepository::new()`.
    fn default() -> Self {
        Self::new()
    }
}
#[async_trait]
impl CategoryRepository for InMemCategoryRepository {
async fn next_id(&self) -> Result<CategoryId> {
let id = Uuid::new_v4();
CategoryId::new(id.to_string())
}
async fn find_by_id(&self, id: &CategoryId) -> Result<Category> {
self.cache
.get(id)
.await
.ok_or(Error::new("category", "not_found"))
}
async fn find_all_categories(&self) -> Result<Vec<Category>> {
Ok(self.cache.filter(|_| true).await)
}
async fn save(&self, category: &mut Category) -> Result<()> {
self.cache
.set(category.base().id().clone(), category.clone())
.await
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.