text stringlengths 8 4.13M |
|---|
extern crate rary;

// Entry point: exercises both the directly-public and the re-exported
// function of the external `rary` library crate.
fn main() {
    // NOTE(review): `public_funcion` looks like a typo for `public_function`
    // (cf. the Rust by Example `rary` crate) — confirm against the crate's
    // actual API before renaming.
    rary::public_funcion();
    rary::indirect_access();
}
|
mod simple_db {
use std::any::{Any, TypeId};
use std::collections::HashMap;
use std::fs;
use std::fs::{File, OpenOptions};
use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use uuid::Uuid;
/// Error values returned by the database CRUD operations.
///
/// Derives `Debug` so results can be `unwrap()`ed / logged, and
/// `PartialEq`/`Eq` so tests can assert on specific error values.
/// Both derives are backward-compatible additions.
#[derive(Debug, PartialEq, Eq)]
pub enum Errors {
    /// The requested id — or the whole table for the requested type — does not exist.
    NotFound,
}
/// In-memory index for a single stored Rust type: maps record GUIDs to the
/// byte range of their serialized payload inside the database file.
struct Table {
    // GUID -> (position, length)
    data_map: HashMap<String, (u32, u32)>,
    // NOTE(review): never populated or read in this file. `dyn Fn(dyn Any) -> dyn Any`
    // takes and returns unsized values, so it is unclear how a valid element could
    // be constructed — verify intent before building on this field.
    indexes: Vec<&'static dyn Fn(dyn Any) -> dyn Any>,
}
impl Table {
fn new() -> Self {
Self {
data_map: HashMap::new(),
indexes: Vec::new(),
}
}
}
/// A minimal file-backed object store: serialized records are appended to a
/// single `data/<name>.sdb` file, and located again via per-type in-memory
/// tables (the index itself is not persisted).
pub struct Db {
    name: String,
    file: File,
    // One Table per stored Rust type, keyed by that type's TypeId.
    tables: HashMap<TypeId, Table>,
}
impl Db {
    /// Opens (creating if necessary) the database file `data/<name>.sdb`.
    ///
    /// The `data` directory is created best-effort: an error (most commonly
    /// "already exists") is deliberately ignored; a genuinely unusable path
    /// will surface on `open` instead.
    #[allow(dead_code)]
    pub fn new(name: String) -> Self {
        let root_folder_path: PathBuf = Path::new("data").to_owned();
        let db_filename = format!("{}.sdb", name);
        // Best-effort; replaces the old `match create_dir { _ => () }` idiom.
        let _ = fs::create_dir(&root_folder_path);
        let file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .open(root_folder_path.join(&db_filename))
            .unwrap();
        Self {
            name,
            file,
            tables: HashMap::new(),
        }
    }

    /// Serializes `obj`, appends it to the database file and returns the
    /// freshly generated GUID under which it can be retrieved.
    ///
    /// Panics on I/O or serialization failure (as the original did).
    #[allow(dead_code)]
    pub fn post<T: 'static + serde::ser::Serialize + Sized>(
        &mut self,
        obj: T,
    ) -> Result<String, Errors> {
        let new_id = Uuid::new_v4().to_string();
        // Records are always appended at the current end of file.
        let data_location = self.file.metadata().unwrap().len();
        self.file.seek(SeekFrom::Start(data_location)).unwrap();
        let serialised_value = bincode::serialize(&obj).unwrap();
        println!(
            "POST: writing to location {:?} value {:?}",
            data_location, serialised_value
        );
        // NOTE: this serializes the Vec<u8> itself, which prefixes the bytes
        // with an 8-byte length; `get` compensates with `position + 8`.
        bincode::serialize_into(&mut self.file, &serialised_value).unwrap();
        // Entry API: one lookup instead of contains_key + insert + get_mut.
        let table = self.tables.entry(obj.type_id()).or_insert_with(Table::new);
        table.data_map.insert(
            new_id.clone(),
            (data_location as u32, serialised_value.len() as u32),
        );
        Ok(new_id)
    }

    /// Reads back the record stored under `id`, deserialized as `T`.
    ///
    /// Returns `Errors::NotFound` when no table exists for `T` or the id is
    /// unknown. (The original panicked via `unwrap()` when `T` had never been
    /// posted; that is now a clean `NotFound`.)
    #[allow(dead_code)]
    pub fn get<T: 'static + serde::de::DeserializeOwned>(
        &mut self,
        id: &str,
    ) -> Result<T, Errors> {
        let table = self.tables.get(&TypeId::of::<T>()).ok_or(Errors::NotFound)?;
        match table.data_map.get(id) {
            Some((position, size)) => {
                // Skip the 8-byte length prefix written by `serialize_into`.
                let offset_position = position + 8;
                println!(
                    "GET: id {} type {:?} position {} size {} offset position {} offset size {}",
                    id, TypeId::of::<T>(), position, size, offset_position, size,
                );
                // vec![0; n] replaces with_capacity + resize.
                let mut raw_data: Vec<u8> = vec![0; *size as usize];
                self.file
                    .seek(SeekFrom::Start(offset_position as u64))
                    .unwrap();
                self.file.read_exact(raw_data.as_mut()).unwrap();
                Ok(bincode::deserialize(raw_data.as_slice()).unwrap())
            }
            _ => Err(Errors::NotFound),
        }
    }

    /// Not implemented: intended to wipe the whole database.
    #[allow(dead_code)]
    pub fn nuke(&self) -> Result<(), Errors> {
        Err(Errors::NotFound)
    }

    /// Removes `id` from the in-memory table for `T`.
    ///
    /// The serialized bytes remain in the file — space is never reclaimed.
    #[allow(dead_code)]
    pub fn delete<T: 'static + serde::de::DeserializeOwned>(
        &mut self,
        id: &str,
    ) -> Result<(), Errors> {
        let table = self
            .tables
            .get_mut(&TypeId::of::<T>())
            .ok_or(Errors::NotFound)?;
        match table.data_map.remove(id) {
            Some(_) => Ok(()),
            None => Err(Errors::NotFound),
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::simple_db::*;
    use serde::Deserialize;
    use serde::Serialize;
    use std::fs;
    use std::panic;
    use uuid::Uuid;

    /// Builds a Db with a random UUID name so concurrently running tests
    /// never share a database file.
    fn seeded_db() -> Db {
        Db::new(Uuid::new_v4().to_string())
    }

    /// Best-effort removal of the test database file; errors are ignored.
    // NOTE(review): `Db::new` opens `data/<name>.sdb`, but this removes
    // `<name>.sdb` relative to the working directory — the file is likely
    // never actually deleted. Verify the intended path.
    fn nuke_db(db: Db) {
        match fs::remove_file(format!("{}.sdb", db.name)) {
            _ => (),
        };
    }

    // use crate::simple_db::Crud;
    // #[test]
    // fn safe_filename() {
    //     let mut pairs: HashMap<&str, &str> = HashMap::new();
    //     pairs.insert("test", "test");
    //     pairs.insert("a1", "a1");
    //     // pairs.insert("!@£^@!$£", "_");
    //     pairs.insert("test!@£$123", "test123");
    //     pairs.insert(std::any::type_name::<str>(), "str");
    //
    //     for (input, expected_output) in pairs {
    //         let actual_output = to_safe_filename(input);
    //         assert_eq!(actual_output, expected_output);
    //     }
    // }

    // Smoke test: constructing a Db must not panic.
    #[test]
    fn create_db() {
        let db = seeded_db();
        nuke_db(db);
    }

    // Posting a value yields an id without error.
    #[test]
    fn post() {
        let mut db = seeded_db();
        db.post("hello").ok().unwrap();
        nuke_db(db);
    }

    // Round-trip: a posted String comes back unchanged.
    #[test]
    fn get() {
        let mut db = seeded_db();
        let id = db.post::<String>("hello".to_string()).ok().unwrap();
        let actual = db.get::<String>(&id).ok().unwrap();
        assert_eq!(actual, "hello");
        nuke_db(db);
    }

    // Two records of the same type stay independently addressable.
    #[test]
    fn multiple_get() {
        let mut db = seeded_db();
        let id1 = db.post::<String>("hello1".to_string()).ok().unwrap();
        let actual1 = db.get::<String>(&id1).ok().unwrap();
        assert_eq!(actual1, "hello1");
        let id2 = db.post::<String>("hello2".to_string()).ok().unwrap();
        let actual2 = db.get::<String>(&id2).ok().unwrap();
        assert_eq!(actual2, "hello2");
        nuke_db(db);
    }

    // After delete, the id can no longer be fetched.
    #[test]
    fn delete() {
        let mut db = seeded_db();
        let id = db.post::<String>("hello".to_string()).ok().unwrap();
        assert!(db.get::<String>(&id).is_ok());
        db.delete::<String>(&id).ok().unwrap();
        assert!(db.get::<String>(&id).is_err());
        nuke_db(db);
    }

    // Deleting an unknown id reports an error instead of succeeding silently.
    #[test]
    fn delete_non_existing_id() {
        let mut db = seeded_db();
        let result = db.delete::<String>(&"made_up".to_string());
        assert!(result.is_err());
        nuke_db(db);
    }

    // Full CRUD cycle with a user-defined serde type.
    #[test]
    fn complex_object_workflow() {
        #[derive(Serialize, Deserialize, PartialEq, Debug)]
        struct Complex {
            name: String,
            x: i32,
        }
        let complex = Complex {
            name: "Stefano".to_string(),
            x: 34,
        };
        let mut db = seeded_db();
        let id = db.post(complex).ok().unwrap();
        let retrieved_complex = db.get::<Complex>(&id).ok().unwrap();
        assert_eq!(retrieved_complex.name, "Stefano");
        assert_eq!(retrieved_complex.x, 34);
        db.delete::<Complex>(&id).ok().unwrap();
        assert!(db.get::<Complex>(&id).is_err());
        nuke_db(db);
    }

    // #[test]
    // fn find() {
    //     let mut db = seeded_db();
    //     db.post::<String>("hello".to_string()).ok().unwrap();
    //     let actual = db
    //         .find_one::<String>(|x: &String| x.starts_with("hell"))
    //         .ok()
    //         .unwrap()
    //         .unwrap();
    //     assert_eq!(actual, "hello");
    //     nuke_db(db);
    // }
    //
    // #[test]
    // fn not_found() {
    //     let mut db = seeded_db();
    //     db.post::<String>("hello".to_string()).ok().unwrap();
    //     let actual = db
    //         .find_one::<String>(|x: &String| x.starts_with("hellllooo"))
    //         .ok()
    //         .unwrap();
    //     assert!(actual.is_none());
    //     nuke_db(db);
    // }
}
}
|
use std::io::{Read, Write};
use std::net::{TcpStream, TcpListener};
const LOCAL: &str = "127.0.0.1:6000";
const MSG_SIZE: usize = 16;
/// Reads up to `MSG_SIZE` bytes from the stream and returns them as a
/// UTF-8 string, truncated at the first NUL byte.
fn read_message(mut stream: &TcpStream) -> Result<String, &'static str> {
    let mut buff = vec![0; MSG_SIZE];
    if stream.read(&mut buff).is_err() {
        return Err("Couldn't read the stream");
    }
    // Everything after the first zero byte is unused buffer padding.
    let bytes: Vec<u8> = buff.into_iter().take_while(|&b| b != 0).collect();
    let message = String::from_utf8(bytes).expect("Invalid UTF8");
    Ok(message)
}
/// Reads one message from the client, logs it together with the peer
/// address, and replies depending on whether the greeting matched.
fn handle_connection(mut stream: TcpStream) {
    // Fixed the garbled panic message ("couldn't not" -> "couldn't").
    let message = read_message(&stream).expect("Message couldn't be read");
    let addr = stream.peer_addr()
        .expect("Couldn't get peer address");
    println!("{}: {}", addr, message);
    // Compare against a &str directly — no need to allocate a String.
    // `write_all` retries until the whole buffer is written; the previous
    // bare `write` could silently perform a partial write.
    if message == "Hello There!\n" {
        stream.write_all(b"General Kenoby!\r\n")
            .expect("Couldn't write message to stream");
    } else {
        stream.write_all(b"You are not general Kenoby!\r\n")
            .expect("Couldn't write message to stream");
    }
}
/// Binds to `LOCAL` and serves incoming connections one at a time.
fn main() {
    let listener = TcpListener::bind(LOCAL).unwrap();
    for incoming in listener.incoming() {
        if let Ok(stream) = incoming {
            handle_connection(stream);
        } else {
            println!("Connection error");
        }
    }
}
|
use pelite;
use pelite::pe64::*;
use pelite::pattern as pat;
/// Prints the "Miscellaneous" markdown section: a fenced code block with the
/// binary's header checksums followed by every scanned offset for `dll_name`.
pub fn print(bin: PeFile<'_>, dll_name: &str) {
    println!("## Miscellaneous\n\n```");
    header(bin);
    game_version(bin);
    entity_list(bin, dll_name);
    local_entity_handle(bin, dll_name);
    global_vars(bin, dll_name);
    player_resource(bin, dll_name);
    view_render(bin, dll_name);
    println!("```\n");
}
/// Prints TimeDateStamp and CheckSum so a reader can confirm the analyzed
/// binary matches the one the offsets below were generated from.
fn header(bin: PeFile<'_>) {
    // Check if offsets are correct
    println!("TimeDateStamp = {:#x}", bin.file_header().TimeDateStamp);
    println!("CheckSum = {:#x}", bin.optional_header().CheckSum);
}
/// Scans for the entity list globals.
///
/// The pattern targets GetEntityByIndex (the function containing
/// "Index must be less than %i.\n"); the list itself is dereferenced as
/// `entity_ptr = *(uintptr_t*)(entity_list + index * 32)`.
fn entity_list(bin: PeFile<'_>, dll_name: &str) {
    let mut captures = [0; 4];
    let found = bin
        .scanner()
        .finds_code(pat!("81F9u4 7C% 85C9 79% 4863C1 488D15$'"), &mut captures);
    if !found {
        eprintln!("unable to find cl_entitylist!");
        return;
    }
    println!("NUM_ENT_ENTRIES = {:#x}", captures[1]);
    println!("{}!{:#x} cl_entitylist", dll_name, captures[2]);
}
/// Scans for the local player's entity handle global.
fn local_entity_handle(bin: PeFile<'_>, dll_name: &str) {
    let mut captures = [0; 4];
    let found = bin
        .scanner()
        .finds_code(pat!("833D${?'}FF 74? 0FB70D${'} 0FB705${'}"), &mut captures);
    if !found {
        eprintln!("unable to find LocalEntityHandle!");
        return;
    }
    println!("{}!{:#x} LocalEntityHandle", dll_name, captures[1]);
}
/// Scans for the GlobalVars pointer.
///
/// Located right above the "Client.dll Init_PostVideo() in library " string:
/// `lea r8, qword_XXX`.
fn global_vars(bin: PeFile<'_>, dll_name: &str) {
    let mut captures = [0; 4];
    let found = bin.scanner().finds_code(
        pat!("488B01 4C8D05${'} [17] $\"Client.dll Init_PostVideo\""),
        &mut captures,
    );
    if !found {
        eprintln!("unable to find GlobalVars!");
        return;
    }
    println!("{}!{:#x} GlobalVars", dll_name, captures[1]);
}
/// Scans for the PlayerResource global.
///
/// The constructor references "#UNCONNECTED_PLAYER_NAME" and the
/// C_PlayerResource vtable, and assigns `this` to the global at its very end.
fn player_resource(bin: PeFile<'_>, dll_name: &str) {
    let mut captures = [0; 4];
    let found = bin
        .scanner()
        .finds_code(pat!("488B6C24? 488BC3 48891D$'"), &mut captures);
    if !found {
        eprintln!("unable to find PlayerResource!");
        return;
    }
    println!("{}!{:#x} PlayerResource", dll_name, captures[1]);
}
/// Scans for and prints the game version string (references "gameversion.txt").
fn game_version(bin: PeFile<'_>) {
    let mut captures = [0; 4];
    let found = bin.scanner().finds_code(
        pat!("488D1D${'} C605????01 488BD3 488D0D$\"gameversion.txt\"00"),
        &mut captures,
    );
    if !found {
        eprintln!("unable to find GameVersion!");
        return;
    }
    let game_version = bin.derva_c_str(captures[1]).unwrap().to_str().unwrap();
    println!("GameVersion = {:?}", game_version);
}
/// Scans for the ViewRender global.
fn view_render(bin: PeFile<'_>, dll_name: &str) {
    let mut captures = [0; 4];
    let found = bin
        .scanner()
        .finds_code(pat!("74 34 48 8B 0D ${'} 40 0F B6 D7"), &mut captures);
    if !found {
        eprintln!("unable to find ViewRender");
        return;
    }
    println!("{}!{:#x} ViewRender", dll_name, captures[1]);
}
|
#![allow(non_snake_case)]
/// Entry point: prints a short greeting to stdout.
fn main() {
    let greeting = "Salut Rust";
    println!("{}", greeting);
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use sha2::Digest;
use sha2::Sha256;
const RESULT_CACHE_PREFIX: &str = "_result_cache";
/// Derives the cache key for a query: the lowercase-hex SHA-256 of the raw SQL.
#[inline(always)]
pub fn gen_result_cache_key(raw: &str) -> String {
    let digest = Sha256::digest(raw);
    format!("{:x}", digest)
}
/// Builds the meta-service key for one cached result:
/// `_result_cache/<tenant>/<key>`.
#[inline(always)]
pub fn gen_result_cache_meta_key(tenant: &str, key: &str) -> String {
    format!("{}/{}/{}", RESULT_CACHE_PREFIX, tenant, key)
}
/// Builds the per-tenant key prefix (with trailing slash) used to list all
/// cached results of a tenant: `_result_cache/<tenant>/`.
#[inline(always)]
pub fn gen_result_cache_prefix(tenant: &str) -> String {
    format!("{}/{}/", RESULT_CACHE_PREFIX, tenant)
}
/// Builds the storage directory for a cached result: `_result_cache/<key>`.
#[inline(always)]
pub(crate) fn gen_result_cache_dir(key: &str) -> String {
    format!("{}/{}", RESULT_CACHE_PREFIX, key)
}
/// Metadata describing one cached query result, stored in the meta service
/// under the key produced by `gen_result_cache_meta_key`.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct ResultCacheValue {
    /// The original query SQL.
    pub sql: String,
    /// Associated query id
    pub query_id: String,
    /// The query time.
    pub query_time: u64,
    /// Time-to-live of this query.
    pub ttl: u64,
    /// The size of the result cache (bytes).
    pub result_size: usize,
    /// The number of rows in the result cache.
    pub num_rows: usize,
    /// The sha256 of the partitions for each table in the query.
    pub partitions_shas: Vec<String>,
    /// The location of the result cache file.
    pub location: String,
}
|
use cid::Cid;
use crate::pb::{FlatUnixFs, PBLink, UnixFs, UnixFsType};
use alloc::borrow::Cow;
use core::fmt;
use quick_protobuf::{MessageWrite, Writer};
use sha2::{Digest, Sha256};
/// File tree builder. Implements [`core::default::Default`] which tracks the recent defaults.
///
/// Custom file tree builder can be created with [`FileAdder::builder()`] and configuring the
/// chunker and collector.
///
/// Current implementation maintains an internal buffer for the block creation and uses a
/// non-customizable hash function to produce Cid version 0 links. Currently does not support
/// inline links.
#[derive(Default)]
pub struct FileAdder {
    // Strategy deciding where chunk boundaries fall (size-based by default).
    chunker: Chunker,
    // Strategy deciding how chunks are grouped into link blocks (balanced by default).
    collector: Collector,
    // Bytes accepted by `push` that do not yet complete a chunk.
    block_buffer: Vec<u8>,
    // all unflushed links as a flat vec; this is compacted as we grow and need to create a link
    // block for the last N blocks, as decided by the collector.
    // FIXME: this is a cause of likely "accidentally quadratic" behavior visible when adding a
    // large file and using a minimal chunk size. Could be that this must be moved to Collector to
    // help collector (or layout) to decide how this should be persisted.
    unflushed_links: Vec<Link>,
}
impl fmt::Debug for FileAdder {
    // Compact one-line form: shows buffer fill/capacity and the unflushed
    // links in LinkFormatter's run-length notation.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            fmt,
            "FileAdder {{ chunker: {:?}, block_buffer: {}/{}, unflushed_links: {} }}",
            self.chunker,
            self.block_buffer.len(),
            self.block_buffer.capacity(),
            LinkFormatter(&self.unflushed_links),
        )
    }
}
/// Display helper: renders a slice of links as run-length "depth: count" pairs.
struct LinkFormatter<'a>(&'a [Link]);
impl fmt::Display for LinkFormatter<'_> {
    /// Renders the links as `[depth: count/ depth: count/ ... ]`, inserting a
    /// `0/` placeholder for every depth level skipped between two runs.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut iter = self.0.iter().peekable();
        write!(fmt, "[")?;
        // Depth of the run currently being counted; empty input prints "[]".
        let mut current = match iter.peek() {
            Some(Link { depth, .. }) => depth,
            None => return write!(fmt, "]"),
        };
        let mut count = 0;
        for Link {
            depth: next_depth, ..
        } in iter
        {
            if current == next_depth {
                count += 1;
            } else {
                // Run ended: emit it, then pad any skipped depths with "0/".
                write!(fmt, "{}: {}/", current, count)?;
                let steps_between = if current > next_depth {
                    current - next_depth
                } else {
                    next_depth - current
                };
                for _ in 0..steps_between - 1 {
                    write!(fmt, "0/")?;
                }
                count = 1;
                current = next_depth;
            }
        }
        // The final run is emitted together with the closing bracket.
        write!(fmt, "{}: {}]", current, count)
    }
}
/// Represents an intermediate structure which will be serialized into link blocks as both PBLink
/// and UnixFs::blocksize. Also holds `depth`, which helps with compaction of the link blocks
/// (see [`BalancedCollector::flush_links`]).
struct Link {
    /// Depth of this link. Zero is leaf, and anything above it is, at least for
    /// [`BalancedCollector`], the compacted link blocks.
    depth: usize,
    /// The link target
    target: Cid,
    /// Total size is dag-pb specific part of the link: aggregated size of the linked subtree.
    total_size: u64,
    /// File size is the unixfs specific blocksize for this link. In UnixFs link blocks, there is a
    /// UnixFs::blocksizes item for each link.
    file_size: u64,
}
impl fmt::Debug for Link {
    // Manual impl because Cid is rendered via Display ({}) rather than Debug.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.debug_struct("Link")
            .field("depth", &self.depth)
            .field("target", &format_args!("{}", self.target))
            .field("total_size", &self.total_size)
            .field("file_size", &self.file_size)
            .finish()
    }
}
/// Convenience type to facilitate configuring [`FileAdder`]s.
#[derive(Default)]
pub struct FileAdderBuilder {
    // Chunking strategy to hand to the built FileAdder.
    chunker: Chunker,
    // Layout/collector strategy to hand to the built FileAdder.
    collector: Collector,
}
impl FileAdderBuilder {
    /// Configures the builder to use the given chunker.
    pub fn with_chunker(mut self, chunker: Chunker) -> Self {
        self.chunker = chunker;
        self
    }

    /// Configures the builder to use the given collector or layout.
    pub fn with_collector(mut self, collector: impl Into<Collector>) -> Self {
        self.collector = collector.into();
        self
    }

    /// Returns a new FileAdder
    pub fn build(self) -> FileAdder {
        FileAdder {
            chunker: self.chunker,
            collector: self.collector,
            ..Default::default()
        }
    }
}
impl FileAdder {
    /// Returns a [`FileAdderBuilder`] for creating a non-default FileAdder.
    pub fn builder() -> FileAdderBuilder {
        FileAdderBuilder::default()
    }

    /// Returns the likely amount of buffering the file adding will work with best.
    ///
    /// When using the size based chunker and input larger than or equal to the hint is `push()`'ed
    /// to the chunker, the internal buffer will not be used.
    pub fn size_hint(&self) -> usize {
        self.chunker.size_hint()
    }

    /// Called to push new file bytes into the tree builder.
    ///
    /// Returns the newly created blocks (at most 2) and their respective Cids, and the amount of
    /// `input` consumed.
    pub fn push(&mut self, input: &[u8]) -> (impl Iterator<Item = (Cid, Vec<u8>)>, usize) {
        let (accepted, ready) = self.chunker.accept(input, &self.block_buffer);
        if self.block_buffer.is_empty() && ready {
            // save single copy as the caller is giving us whole chunks.
            //
            // TODO: though, this path does make one question if there is any point in keeping
            // block_buffer and chunker here; perhaps FileAdder should only handle pre-chunked
            // blocks and user takes care of chunking (and buffering)?
            //
            // cat file | my_awesome_chunker | my_brilliant_collector
            let leaf = Self::flush_buffered_leaf(accepted, &mut self.unflushed_links, false);
            assert!(leaf.is_some(), "chunk completed, must produce a new block");
            self.block_buffer.clear();
            let links = self.flush_buffered_links(false);
            (leaf.into_iter().chain(links.into_iter()), accepted.len())
        } else {
            // slower path as we manage the buffer.
            if self.block_buffer.capacity() == 0 {
                // delay the internal buffer creation until this point, as the caller clearly wants
                // to use it.
                self.block_buffer.reserve(self.size_hint());
            }
            self.block_buffer.extend_from_slice(accepted);
            let written = accepted.len();
            let (leaf, links) = if !ready {
                // a new block did not become ready, which means we couldn't have gotten a new cid.
                (None, Vec::new())
            } else {
                // a new leaf must be output, as well as possibly a new link block
                let leaf = Self::flush_buffered_leaf(
                    self.block_buffer.as_slice(),
                    &mut self.unflushed_links,
                    false,
                );
                assert!(leaf.is_some(), "chunk completed, must produce a new block");
                self.block_buffer.clear();
                let links = self.flush_buffered_links(false);
                (leaf, links)
            };
            (leaf.into_iter().chain(links.into_iter()), written)
        }
    }

    /// Called after the last [`FileAdder::push`] to finish the tree construction.
    ///
    /// Returns a list of Cids and their respective blocks.
    ///
    /// Note: the API will hopefully evolve in a direction which will not allocate a new Vec for
    /// every block in the near-ish future.
    pub fn finish(mut self) -> impl Iterator<Item = (Cid, Vec<u8>)> {
        let last_leaf =
            Self::flush_buffered_leaf(&self.block_buffer, &mut self.unflushed_links, true);
        let root_links = self.flush_buffered_links(true);
        // should probably error if there is neither?
        last_leaf.into_iter().chain(root_links.into_iter())
    }

    /// Returns `None` when the input is empty but there are links, otherwise a new Cid and a
    /// block.
    ///
    /// `finishing` permits emitting the (empty-file) block even with empty
    /// input, as long as no links exist yet.
    fn flush_buffered_leaf(
        input: &[u8],
        unflushed_links: &mut Vec<Link>,
        finishing: bool,
    ) -> Option<(Cid, Vec<u8>)> {
        if input.is_empty() && (!finishing || !unflushed_links.is_empty()) {
            return None;
        }
        // for empty unixfs file the bytes is missing but filesize is present.
        let data = if !input.is_empty() {
            Some(Cow::Borrowed(input))
        } else {
            None
        };
        let filesize = Some(input.len() as u64);
        let inner = FlatUnixFs {
            links: Vec::new(),
            data: UnixFs {
                Type: UnixFsType::File,
                Data: data,
                filesize,
                // no blocksizes as there are no links
                ..Default::default()
            },
        };
        let (cid, vec) = render_and_hash(&inner);
        let total_size = vec.len();
        // Record the new leaf (depth 0) so a later link block can point at it.
        let link = Link {
            depth: 0,
            target: cid.clone(),
            total_size: total_size as u64,
            file_size: input.len() as u64,
        };
        unflushed_links.push(link);
        Some((cid, vec))
    }

    // Delegates link-block creation/compaction to the configured collector.
    fn flush_buffered_links(&mut self, finishing: bool) -> Vec<(Cid, Vec<u8>)> {
        self.collector
            .flush_links(&mut self.unflushed_links, finishing)
    }

    /// Test helper for collecting all of the produced blocks; probably not a good idea outside
    /// smaller test cases. When `amt` is zero, the whole content is processed at the speed of
    /// chunker, otherwise `all_content` is pushed at `amt` sized slices with the idea of catching
    /// bugs in chunkers.
    #[cfg(test)]
    fn collect_blocks(mut self, all_content: &[u8], mut amt: usize) -> Vec<(Cid, Vec<u8>)> {
        let mut written = 0;
        let mut blocks_received = Vec::new();
        if amt == 0 {
            amt = all_content.len();
        }
        while written < all_content.len() {
            let end = written + (all_content.len() - written).min(amt);
            let slice = &all_content[written..end];
            let (blocks, pushed) = self.push(slice);
            blocks_received.extend(blocks);
            written += pushed;
        }
        let last_blocks = self.finish();
        blocks_received.extend(last_blocks);
        blocks_received
    }
}
/// Serializes `flat` with quick-protobuf, then wraps the SHA2-256 digest of
/// the serialized bytes into a CIDv0. Returns both the Cid and the bytes.
///
/// TODO: as shown in later dagger we don't really need to render the FlatUnixFs fully; we could
/// either just render a fixed header and continue with the body OR links, though the links are
/// a bit more complicated.
fn render_and_hash(flat: &FlatUnixFs<'_>) -> (Cid, Vec<u8>) {
    let mut serialized = Vec::with_capacity(flat.get_size());
    flat.write_message(&mut Writer::new(&mut serialized))
        .expect("unsure how this could fail");
    let digest = Sha256::digest(&serialized);
    let mh = multihash::wrap(multihash::Code::Sha2_256, &digest);
    let cid = Cid::new_v0(mh).expect("sha2_256 is the correct multihash for cidv0");
    (cid, serialized)
}
/// Chunker strategy
#[derive(Debug, Clone)]
pub enum Chunker {
    /// Size based chunking: every produced chunk is exactly this many bytes,
    /// except possibly the final one.
    Size(usize),
}
impl Default for Chunker {
/// Returns a default chunker which matches go-ipfs 0.6
fn default() -> Self {
Chunker::Size(256 * 1024)
}
}
impl Chunker {
fn accept<'a>(&mut self, input: &'a [u8], buffered: &[u8]) -> (&'a [u8], bool) {
use Chunker::*;
match self {
Size(max) => {
let l = input.len().min(*max - buffered.len());
let accepted = &input[..l];
let ready = buffered.len() + l >= *max;
(accepted, ready)
}
}
}
fn size_hint(&self) -> usize {
use Chunker::*;
match self {
Size(max) => *max,
}
}
}
/// Collector or layout strategy. For more information, see the [Layout section of the spec].
/// Currently only the default balanced collector/layout has been implemented.
///
/// [Layout section of the spec]: https://github.com/ipfs/specs/blob/master/UNIXFS.md#layout
#[derive(Debug, Clone)]
pub enum Collector {
    /// Balanced trees.
    Balanced(BalancedCollector),
}
impl Default for Collector {
fn default() -> Self {
Collector::Balanced(Default::default())
}
}
impl Collector {
fn flush_links(&mut self, pending: &mut Vec<Link>, finishing: bool) -> Vec<(Cid, Vec<u8>)> {
use Collector::*;
match self {
Balanced(bc) => bc.flush_links(pending, finishing),
}
}
}
/// BalancedCollector creates balanced UnixFs trees, most optimized for random access to different
/// parts of the file. Currently supports only link count threshold or the branching factor.
#[derive(Clone)]
pub struct BalancedCollector {
    // Maximum number of links per link block (tree branching factor).
    branching_factor: usize,
    // reused between link block generation
    reused_links: Vec<PBLink<'static>>,
    // reused between link block generation
    reused_blocksizes: Vec<u64>,
}
impl fmt::Debug for BalancedCollector {
    // Manual impl: only the branching factor is interesting; the reused
    // scratch buffers are deliberately omitted.
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            fmt,
            "BalancedCollector {{ branching_factor: {} }}",
            self.branching_factor
        )
    }
}
impl Default for BalancedCollector {
    /// Returns a default collector which matches go-ipfs 0.6
    ///
    /// The origin for 174 is not described in the [specs], but has likely to do something
    /// with being "good enough" regarding prefetching when reading and allows reusing some of the
    /// link blocks if parts of a longer file change.
    ///
    /// [specs]: https://github.com/ipfs/specs/blob/master/UNIXFS.md
    fn default() -> Self {
        Self::with_branching_factor(174)
    }
}
impl From<BalancedCollector> for Collector {
fn from(b: BalancedCollector) -> Self {
Collector::Balanced(b)
}
}
impl BalancedCollector {
    /// Configure Balanced collector with the given branching factor.
    ///
    /// Panics if `branching_factor` is zero.
    pub fn with_branching_factor(branching_factor: usize) -> Self {
        assert!(branching_factor > 0);
        Self {
            branching_factor,
            reused_links: Vec::new(),
            reused_blocksizes: Vec::new(),
        }
    }

    /// In-place compression of the `pending` links to a balanced hierarchy. When `finishing`, the
    /// links will be compressed iteratively from the lowest level to produce a single root link
    /// block.
    fn flush_links(&mut self, pending: &mut Vec<Link>, finishing: bool) -> Vec<(Cid, Vec<u8>)> {
        /*
        file |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -|
        links-0 |-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|E|F|G|
        links-1 |-------|-------|-------|-------|-B-----|-C-----|-D-----|\ /
        links-2 |-A-----------------------------| ^^^
                                                    ^ one short
                \--- link.depth

        pending [A, B, C, D, E, F, G]

        #flush_buffered_links(...) first iteration:

        file |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -|
        links-0 |-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|E|F|G|
        links-1 |-------|-------|-------|-------|-B-----|-C-----|-D-----|=#1==|
        links-2 |-A-----------------------------|

        pending [A, B, C, D, E, F, G] => [A, B, C, D, 1]

        new link block #1 is created for E, F, and G.

        #flush_buffered_links(...) second iteration:

        file |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -|
        links-0 |-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|
        links-1 |-------|-------|-------|-------|-B-----|-C-----|-D-----|-#1--|
        links-2 |-A-----------------------------|=========================#2==|

        pending [A, B, C, D, 1] => [A, 2]

        new link block #2 is created for B, C, D, and #1.

        #flush_buffered_links(...) last iteration:

        file |- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -|
        links-0 |-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|-|
        links-1 |-------|-------|-------|-------|-------|-------|-------|-#1--|
        links-2 |-A-----------------------------|-------------------------#2--|
        links-3 |=========================================================#3==|

        pending [A, 2] => [3]

        new link block #3 is created for A, and #2. (the root block)
        */
        let mut ret = Vec::new();
        // Take the scratch buffers out of self so they can be lent to the
        // FlatUnixFs values created below; restored at the end.
        let mut reused_links = core::mem::take(&mut self.reused_links);
        let mut reused_blocksizes = core::mem::take(&mut self.reused_blocksizes);
        if let Some(need) = self.branching_factor.checked_sub(reused_links.capacity()) {
            reused_links.reserve(need);
        }
        if let Some(need) = self
            .branching_factor
            .checked_sub(reused_blocksizes.capacity())
        {
            reused_blocksizes.reserve(need);
        }
        // Each iteration compresses one depth level into link blocks of the
        // next level up.
        'outer: for level in 0.. {
            if pending.len() == 1 && finishing
                || pending.len() <= self.branching_factor && !finishing
            {
                // when there is just a single linking block left and we are finishing, we are
                // done. It might not be part of the `ret` as will be the case with single chunk
                // files for example.
                //
                // normally when not finishing we do nothing if we don't have enough links.
                break;
            }
            // when finishing, we iterate the level to completion in blocks of
            // self.branching_factor and *insert* values at the offset of the first compressed
            // link. on following iterations this will be the index after the higher level index.
            let mut starting_point = 0;
            // when creating the link blocks, start overwriting the pending links at the first
            // found link for this depth. this index will be incremented for successive link
            // blocks.
            let mut last_overwrite = None;
            while let Some(mut first_at) = &pending[starting_point..]
                .iter()
                .position(|Link { depth, .. }| depth == &level)
            {
                // fix first_at as absolute index from being possible relative to the
                // starting_point
                first_at += starting_point;
                if !finishing && pending[first_at..].len() <= self.branching_factor {
                    if let Some(last_overwrite) = last_overwrite {
                        // drain any processed
                        pending.drain((last_overwrite + 1)..first_at);
                    }
                    break 'outer;
                }
                reused_links.clear();
                reused_blocksizes.clear();
                let mut nested_size = 0;
                let mut nested_total_size = 0;
                let last = (first_at + self.branching_factor).min(pending.len());
                for (index, link) in pending[first_at..last].iter().enumerate() {
                    assert_eq!(
                        link.depth,
                        level,
                        "unexpected link depth {} when searching at level {} index {}",
                        link.depth,
                        level,
                        index + first_at
                    );
                    Self::partition_link(
                        link,
                        &mut reused_links,
                        &mut reused_blocksizes,
                        &mut nested_size,
                        &mut nested_total_size,
                    );
                }
                debug_assert_eq!(reused_links.len(), reused_blocksizes.len());
                let inner = FlatUnixFs {
                    links: reused_links,
                    data: UnixFs {
                        Type: UnixFsType::File,
                        filesize: Some(nested_size),
                        blocksizes: reused_blocksizes,
                        ..Default::default()
                    },
                };
                let (cid, vec) = render_and_hash(&inner);
                // start overwriting at the first index of this level, then continue forward on
                // next iterations.
                let index = last_overwrite.map(|i| i + 1).unwrap_or(first_at);
                pending[index] = Link {
                    depth: level + 1,
                    target: cid.clone(),
                    total_size: nested_total_size + vec.len() as u64,
                    file_size: nested_size,
                };
                ret.push((cid, vec));
                // Reclaim the scratch buffers that were moved into `inner`.
                reused_links = inner.links;
                reused_blocksizes = inner.data.blocksizes;
                starting_point = last;
                last_overwrite = Some(index);
            }
            if let Some(last_overwrite) = last_overwrite {
                pending.truncate(last_overwrite + 1);
            }
            // this holds regardless of finishing; we would had broken 'outer had there been less
            // than full blocks left.
            debug_assert_eq!(
                pending.iter().position(|l| l.depth == level),
                None,
                "should have no more of depth {}: {}",
                level,
                LinkFormatter(pending.as_slice())
            );
        }
        self.reused_links = reused_links;
        self.reused_blocksizes = reused_blocksizes;
        ret
    }

    /// Each link needs to be partitioned into the four mut arguments received by this function in
    /// order to produce the expected UnixFs output.
    fn partition_link(
        link: &Link,
        links: &mut Vec<PBLink<'static>>,
        blocksizes: &mut Vec<u64>,
        nested_size: &mut u64,
        nested_total_size: &mut u64,
    ) {
        links.push(PBLink {
            Hash: Some(link.target.to_bytes().into()),
            Name: Some("".into()),
            Tsize: Some(link.total_size),
        });
        blocksizes.push(link.file_size);
        *nested_size += link.file_size;
        *nested_total_size += link.total_size;
    }
}
#[cfg(test)]
mod tests {
use super::{BalancedCollector, Chunker, FileAdder};
use crate::test_support::FakeBlockstore;
use cid::Cid;
use core::convert::TryFrom;
use hex_literal::hex;
// Each scenario is (max_chunk, input_len, existing_len) -> (accepted_len, chunk_ready).
#[test]
fn test_size_chunker() {
assert_eq!(size_chunker_scenario(1, 4, 0), (1, true));
assert_eq!(size_chunker_scenario(2, 4, 0), (2, true));
assert_eq!(size_chunker_scenario(2, 1, 0), (1, false));
assert_eq!(size_chunker_scenario(2, 1, 1), (1, true));
assert_eq!(size_chunker_scenario(32, 3, 29), (3, true));
// this took some debugging time:
assert_eq!(size_chunker_scenario(32, 4, 29), (3, true));
}
// Offers `input_len` zero bytes to a size chunker already holding
// `existing_len` bytes; reports how many bytes were accepted and whether a
// chunk became ready.
fn size_chunker_scenario(max: usize, input_len: usize, existing_len: usize) -> (usize, bool) {
let input = vec![0; input_len];
let existing = vec![0; existing_len];
let (accepted, ready) = Chunker::Size(max).accept(&input, &existing);
(accepted.len(), ready)
}
// "foobar\n" fits in the default chunk size, so the only emitted block is the root.
#[test]
fn favourite_single_block_file() {
let blocks = FakeBlockstore::with_fixtures();
// everyones favourite content
let content = b"foobar\n";
let mut adder = FileAdder::default();
{
let (mut ready_blocks, bytes) = adder.push(content);
assert!(ready_blocks.next().is_none());
assert_eq!(bytes, content.len());
}
// real impl would probably hash this ... except maybe hashing is faster when done inline?
// or maybe not
let (_, file_block) = adder
.finish()
.next()
.expect("there must have been the root block");
assert_eq!(
blocks.get_by_str("QmRgutAxd8t7oGkSm4wmeuByG6M51wcTso6cubDdQtuEfL"),
file_block.as_slice()
);
}
// With 2-byte chunks "foobar\n" splits into four leaves plus a root.
#[test]
fn favourite_multi_block_file() {
// root should be QmRJHYTNvC3hmd9gJQARxLR1QMEincccBV53bBw524yyq6
let blocks = FakeBlockstore::with_fixtures();
let content = b"foobar\n";
let adder = FileAdder::builder().with_chunker(Chunker::Size(2)).build();
let blocks_received = adder.collect_blocks(content, 0);
// the order here is "fo", "ob", "ar", "\n", root block
// while verifying the root Cid would be *enough* this is easier to eyeball, ... not really
// that much but ...
let expected = [
"QmfVyMoStzTvdnUR7Uotzh82gmL427q9z3xW5Y8fUoszi4",
"QmdPyW4CWE3QBkgjWfjM5f7Tjb3HukxVuBXZtkqAGwsMnm",
"QmNhDQpphvMWhdCzP74taRzXDaEfPGq8vWfFRzD7mEgePM",
"Qmc5m94Gu7z62RC8waSKkZUrCCBJPyHbkpmGzEePxy2oXJ",
"QmRJHYTNvC3hmd9gJQARxLR1QMEincccBV53bBw524yyq6",
]
.iter()
.map(|key| {
let cid = Cid::try_from(*key).unwrap();
let block = blocks.get_by_str(key).to_vec();
(cid, block)
})
.collect::<Vec<_>>();
assert_eq!(blocks_received, expected);
}
// 1-byte chunks force a three-level tree; compared against go-ipfs 0.5 output.
#[test]
fn three_layers() {
let content = b"Lorem ipsum dolor sit amet, sit enim montes aliquam. Cras non lorem, \
rhoncus condimentum, irure et ante. Pulvinar suscipit odio ante, et tellus a enim, \
wisi ipsum, vel rhoncus eget faucibus varius, luctus turpis nibh vel odio nulla pede.";
assert!(content.len() > 174 && content.len() < 2 * 174);
// go-ipfs 0.5 result: QmRQ6NZNUs4JrCT2y7tmCC1wUhjqYuTssB8VXbbN3rMffg, 239 blocks and root
// root has two links:
// - QmXUcuLGKc8SCMEqG4wgct6NKsSRZQfvB2FCfjDow1PfpB (174 links)
// - QmeEn8dxWTzGAFKvyXoLj4oWbh9putL4vSw4uhLXJrSZhs (63 links)
//
// in future, if we ever add inline Cid generation this test would need to be changed not
// to use those inline cids or raw leaves
let adder = FileAdder::builder().with_chunker(Chunker::Size(1)).build();
let blocks_received = adder.collect_blocks(content, 0);
assert_eq!(blocks_received.len(), 240);
assert_eq!(
blocks_received.last().unwrap().0.to_string(),
"QmRQ6NZNUs4JrCT2y7tmCC1wUhjqYuTssB8VXbbN3rMffg"
);
}
// The resulting root must not depend on how the input is sliced when pushed.
#[test]
fn three_layers_all_subchunks() {
let content = b"Lorem ipsum dolor sit amet, sit enim montes aliquam. Cras non lorem, \
rhoncus condimentum, irure et ante. Pulvinar suscipit odio ante, et tellus a enim, \
wisi ipsum, vel rhoncus eget faucibus varius, luctus turpis nibh vel odio nulla pede.";
for amt in 1..32 {
let adder = FileAdder::builder().with_chunker(Chunker::Size(32)).build();
let blocks_received = adder.collect_blocks(content, amt);
assert_eq!(
blocks_received.last().unwrap().0.to_string(),
"QmYSLcVQqxKygiq7x9w1XGYxU29EShB8ZemiaQ8GAAw17h",
"amt: {}",
amt
);
}
}
// An empty file still produces exactly one (empty unixfs File) block.
#[test]
fn empty_file() {
let blocks = FileAdder::default().collect_blocks(b"", 0);
assert_eq!(blocks.len(), 1);
// 0a == field dag-pb body (unixfs)
// 04 == dag-pb body len, varint, 4 bytes
// 08 == field type tag, varint, 1 byte
// 02 == field type (File)
// 18 == field filesize tag, varint
// 00 == filesize, varint, 1 byte
assert_eq!(blocks[0].1.as_slice(), &hex!("0a 04 08 02 18 00"));
assert_eq!(
blocks[0].0.to_string(),
"QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH"
);
}
// Exercises the link-block overflow path just past the branching factor.
#[test]
fn full_link_block_and_a_byte() {
let buf = vec![0u8; 2];
// this should produce a root with two links
// +----------^---+
// | |
// |----------------------| |-| <-- link blocks
// ^^^^^^^^^^^^^^^^^^^^^^ ^
// 174 blocks \--- 1 block
let branching_factor = 174;
let mut adder = FileAdder::builder()
.with_chunker(Chunker::Size(2))
.with_collector(BalancedCollector::with_branching_factor(branching_factor))
.build();
let mut blocks_count = 0;
for _ in 0..branching_factor {
let (blocks, written) = adder.push(buf.as_slice());
assert_eq!(written, buf.len());
blocks_count += blocks.count();
}
let (blocks, written) = adder.push(&buf[0..1]);
assert_eq!(written, 1);
blocks_count += blocks.count();
let last_blocks = adder.finish().collect::<Vec<_>>();
blocks_count += last_blocks.len();
// chunks == 174
// one link block for 174
// one is for the single byte block
// one is a link block for the singular single byte block
// other is for the root block
assert_eq!(blocks_count, branching_factor + 1 + 1 + 1 + 1);
assert_eq!(
last_blocks.last().unwrap().0.to_string(),
"QmcHNWF1d56uCDSfJPA7t9fadZRV9we5HGSTGSmwuqmMP9"
);
}
// Exactly a full link block: finish() emits one link block and no extra root.
#[test]
fn full_link_block() {
let buf = vec![0u8; 1];
let branching_factor = 174;
let mut adder = FileAdder::builder()
.with_chunker(Chunker::Size(1))
.with_collector(BalancedCollector::with_branching_factor(branching_factor))
.build();
let mut blocks_count = 0;
for _ in 0..branching_factor {
let (blocks, written) = adder.push(buf.as_slice());
assert_eq!(written, buf.len());
blocks_count += blocks.count();
}
let mut last_blocks = adder.finish();
// go-ipfs waits until finish to get a single link block, no additional root block
let last_block = last_blocks.next().expect("must not have flushed yet");
blocks_count += 1;
assert_eq!(last_blocks.next(), None);
assert_eq!(
last_block.0.to_string(),
"QmdgQac8c6Bo3MP5bHAg2yQ25KebFUsmkZFvyByYzf8UCB"
);
assert_eq!(blocks_count, 175);
}
}
|
use std::{fmt::Display, ops::Deref};
use super::private::Sealed;
/// A type-level group of booleans.
pub trait Boolean: Sealed {}

/// Witnesses a type that is `true`.
pub trait Truth: Boolean {}

/// Witnesses a type that is `false`.
pub trait Falsity: Boolean {}

/// Type-level boolean corresponding to `true`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
pub struct True;

/// Type-level boolean corresponding to `false`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
pub struct False;

impl Boolean for True {}
impl Boolean for False {}
impl Truth for True {}
impl Falsity for False {}

// Each witness dereferences to the runtime `bool` it stands for, so a value
// of `True`/`False` can be used wherever a `&bool` is expected.
impl Deref for True {
    type Target = bool;

    fn deref(&self) -> &Self::Target {
        &true
    }
}

impl Deref for False {
    type Target = bool;

    fn deref(&self) -> &Self::Target {
        &false
    }
}

impl Display for True {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("True")
    }
}

impl Display for False {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("False")
    }
}
|
/// Raw ability sextet shared by both the score and modifier wrappers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Abilities {
    pub _str: isize,
    pub _dex: isize,
    pub _con: isize,
    pub _int: isize,
    pub _wis: isize,
    pub _cha: isize,
}

/// Newtype for raw ability scores (typically 1..=20).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AbilityScores(Abilities);

/// Newtype for derived ability modifiers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct AbilityMods(Abilities);

impl From<AbilityScores> for AbilityMods {
    /// Derives each modifier from its score via `(score - 10) / 2`.
    ///
    /// NOTE(review): `/ 2` truncates toward zero in Rust, so a score of 9
    /// yields 0 rather than the floor-division result -1 — confirm intent.
    fn from(ab: AbilityScores) -> Self {
        let modifier = |score: isize| (score - 10) / 2;
        let AbilityScores(scores) = ab;
        Self(Abilities {
            _str: modifier(scores._str),
            _dex: modifier(scores._dex),
            _con: modifier(scores._con),
            _int: modifier(scores._int),
            _wis: modifier(scores._wis),
            _cha: modifier(scores._cha),
        })
    }
}
|
extern crate crossbeam;
use crossbeam::{atomic::AtomicCell, thread};
use std::f64::consts::PI;
use std::thread as sthread;
use std::time::{Duration, SystemTime};
/// Desired number of completed batches ("tocks") per second.
const TARGET: u32 = 10;
/// Initial batch size; the inspector adapts it at runtime.
const GUESS: u32 = 2500000;

/// Busy-loop worker: pulls the current state (x, d, ticks) out of the shared
/// cells, advances the Leibniz series for pi/4 by one batch of terms, then
/// publishes the new state and bumps the completed-batch counter.
fn computer(
    xr: &AtomicCell<f64>,
    dr: &AtomicCell<f64>,
    ticks: &AtomicCell<u32>,
    tocks: &AtomicCell<u32>,
) {
    loop {
        let mut sum = xr.load();
        let mut denom = dr.load();
        let batch = ticks.load();
        // NOTE(review): `1..batch` performs batch-1 iterations, not batch;
        // the adaptive control loop masks the difference — confirm intent.
        for _ in 1..batch {
            sum += 1.0 / denom;
            denom += 2.0;
            sum -= 1.0 / denom;
            denom += 2.0;
        }
        tocks.fetch_add(1);
        xr.store(sum);
        dr.store(denom);
    }
}
/// Samples the shared state once per second and adapts `ticks` (the batch
/// size used by `computer`) so roughly `TARGET` batches complete per second.
/// Prints elapsed time, current parameters, and the error of the running
/// Leibniz approximation (`PI - 4.0 * x`).
fn inspector(
    xr: &AtomicCell<f64>,
    dr: &AtomicCell<f64>,
    ticksr: &AtomicCell<u32>,
    tocksr: &AtomicCell<u32>,
) {
    // NOTE(review): `old_d` is never updated, so `d - old_d` always measures
    // progress since startup rather than per second — confirm intent.
    let old_d = 1.0;
    // `mut` removed: `elapsed()` only borrows immutably.
    let now = SystemTime::now();
    loop {
        sthread::sleep(Duration::from_secs(1));
        let x = xr.load();
        let d = dr.load();
        let tocks = tocksr.load();
        let ticks = ticksr.load();
        if tocks <= TARGET {
            // Too few batches finished: the batch is too large, halve it.
            ticksr.store(ticks / 2);
        } else {
            // Plain `else` — the old `else if tocks > TARGET` re-tested the
            // exact complement of the first condition.
            ticksr.store(ticks + ticks / 10);
        }
        println!("{:?} {} {} {} {}", now.elapsed().unwrap(), ticks, tocks, d - old_d, PI - 4.0 * x);
        // Reset the batch counter for the next one-second window.
        tocksr.store(0);
    }
}
/// Spawns the worker on a scoped thread and runs the inspector on the main
/// thread; the scope lets both borrow the stack-local atomic cells.
fn main() {
    println!("Atomic crossbeam version");
    let x = AtomicCell::new(0.0);
    let d = AtomicCell::new(1.0);
    let ticks = AtomicCell::new(GUESS);
    let tocks = AtomicCell::new(0);
    thread::scope(|scope| {
        scope.spawn(|_| computer(&x, &d, &ticks, &tocks));
        inspector(&x, &d, &ticks, &tocks);
    })
    .unwrap();
}
|
use binrw::{BinRead, BinWrite, BinReaderExt, BinWriterExt, BinResult, io::Cursor};
/// Extension trait: decode a `BinRead` type from a big-endian byte buffer.
pub trait BytesDecodeExt: BinRead
where
    <Self as BinRead>::Args: Default,
{
    /// Reads one value from the start of `bytes`, interpreting multi-byte
    /// fields as big-endian.
    fn decode<T: AsRef<[u8]>>(bytes: T) -> BinResult<Self> {
        Cursor::new(bytes).read_be()
    }
}
/// Extension trait: encode a `BinWrite` type into big-endian bytes.
pub trait BytesEncodeExt: BinWrite
where
    <Self as BinWrite>::Args: Default,
{
    /// Serializes this value big-endian into a freshly allocated buffer.
    fn encode(&self) -> BinResult<Vec<u8>> {
        let mut buf = Vec::new();
        Cursor::new(&mut buf).write_be(&self)?;
        Ok(buf)
    }
}
// Blanket impls: every BinRead/BinWrite type whose parse/write arguments are
// defaultable gets the byte-buffer helpers for free.
impl<T: BinRead> BytesDecodeExt for T
where
    <Self as BinRead>::Args: Default,
{}
impl<T: BinWrite> BytesEncodeExt for T
where
    <Self as BinWrite>::Args: Default,
{}
#[cfg(test)]
mod test {
use hex_literal::hex;
use binrw::{BinRead, BinWrite};
use super::{BytesDecodeExt, BytesEncodeExt};
// Round-trip fixture: the struct layout below must match BYTES field-for-field.
#[derive(BinRead, BinWrite)]
struct Example {
a: u64,
b: u32,
c: u16,
d: [u8; 4],
e: i32,
}
// 8 + 4 + 2 + 4 + 4 = 22 bytes, all big-endian.
const BYTES: [u8; 22] = hex!("0000 017C A687 618D 2232 75F6 5F49 00 01 02 FF FA45 D360");
const A: u64 = 1634881462669;
const B: u32 = 573732342;
const C: u16 = 24393;
const D: [u8; 4] = [0, 1, 2, 255];
const E: i32 = -96087200;
// Decoding the fixture bytes must recover every field value.
#[test]
fn test_decode() {
let example = Example::decode(BYTES).unwrap();
assert_eq!(example.a, A);
assert_eq!(example.b, B);
assert_eq!(example.c, C);
assert_eq!(example.d, D);
assert_eq!(example.e, E);
}
// Encoding the field values must reproduce the fixture bytes exactly.
#[test]
fn test_encode() {
let example = Example { a: A, b: B, c: C, d: D, e: E };
assert_eq!(example.encode().unwrap(), BYTES);
}
}
|
mod p1;
mod p2;
mod p3;
fn main() {
println!("{}", p1::p1(1_000));
println!("{}", p2::p2(4_000_000));
println!("{}", p3::p3(600_851_475_143));
} |
use super::*;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
// Notes:
// * Particle & constraint vectors are kept private to prevent mismatch between
// zero-based and one-based indexing.
// * These private vectors are zero-based (no dummy element at zero index).
// * Everything here that is called 'num' is a one-based index.
// A single point mass integrated with either Euler or Verlet.
#[derive(Debug, Copy, Clone)]
pub struct Particle {
// Inactive particles are skipped by the whole-system integrators.
pub active: bool,
pub pos: Vec2,
// Previous position; drives the Verlet integrator.
pub old_pos: Vec2,
// Explicit velocity; used only by the Euler integrator.
pub velocity: Vec2,
// Force accumulator, cleared after each integration step.
pub force: Vec2,
// Reciprocal mass; 0.0 marks an immovable particle for constraints.
pub one_over_mass: f32,
pub timestep: f32,
pub gravity: f32,
// Damping factor applied to velocity in the Euler step.
pub e_damping: f32,
// Damping factor applied to the position delta in the Verlet step.
pub v_damping: f32,
}
// A distance constraint between two particles.
#[derive(Debug, Default, Copy, Clone)]
pub struct Constraint {
pub active: bool,
// One-based numbers of the two constrained particles.
pub particle_num: (usize, usize),
// Separation the constraint tries to maintain.
pub rest_length: f32,
}
// Owns all particles and constraints. Storage is zero-based and private;
// public accessors take one-based numbers (see the module notes above).
#[derive(Debug, Default, Clone)]
pub struct ParticleSystem {
particles: Vec<Particle>,
constraints: Vec<Constraint>,
}
impl Default for Particle {
    /// An inactive particle at the origin with no mass, motion or forces.
    fn default() -> Particle {
        Particle {
            active: false,
            one_over_mass: 0.0,
            timestep: 0.0,
            gravity: 0.0,
            e_damping: 0.0,
            v_damping: 0.0,
            pos: Vec2::ZERO,
            old_pos: Vec2::ZERO,
            velocity: Vec2::ZERO,
            force: Vec2::ZERO,
        }
    }
}
impl Particle {
// Advances one explicit Euler step, then clears the accumulated force.
// NOTE(review): the velocity update multiplies by timestep^2 (not timestep)
// and the position adds raw velocity — confirm the intended units.
pub fn euler(&mut self) {
self.old_pos = self.pos;
// Gravity enters as a per-step force increment on the y axis.
self.force.y += self.gravity;
self.velocity += self.force * self.one_over_mass * self.timestep.powi(2);
self.pos += self.velocity;
// Damp velocity after the move.
self.velocity *= self.e_damping;
self.force = Vec2::ZERO;
}
// Advances one Verlet step. `a - b` below expands to
// pos + v_damping * (pos - old_pos), i.e. a damped position delta,
// to which the force term (accel * dt^2) is added. Clears the force.
pub fn verlet(&mut self) {
let a = self.pos * (1.0 + self.v_damping);
let b = self.old_pos * self.v_damping;
self.old_pos = self.pos;
self.force.y += self.gravity;
self.pos = a - b + self.force * self.one_over_mass * self.timestep.powi(2);
self.force = Vec2::ZERO;
}
}
impl Constraint {
    /// Builds an active constraint between two one-based particle numbers
    /// that tries to keep them `rest_length` apart.
    pub fn new(a_num: usize, b_num: usize, rest_length: f32) -> Constraint {
        Constraint {
            particle_num: (a_num, b_num),
            rest_length,
            active: true,
        }
    }
}
impl ParticleSystem {
#[allow(dead_code)]
pub fn new() -> ParticleSystem {
Default::default()
}
// All `*_num` parameters below are one-based and are translated to the
// zero-based private vectors; out-of-range numbers panic on indexing.
pub fn active(&self, particle_num: usize) -> bool {
self.particles[particle_num - 1].active
}
pub fn pos(&self, particle_num: usize) -> Vec2 {
self.particles[particle_num - 1].pos
}
pub fn pos_mut(&mut self, particle_num: usize) -> &mut Vec2 {
&mut self.particles[particle_num - 1].pos
}
pub fn old_pos(&self, particle_num: usize) -> Vec2 {
self.particles[particle_num - 1].old_pos
}
pub fn old_pos_mut(&mut self, particle_num: usize) -> &mut Vec2 {
&mut self.particles[particle_num - 1].old_pos
}
// Read-only views over the zero-based storage.
pub fn particles(&self) -> &[Particle] {
&self.particles
}
pub fn constraints(&self) -> &[Constraint] {
&self.constraints
}
// Verlet-integrates every active particle, then relaxes all constraints once.
pub fn do_verlet_timestep(&mut self) {
for particle in self.particles.iter_mut() {
if particle.active {
particle.verlet();
}
}
self.satisfy_constraints();
}
// Steps a single particle and relaxes a single constraint (both one-based).
// Note: unlike the whole-system variant, the `active` flag is not checked.
pub fn do_verlet_timestep_for(&mut self, particle_num: usize, constraint_num: usize) {
self.particles[particle_num - 1].verlet();
self.satisfy_constraint_for(constraint_num);
}
// Euler-integrates every active particle ("eurler" typo kept: public API).
#[allow(dead_code)]
pub fn do_eurler_timestep(&mut self) {
for particle in self.particles.iter_mut() {
if particle.active {
particle.euler();
}
}
}
#[allow(dead_code)]
pub fn do_eurler_timestep_for(&mut self, particle_num: usize) {
self.particles[particle_num - 1].euler()
}
// One relaxation pass over all active constraints.
pub fn satisfy_constraints(&mut self) {
for constraint in self.constraints.iter() {
if constraint.active {
Self::satisfy_constraint(constraint, &mut self.particles);
}
}
}
pub fn satisfy_constraint_for(&mut self, constraint_num: usize) {
Self::satisfy_constraint(&self.constraints[constraint_num - 1], &mut self.particles);
}
// Moves each movable endpoint half of the length error toward/away from the
// other. Particles with one_over_mass == 0.0 are pinned and never moved;
// a zero-length delta is skipped to avoid dividing by zero.
fn satisfy_constraint(constraint: &Constraint, particles: &mut [Particle]) {
let (a, b) = (constraint.particle_num.0 - 1, constraint.particle_num.1 - 1);
let delta = particles[b].pos - particles[a].pos;
let length = delta.length();
if length > 0.0 {
let diff = (length - constraint.rest_length) / length;
if particles[a].one_over_mass > 0.0 {
particles[a].pos += delta * diff / 2.0;
}
if particles[b].one_over_mass > 0.0 {
particles[b].pos -= delta * diff / 2.0;
}
}
}
// Loads "objects/<file_name>" via the game filesystem. Expected layout:
// a header line, then particle records of three lines each (x, skipped, z)
// until a line reading CONSTRAINTS, then pairs of one-based particle-number
// lines (first character skipped by read_index) until ENDFILE/EOF.
// Rest lengths are taken from the loaded particle positions.
// Panics on malformed numeric lines or missing files.
pub fn load_from_file(
fs: &mut Filesystem,
file_name: &str,
scale: f32,
timestep: f32,
gravity: f32,
e_damping: f32,
v_damping: f32,
) -> ParticleSystem {
let mut path = PathBuf::from("objects/");
path.push(file_name);
let file = fs.open(&path).expect("Error opening object file.");
let mut line = String::new();
let mut buf = BufReader::new(file);
let mut particles: Vec<Particle> = Vec::new();
let mut constraints: Vec<Constraint> = Vec::new();
// Reads the next line into `line`, replacing its previous contents.
let read_line = |buf: &mut BufReader<File>, line: &mut String| {
line.clear();
buf.read_line(line).ok();
};
// Reads the next line and parses it as an f32 (panics on bad input).
let read_f32 = |buf: &mut BufReader<File>, line: &mut String| -> f32 {
read_line(buf, line);
line.trim().parse().unwrap()
};
// Parses a particle number, skipping the line's first character.
let read_index = |line: &str| -> usize {
let mut chars = line.chars();
chars.next();
chars.as_str().trim().parse().unwrap()
};
// Skip the header line, then read particles until the CONSTRAINTS marker.
read_line(&mut buf, &mut line);
while line.trim() != "CONSTRAINTS" {
let x = read_f32(&mut buf, &mut line);
let _ = read_f32(&mut buf, &mut line);
let z = read_f32(&mut buf, &mut line);
// Mirror and scale into screen space; x additionally squashed by 1.2.
let p = vec2(-x * scale / 1.2, -z * scale);
particles.push(Particle {
active: true,
pos: p,
old_pos: p,
velocity: Vec2::ZERO,
force: Vec2::ZERO,
one_over_mass: 1.0,
timestep,
gravity,
e_damping,
v_damping,
});
read_line(&mut buf, &mut line);
}
loop {
let pa_num = {
read_line(&mut buf, &mut line);
if line.is_empty() || line.trim() == "ENDFILE" {
break;
}
read_index(&line)
};
let pb_num = {
read_line(&mut buf, &mut line);
read_index(&line)
};
let delta = particles[pa_num - 1].pos - particles[pb_num - 1].pos;
constraints.push(Constraint::new(pa_num, pb_num, delta.length()));
}
ParticleSystem {
particles,
constraints,
}
}
}
|
//#![allow(warnings, unused_variables, dead_code, improper_ctypes, non_camel_case_types, non_snake_case, non_upper_case_globals)]
use core::alloc::{GlobalAlloc, Layout};
use core::fmt;
use core::ptr;
//use crate::mutex::Mutex;
use ::os::kernel_malloc;
/// Stub global allocator backend for the FreeBSD kernel port.
pub struct FreebsdAllocator;

/// `LocalAlloc` is an analogous trait to the standard library's `GlobalAlloc`,
/// but it takes `&mut self` in `alloc()` and `dealloc()`.
pub trait LocalAlloc {
    /// Allocates memory for `layout`; returns a null pointer on failure.
    unsafe fn alloc(&mut self, layout: Layout) -> *mut u8;
    /// Releases memory previously returned by `alloc` for the same `layout`.
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
}

// SAFETY: this backend is a stub. `alloc` always signals failure by
// returning null (which the `GlobalAlloc` contract permits), so no live
// allocation can ever be handed to `dealloc`.
unsafe impl GlobalAlloc for FreebsdAllocator {
    /// Always fails: every request yields a null pointer.
    unsafe fn alloc(&self, _layout: Layout) -> *mut u8 {
        ptr::null_mut()
    }

    /// Unreachable while `alloc` never succeeds; panics with a diagnostic
    /// instead of the old placeholder message if it is ever called.
    /// (`ptr` renamed `_ptr` to silence the unused-parameter warning.)
    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {
        panic!("FreebsdAllocator::dealloc called, but alloc never succeeds")
    }
}
|
pub mod scan_sheet_elements;
pub mod scan_sheet_layout;
use crate::make::scan_sheet_layout::{HighLevelPageDescription, HighLevelField, HighLevelKind};
/// Returns a hard-coded demo page description: a title plus four fields
/// (three booleans and one two-digit seven-segment display).
pub fn dummy() -> HighLevelPageDescription {
    // Small helper so the three boolean fields are built uniformly.
    let boolean_field = |descriptor: &str| HighLevelField {
        kind: HighLevelKind::Boolean,
        descriptor: String::from(descriptor),
    };
    HighLevelPageDescription {
        document_title: String::from("test1"),
        fields: vec![
            boolean_field("boom"),
            boolean_field("another"),
            HighLevelField {
                kind: HighLevelKind::SevenSegmentDisplay(2),
                descriptor: String::from("another one"),
            },
            boolean_field("hardcore"),
        ],
    }
}
|
/// Advent of Code 2020 day 3: counts the trees ('#') hit while descending
/// the grid with slope (right 3, down 1), wrapping horizontally.
fn main() {
    let contents = std::fs::read_to_string("./res/input.txt").expect("Could not read file!");
    let lines: Vec<&str> = contents.lines().collect();
    println!("{}", count_trees(&lines));
}

/// Counts '#' cells along the slope (3, 1): row `y` is sampled at column
/// `(3 * y) % width`. Row 0 (the start position) is never sampled, matching
/// the original loop, which incremented before its first check. Unlike the
/// original, a grid with fewer than two rows (or an empty first row) returns
/// 0 instead of panicking on an out-of-range `unwrap`.
fn count_trees(lines: &[&str]) -> usize {
    // Width comes from the first row; bail out gracefully when absent.
    let width = match lines.first() {
        Some(first) if !first.is_empty() => first.len(),
        _ => return 0,
    };
    lines
        .iter()
        .enumerate()
        .skip(1) // descent starts one row below the top-left corner
        .filter(|&(y, row)| row.chars().nth((3 * y) % width) == Some('#'))
        .count()
}
|
//! UDP echo server.
//!
//! To send messages do:
//! ```sh
//! $ nc -u localhost 8080
//! ```
use runtime::net::UdpSocket;
#[runtime::main]
async fn main() -> std::io::Result<()> {
// Bind once; the same socket both receives and echoes.
let mut socket = UdpSocket::bind("127.0.0.1:8080")?;
// 1 KiB receive buffer, reused for every datagram.
let mut buf = vec![0u8; 1024];
println!("Listening on {}", socket.local_addr()?);
// Echo loop: send each datagram back to its sender, forever.
loop {
let (recv, peer) = socket.recv_from(&mut buf).await?;
// Only the freshly received prefix of the buffer is echoed.
let sent = socket.send_to(&buf[..recv], &peer).await?;
println!("Sent {} out of {} bytes to {}", sent, recv, peer);
}
}
|
use file_reader;
const INPUT_FILENAME: &str = "input.txt";
/// Highest row index (rows 0..=127, encoded by the first 7 characters).
const ROW_MAX: u32 = 127;
/// Highest column index (columns 0..=7, encoded by the last 3 characters).
const COL_MAX: u32 = 7;
/// Number of leading characters that encode the row.
const NUM_ROW_CHARS: usize = 7;

/// Reads all boarding passes and prints the highest seat id found.
fn main() {
    let lines = match file_reader::file_to_vec(INPUT_FILENAME) {
        Ok(v) => v,
        Err(_) => {
            println!("Couldn't turn file into vec!");
            return;
        }
    };
    let highest = lines.into_iter().map(process_input).max().unwrap();
    println!("{:?}", highest);
}
/// Decodes one boarding pass into its seat id (`row * 8 + column`).
///
/// The first `NUM_ROW_CHARS` characters binary-partition the row range
/// ('F' keeps the lower half, 'B' the upper); the remaining characters
/// partition the column range ('L' lower, 'R' upper). Any other character
/// leaves the range untouched and prints "Bad input".
fn process_input(input: String) -> u32 {
    let (rows, cols) = input.split_at(NUM_ROW_CHARS);
    let (mut row_min, mut row_max) = (0, ROW_MAX);
    let (mut col_min, mut col_max) = (0, COL_MAX);
    for c in rows.chars() {
        match c {
            'F' => row_max = (row_min + row_max - 1) / 2,
            'B' => row_min = (row_min + row_max + 1) / 2,
            _ => println!("Bad input"),
        };
    }
    for c in cols.chars() {
        match c {
            'L' => col_max = (col_min + col_max - 1) / 2,
            'R' => col_min = (col_min + col_max + 1) / 2,
            _ => println!("Bad input"),
        };
    }
    // After a full scan min == max; keep using the max bound like before.
    (row_max << 3) + col_max
}
|
/*
给定一个二叉树,返回其节点值的锯齿形层次遍历。(即先从左往右,再从右往左进行下一层遍历,以此类推,层与层之间交替进行)。
例如:
给定二叉树 [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
返回锯齿形层次遍历如下:
[
[3],
[20,9],
[15,7]
]
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/binary-tree-zigzag-level-order-traversal
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
*/
use std::cell::RefCell;
use std::collections::VecDeque;
use std::rc::Rc;
// Definition for a binary tree node.
#[derive(Debug, PartialEq, Eq)]
pub struct TreeNode {
// Node payload.
pub val: i32,
// Shared, mutable child links, as required by the LeetCode Rust API.
pub left: Option<Rc<RefCell<TreeNode>>>,
pub right: Option<Rc<RefCell<TreeNode>>>,
}
impl TreeNode {
#[inline]
pub fn new(val: i32) -> Self {
TreeNode {
val,
left: None,
right: None,
}
}
}
impl Solution {
// Breadth-first zigzag traversal.
//
// The queue `v` holds `Option`s: `Some(node)` entries for the current level
// and a `None` sentinel marking the level boundary. Children of the level
// are gathered into `temp_vec` in the direction of the current pass and
// later popped from the back, reversing them for the next pass.
pub fn zigzag_level_order(root: Option<Rc<RefCell<TreeNode>>>) -> Vec<Vec<i32>> {
if root.is_none() {
return vec![];
}
let mut v = VecDeque::new();
v.push_back(root);
v.push_back(None);
let mut res = vec![];
let mut temp = vec![];
let mut temp_vec = VecDeque::new();
let mut left2right = true;
loop {
match v.pop_front().unwrap() {
Some(n) => {
temp.push(n.borrow().val);
// Detach the children so each node is enqueued exactly once.
let left = std::mem::replace(&mut n.borrow_mut().left, None);
let right = std::mem::replace(&mut n.borrow_mut().right, None);
if left2right {
if left.is_some() {
temp_vec.push_back(left);
}
if right.is_some() {
temp_vec.push_back(right);
}
} else {
if right.is_some() {
temp_vec.push_back(right);
}
if left.is_some() {
temp_vec.push_back(left);
}
}
}
None => {
// Level finished: flush the collected values, then enqueue the
// gathered children (reversed via pop_back) plus a new sentinel.
res.push(std::mem::replace(&mut temp, vec![]));
if temp_vec.is_empty() {
break;
}
while let Some(node) = temp_vec.pop_back() {
v.push_back(node);
}
left2right = !left2right;
v.push_back(None)
}
}
}
res
}
}
/// Builds the tree [1, 2, 3, 4, null, null, 5] and prints its zigzag order.
fn main() {
    let mut left_child = TreeNode::new(2);
    left_child.left = Some(Rc::new(RefCell::new(TreeNode::new(4))));
    let mut right_child = TreeNode::new(3);
    right_child.right = Some(Rc::new(RefCell::new(TreeNode::new(5))));
    let mut root = TreeNode::new(1);
    root.left = Some(Rc::new(RefCell::new(left_child)));
    root.right = Some(Rc::new(RefCell::new(right_child)));
    let result = Solution::zigzag_level_order(Some(Rc::new(RefCell::new(root))));
    println!("{:?}", result);
}
// Zero-sized namespace type for the LeetCode-style `Solution` API.
struct Solution {}
|
// Copyright 2020 Jesper de Jong
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::convert::TryFrom;
use std::ops::{Add, AddAssign, Div, DivAssign, Index, IndexMut, Mul, MulAssign, Neg, Range, Sub, SubAssign};
use std::sync::Arc;
use array_macro::array;
/// Scalar type used for the elements of points, vectors and matrices.
// Single switch point for the whole crate's floating-point precision.
pub type Scalar = f32;
/// Trait for types that have `min()` and `max()` methods.
pub trait MinMax {
/// Compares and returns the minimum of two values.
fn min(self, other: Self) -> Self;
/// Compares and returns the maximum of two values.
fn max(self, other: Self) -> Self;
}
/// Compares and returns the minimum of two values.
// Free-function form of `MinMax::min`, convenient for element-wise impls.
#[inline]
pub fn min<T: MinMax>(a: T, b: T) -> T {
a.min(b)
}
/// Compares and returns the maximum of two values.
#[inline]
pub fn max<T: MinMax>(a: T, b: T) -> T {
a.max(b)
}
/// Trait for types for which a distance between values can be computed.
// `RelativeDistance` is a supertrait so every distance-capable type can
// also compare distances without computing square roots.
pub trait Distance: RelativeDistance {
/// The output type which expresses the distance between values.
type Output;
/// Computes and returns the distance between two values.
fn distance(self, other: Self) -> Self::Output;
}
/// Computes and returns the distance between two values.
#[inline]
pub fn distance<T: Distance>(a: T, b: T) -> T::Output {
a.distance(b)
}
/// Trait for types for which distances between values can be compared.
pub trait RelativeDistance {
/// Checks which of the values `a` and `b` is closest to this value and returns the closest one.
fn closest(self, a: Self, b: Self) -> Self;
/// Checks which of the values `a` and `b` is farthest from this value and returns the farthest one.
fn farthest(self, a: Self, b: Self) -> Self;
}
/// Trait for types for which a length can be computed.
pub trait Length: RelativeLength {
/// The output type which expresses the length of a value.
type Output;
/// Computes and returns the length of a value.
fn length(self) -> Self::Output;
}
/// Trait for types for which lengths can be compared.
pub trait RelativeLength {
/// Returns the shortest of two values.
fn shortest(self, other: Self) -> Self;
/// Returns the longest of two values.
fn longest(self, other: Self) -> Self;
}
/// Returns the shortest of two values.
#[inline]
pub fn shortest<T: RelativeLength>(a: T, b: T) -> T {
a.shortest(b)
}
/// Returns the longest of two values.
#[inline]
pub fn longest<T: RelativeLength>(a: T, b: T) -> T {
a.longest(b)
}
/// Trait for types for which a dot product between values can be computed.
// Generic over `U` so mixed products (e.g. vector with normal) are possible.
pub trait DotProduct<U> {
/// The output type which expresses the dot product between values.
type Output;
/// Computes and returns the dot product between two values.
fn dot(self, other: U) -> Self::Output;
}
/// Computes and returns the dot product between two values.
#[inline]
pub fn dot<T: DotProduct<U>, U>(a: T, b: U) -> T::Output {
a.dot(b)
}
/// Trait for types for which a cross product between values can be computed.
pub trait CrossProduct<U> {
/// The output type which expresses the cross product between values.
type Output;
/// Computes and returns the cross product between two values.
fn cross(self, other: U) -> Self::Output;
}
/// Computes and returns the cross product between two values.
#[inline]
pub fn cross<T: CrossProduct<U>, U>(a: T, b: U) -> T::Output {
a.cross(b)
}
/// Trait for types for which a union with a value can be computed.
pub trait Union<U> {
/// The output type which represents the union between values.
type Output;
/// Computes and returns the union between two values.
fn union(self, other: U) -> Self::Output;
}
/// Computes and returns the union between two values.
#[inline]
pub fn union<T: Union<U>, U>(a: T, b: U) -> T::Output {
a.union(b)
}
/// Trait for types for which an intersection with a value can be computed.
pub trait Intersection<U> {
/// The output type which represents the intersection between values.
type Output;
/// Computes and returns the intersection between two values.
///
/// Returns `Some` when the intersection between the values is not empty; `None` if the intersection is empty.
fn intersection(self, other: U) -> Option<Self::Output>;
}
/// Computes and returns the intersection between two values.
///
/// Returns `Some` when the intersection between the values is not empty; `None` if the intersection is empty.
#[inline]
pub fn intersection<T: Intersection<U>, U>(a: T, b: U) -> Option<T::Output> {
a.intersection(b)
}
/// Trait to be implemented for `Transform2` or `Transform3` for types which can be transformed.
pub trait Transform<T> {
/// The output type that results from transforming a value of type `T`.
type Output;
/// Transforms a value of type `T`.
fn transform(&self, value: T) -> Self::Output;
}
/// Error returned when computing the inverse of a singular matrix is attempted.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct NonInvertibleMatrixError;
/// Dimension in 2D space.
// Used as an index into points and vectors (see the Index impls below).
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum Dimension2 { X, Y }
/// Point in 2D space.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Point2 {
pub x: Scalar,
pub y: Scalar,
}
/// Vector in 2D space.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Vector2 {
pub x: Scalar,
pub y: Scalar,
}
/// Ray in 2D space.
#[derive(Clone, PartialEq, Debug)]
pub struct Ray2 {
pub origin: Point2,
pub direction: Vector2,
}
/// Axis-aligned bounding box in 2D space.
#[derive(Clone, PartialEq, Debug)]
pub struct BoundingBox2 {
pub min: Point2,
pub max: Point2,
}
/// Matrix with 3 rows and 3 columns for transformations in 2D space.
// Storage is a private flat array of 9 scalars.
#[derive(Clone, PartialEq, Debug)]
pub struct Matrix3x3 {
m: [Scalar; 9]
}
/// Transform for transformations in 2D space.
// Keeps both directions; the shared matrices are behind `Arc` so transforms
// can be cloned cheaply.
#[derive(Clone, PartialEq, Debug)]
pub struct Transform2 {
pub forward: Arc<Matrix3x3>,
pub inverse: Arc<Matrix3x3>,
}
/// Dimension in 3D space.
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub enum Dimension3 { X, Y, Z }
/// Point in 3D space.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Point3 {
pub x: Scalar,
pub y: Scalar,
pub z: Scalar,
}
/// Vector in 3D space.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Vector3 {
pub x: Scalar,
pub y: Scalar,
pub z: Scalar,
}
/// Surface normal in 3D space.
// Kept distinct from Vector3 because normals transform differently.
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct Normal3 {
pub x: Scalar,
pub y: Scalar,
pub z: Scalar,
}
/// Ray in 3D space.
#[derive(Clone, PartialEq, Debug)]
pub struct Ray3 {
pub origin: Point3,
pub direction: Vector3,
}
/// Axis-aligned bounding box in 3D space.
#[derive(Clone, PartialEq, Debug)]
pub struct BoundingBox3 {
pub min: Point3,
pub max: Point3,
}
/// Matrix with 4 rows and 4 columns for transformations in 3D space.
#[derive(Clone, PartialEq, Debug)]
pub struct Matrix4x4 {
m: [Scalar; 16]
}
/// Transform for transformations in 3D space.
#[derive(Clone, PartialEq, Debug)]
pub struct Transform3 {
pub forward: Arc<Matrix4x4>,
pub inverse: Arc<Matrix4x4>,
}
// ===== Scalar ================================================================================================================================================
impl MinMax for Scalar {
/// Compares and returns the minimum of two scalars.
// Delegates to `f32::min`, which returns the other operand when one is NaN.
#[inline]
fn min(self, s: Scalar) -> Scalar {
Scalar::min(self, s)
}
/// Compares and returns the maximum of two scalars.
#[inline]
fn max(self, s: Scalar) -> Scalar {
Scalar::max(self, s)
}
}
// ===== Point2 ================================================================================================================================================
impl Point2 {
    /// Creates and returns a new `Point2` with x and y coordinates.
    #[inline]
    pub fn new(x: Scalar, y: Scalar) -> Point2 {
        Point2 { x, y }
    }

    /// Returns a `Point2` which represents the origin (x = 0 and y = 0).
    #[inline]
    pub fn origin() -> Point2 {
        Point2 { x: 0.0, y: 0.0 }
    }

    /// Returns the dimension with the smallest extent of this point (ties pick X).
    #[inline]
    pub fn min_dimension(self) -> Dimension2 {
        let abs = self.abs();
        if abs.x <= abs.y { Dimension2::X } else { Dimension2::Y }
    }

    /// Returns the dimension with the largest extent of this point.
    #[inline]
    pub fn max_dimension(self) -> Dimension2 {
        let abs = self.abs();
        if abs.x > abs.y { Dimension2::X } else { Dimension2::Y }
    }

    /// Returns the element-wise floor of this point.
    #[inline]
    pub fn floor(self) -> Point2 {
        Point2 { x: self.x.floor(), y: self.y.floor() }
    }

    /// Returns the element-wise ceiling of this point.
    #[inline]
    pub fn ceil(self) -> Point2 {
        Point2 { x: self.x.ceil(), y: self.y.ceil() }
    }

    /// Returns the element-wise rounded value of this point.
    #[inline]
    pub fn round(self) -> Point2 {
        Point2 { x: self.x.round(), y: self.y.round() }
    }

    /// Returns the element-wise truncated value of this point.
    #[inline]
    pub fn trunc(self) -> Point2 {
        Point2 { x: self.x.trunc(), y: self.y.trunc() }
    }

    /// Returns the element-wise fractional value of this point.
    #[inline]
    pub fn fract(self) -> Point2 {
        Point2 { x: self.x.fract(), y: self.y.fract() }
    }

    /// Returns the element-wise absolute value of this point.
    #[inline]
    pub fn abs(self) -> Point2 {
        Point2 { x: self.x.abs(), y: self.y.abs() }
    }

    /// Returns a point with a permutation of the elements of this point.
    #[inline]
    pub fn permute(self, dim_x: Dimension2, dim_y: Dimension2) -> Point2 {
        Point2 { x: self[dim_x], y: self[dim_y] }
    }
}
impl MinMax for Point2 {
/// Returns the element-wise minimum of two points.
#[inline]
fn min(self, p: Point2) -> Point2 {
Point2::new(min(self.x, p.x), min(self.y, p.y))
}
/// Returns the element-wise maximum of two points.
#[inline]
fn max(self, p: Point2) -> Point2 {
Point2::new(max(self.x, p.x), max(self.y, p.y))
}
}
impl Distance for Point2 {
    type Output = Scalar;

    /// Computes and returns the distance between two points (length of the
    /// displacement vector).
    #[inline]
    fn distance(self, p: Point2) -> Scalar {
        let displacement = p - self;
        displacement.length()
    }
}

impl RelativeDistance for Point2 {
    /// Checks which of the points `p1` and `p2` is closest to this point and returns the closest one.
    ///
    /// Compares squared distances via dot products, avoiding the square roots
    /// that full distance computations would need.
    #[inline]
    fn closest(self, p1: Point2, p2: Point2) -> Point2 {
        let d1 = p1 - self;
        let d2 = p2 - self;
        if dot(d1, d1) <= dot(d2, d2) { p1 } else { p2 }
    }

    /// Checks which of the points `p1` and `p2` is farthest from this point and returns the farthest one.
    ///
    /// Compares squared distances via dot products, avoiding the square roots
    /// that full distance computations would need.
    #[inline]
    fn farthest(self, p1: Point2, p2: Point2) -> Point2 {
        let d1 = p1 - self;
        let d2 = p2 - self;
        if dot(d1, d1) > dot(d2, d2) { p1 } else { p2 }
    }
}
// Dimension-based element access; Dimension2 has exactly two variants, so a
// simple X test covers both cases.
impl Index<Dimension2> for Point2 {
    type Output = Scalar;

    #[inline]
    fn index(&self, dim: Dimension2) -> &Scalar {
        if dim == Dimension2::X { &self.x } else { &self.y }
    }
}

impl IndexMut<Dimension2> for Point2 {
    #[inline]
    fn index_mut(&mut self, dim: Dimension2) -> &mut Scalar {
        if dim == Dimension2::X { &mut self.x } else { &mut self.y }
    }
}
impl Add<Vector2> for Point2 {
    type Output = Point2;
    /// Adds a vector to this point, returning the translated point.
    #[inline]
    fn add(self, v: Vector2) -> Point2 {
        Point2::new(self.x + v.x, self.y + v.y)
    }
}
impl AddAssign<Vector2> for Point2 {
    /// Translates this point in place by a vector.
    #[inline]
    fn add_assign(&mut self, v: Vector2) {
        self.x += v.x;
        self.y += v.y;
    }
}
impl Sub<Vector2> for Point2 {
    type Output = Point2;
    /// Subtracts a vector from this point, returning the translated point.
    #[inline]
    fn sub(self, v: Vector2) -> Point2 {
        Point2::new(self.x - v.x, self.y - v.y)
    }
}
impl SubAssign<Vector2> for Point2 {
    /// Translates this point in place by the negation of a vector.
    #[inline]
    fn sub_assign(&mut self, v: Vector2) {
        self.x -= v.x;
        self.y -= v.y;
    }
}
impl Sub<Point2> for Point2 {
    type Output = Vector2;
    /// Subtracts two points, returning the vector that points from `p` to `self`.
    #[inline]
    fn sub(self, p: Point2) -> Vector2 {
        Vector2::new(self.x - p.x, self.y - p.y)
    }
}
impl Neg for Point2 {
    type Output = Point2;
    /// Returns this point mirrored through the origin.
    #[inline]
    fn neg(self) -> Point2 {
        Point2::new(-self.x, -self.y)
    }
}
impl Mul<Scalar> for Point2 {
    type Output = Point2;
    /// Scales both coordinates of this point by a scalar.
    #[inline]
    fn mul(self, s: Scalar) -> Point2 {
        Point2::new(self.x * s, self.y * s)
    }
}
impl Mul<Point2> for Scalar {
    type Output = Point2;
    /// Scales both coordinates of a point by this scalar (scalar on the left).
    #[inline]
    fn mul(self, p: Point2) -> Point2 {
        p * self
    }
}
impl MulAssign<Scalar> for Point2 {
    /// Scales both coordinates of this point in place by a scalar.
    #[inline]
    fn mul_assign(&mut self, s: Scalar) {
        self.x *= s;
        self.y *= s;
    }
}
impl Div<Scalar> for Point2 {
    type Output = Point2;
    /// Divides both coordinates of this point by a scalar.
    #[inline]
    fn div(self, s: Scalar) -> Point2 {
        Point2::new(self.x / s, self.y / s)
    }
}
impl DivAssign<Scalar> for Point2 {
    /// Divides both coordinates of this point in place by a scalar.
    #[inline]
    fn div_assign(&mut self, s: Scalar) {
        self.x /= s;
        self.y /= s;
    }
}
impl Transform<Point2> for Transform2 {
    type Output = Point2;
    /// Transforms a point.
    #[inline]
    fn transform(&self, p: Point2) -> Point2 {
        // Applies the forward matrix (which includes translation for points).
        &*self.forward * p
    }
}
impl From<Vector2> for Point2 {
    /// Reinterprets a vector as the point with the same coordinates.
    #[inline]
    fn from(v: Vector2) -> Point2 {
        Point2::new(v.x, v.y)
    }
}
// ===== Vector2 ===============================================================================================================================================
impl Vector2 {
    /// Creates and returns a new `Vector2` with x and y coordinates.
    #[inline]
    pub fn new(x: Scalar, y: Scalar) -> Vector2 {
        Vector2 { x, y }
    }
    /// Returns a `Vector2` which represents the zero vector (x = 0 and y = 0).
    #[inline]
    pub fn zero() -> Vector2 {
        Vector2::new(0.0, 0.0)
    }
    /// Returns a `Vector2` of length 1 which represents the X axis (x = 1 and y = 0).
    #[inline]
    pub fn x_axis() -> Vector2 {
        Vector2::new(1.0, 0.0)
    }
    /// Returns a `Vector2` of length 1 which represents the Y axis (x = 0 and y = 1).
    #[inline]
    pub fn y_axis() -> Vector2 {
        Vector2::new(0.0, 1.0)
    }
    /// Returns a `Vector2` of length 1 which represents the axis specified by a dimension.
    #[inline]
    pub fn axis(dim: Dimension2) -> Vector2 {
        match dim {
            Dimension2::X => Vector2::x_axis(),
            Dimension2::Y => Vector2::y_axis(),
        }
    }
    /// Creates and returns a new `Vector2` which points in the same direction as this vector, but with length 1.
    ///
    /// Note: if this vector has length 0, the division yields NaN components.
    #[inline]
    pub fn normalize(self) -> Vector2 {
        self / self.length()
    }
    /// Returns the dimension with the smallest extent of this vector.
    ///
    /// Extents are compared by absolute value; ties resolve to `X`.
    #[inline]
    pub fn min_dimension(self) -> Dimension2 {
        let Vector2 { x, y } = self.abs();
        if x <= y { Dimension2::X } else { Dimension2::Y }
    }
    /// Returns the dimension with the largest extent of this vector.
    ///
    /// Extents are compared by absolute value; ties resolve to `Y`.
    #[inline]
    pub fn max_dimension(self) -> Dimension2 {
        let Vector2 { x, y } = self.abs();
        if x > y { Dimension2::X } else { Dimension2::Y }
    }
    /// Returns the element-wise floor of this vector.
    #[inline]
    pub fn floor(self) -> Vector2 {
        Vector2::new(self.x.floor(), self.y.floor())
    }
    /// Returns the element-wise ceiling of this vector.
    #[inline]
    pub fn ceil(self) -> Vector2 {
        Vector2::new(self.x.ceil(), self.y.ceil())
    }
    /// Returns the element-wise rounded value of this vector.
    #[inline]
    pub fn round(self) -> Vector2 {
        Vector2::new(self.x.round(), self.y.round())
    }
    /// Returns the element-wise truncated value of this vector.
    #[inline]
    pub fn trunc(self) -> Vector2 {
        Vector2::new(self.x.trunc(), self.y.trunc())
    }
    /// Returns the element-wise fractional value of this vector.
    #[inline]
    pub fn fract(self) -> Vector2 {
        Vector2::new(self.x.fract(), self.y.fract())
    }
    /// Returns the element-wise absolute value of this vector.
    #[inline]
    pub fn abs(self) -> Vector2 {
        Vector2::new(self.x.abs(), self.y.abs())
    }
    /// Returns a vector with a permutation of the elements of this vector.
    ///
    /// The result's x element is this vector's `dim_x` element, and its y element is this vector's `dim_y` element.
    #[inline]
    pub fn permute(self, dim_x: Dimension2, dim_y: Dimension2) -> Vector2 {
        Vector2::new(self[dim_x], self[dim_y])
    }
}
impl MinMax for Vector2 {
    /// Returns the element-wise minimum of two vectors.
    #[inline]
    fn min(self, v: Vector2) -> Vector2 {
        let (x, y) = (min(self.x, v.x), min(self.y, v.y));
        Vector2::new(x, y)
    }
    /// Returns the element-wise maximum of two vectors.
    #[inline]
    fn max(self, v: Vector2) -> Vector2 {
        let (x, y) = (max(self.x, v.x), max(self.y, v.y));
        Vector2::new(x, y)
    }
}
impl Length for Vector2 {
    type Output = Scalar;
    /// Computes and returns the length of a vector (the square root of its squared length).
    #[inline]
    fn length(self) -> Scalar {
        dot(self, self).sqrt()
    }
}
impl RelativeLength for Vector2 {
    /// Returns the shortest of two vectors.
    ///
    /// This is more computationally efficient than computing the lengths of the vectors and comparing them,
    /// because square root operations that are needed for computing the lengths are avoided.
    #[inline]
    fn shortest(self, v: Vector2) -> Vector2 {
        // Compare squared lengths; ties resolve to `self`.
        let (a, b) = (dot(self, self), dot(v, v));
        if a <= b { self } else { v }
    }
    /// Returns the longest of two vectors.
    ///
    /// This is more computationally efficient than computing the lengths of the vectors and comparing them,
    /// because square root operations that are needed for computing the lengths are avoided.
    #[inline]
    fn longest(self, v: Vector2) -> Vector2 {
        // Compare squared lengths; ties resolve to `v`.
        let (a, b) = (dot(self, self), dot(v, v));
        if a > b { self } else { v }
    }
}
impl DotProduct<Vector2> for Vector2 {
type Output = Scalar;
/// Computes and returns the dot product between two vectors.
#[inline]
fn dot(self, v: Vector2) -> Scalar {
self.x * v.x + self.y * v.y
}
}
impl Index<Dimension2> for Vector2 {
    type Output = Scalar;
    /// Returns a reference to the element of this vector selected by `dim`.
    #[inline]
    fn index(&self, dim: Dimension2) -> &Scalar {
        match dim {
            Dimension2::X => &self.x,
            Dimension2::Y => &self.y,
        }
    }
}
impl IndexMut<Dimension2> for Vector2 {
    /// Returns a mutable reference to the element of this vector selected by `dim`.
    #[inline]
    fn index_mut(&mut self, dim: Dimension2) -> &mut Scalar {
        match dim {
            Dimension2::X => &mut self.x,
            Dimension2::Y => &mut self.y,
        }
    }
}
impl Add<Vector2> for Vector2 {
    type Output = Vector2;
    /// Adds two vectors element-wise.
    #[inline]
    fn add(self, v: Vector2) -> Vector2 {
        Vector2::new(self.x + v.x, self.y + v.y)
    }
}
impl AddAssign<Vector2> for Vector2 {
    /// Adds a vector to this vector in place, element-wise.
    #[inline]
    fn add_assign(&mut self, v: Vector2) {
        self.x += v.x;
        self.y += v.y;
    }
}
impl Sub<Vector2> for Vector2 {
    type Output = Vector2;
    /// Subtracts two vectors element-wise.
    #[inline]
    fn sub(self, v: Vector2) -> Vector2 {
        Vector2::new(self.x - v.x, self.y - v.y)
    }
}
impl SubAssign<Vector2> for Vector2 {
    /// Subtracts a vector from this vector in place, element-wise.
    #[inline]
    fn sub_assign(&mut self, v: Vector2) {
        self.x -= v.x;
        self.y -= v.y;
    }
}
impl Neg for Vector2 {
    type Output = Vector2;
    /// Returns this vector with both elements negated (the opposite direction).
    #[inline]
    fn neg(self) -> Vector2 {
        Vector2::new(-self.x, -self.y)
    }
}
impl Mul<Scalar> for Vector2 {
    type Output = Vector2;
    /// Scales this vector by a scalar.
    #[inline]
    fn mul(self, s: Scalar) -> Vector2 {
        Vector2::new(self.x * s, self.y * s)
    }
}
impl Mul<Vector2> for Scalar {
    type Output = Vector2;
    /// Scales a vector by this scalar (scalar on the left).
    #[inline]
    fn mul(self, v: Vector2) -> Vector2 {
        v * self
    }
}
impl MulAssign<Scalar> for Vector2 {
    /// Scales this vector in place by a scalar.
    #[inline]
    fn mul_assign(&mut self, s: Scalar) {
        self.x *= s;
        self.y *= s;
    }
}
impl Div<Scalar> for Vector2 {
    type Output = Vector2;
    /// Divides this vector by a scalar.
    #[inline]
    fn div(self, s: Scalar) -> Vector2 {
        Vector2::new(self.x / s, self.y / s)
    }
}
impl DivAssign<Scalar> for Vector2 {
    /// Divides this vector in place by a scalar.
    #[inline]
    fn div_assign(&mut self, s: Scalar) {
        self.x /= s;
        self.y /= s;
    }
}
impl Transform<Vector2> for Transform2 {
    type Output = Vector2;
    /// Transforms a vector.
    #[inline]
    fn transform(&self, v: Vector2) -> Vector2 {
        // Applies only the linear part of the forward matrix (no translation for vectors).
        &*self.forward * v
    }
}
impl From<Point2> for Vector2 {
    /// Reinterprets a point as the vector with the same coordinates.
    #[inline]
    fn from(p: Point2) -> Vector2 {
        Vector2::new(p.x, p.y)
    }
}
// ===== Ray2 ==================================================================================================================================================
impl Ray2 {
    /// Creates and returns a new `Ray2` with an origin point and direction vector.
    #[inline]
    pub fn new(origin: Point2, direction: Vector2) -> Ray2 {
        Ray2 { origin, direction }
    }
    /// Computes and returns the point at `distance` along this ray (origin + distance * direction).
    #[inline]
    pub fn at(&self, distance: Scalar) -> Point2 {
        let offset = self.direction * distance;
        self.origin + offset
    }
}
impl Transform<&Ray2> for Transform2 {
    type Output = Ray2;
    /// Transforms a ray by transforming its origin (as a point) and its direction (as a vector).
    #[inline]
    fn transform(&self, ray: &Ray2) -> Ray2 {
        let origin = self.transform(ray.origin);
        let direction = self.transform(ray.direction);
        Ray2::new(origin, direction)
    }
}
// ===== BoundingBox2 ==========================================================================================================================================
impl BoundingBox2 {
    /// Creates and returns a new `BoundingBox2` with minimum and maximum corner points.
    #[inline]
    pub fn new(min: Point2, max: Point2) -> BoundingBox2 {
        BoundingBox2 { min, max }
    }
    /// Returns an empty `BoundingBox2`.
    ///
    /// Uses the inverted sentinel (min = +inf, max = -inf) so that a union with any point or box yields that point or box.
    #[inline]
    pub fn empty() -> BoundingBox2 {
        BoundingBox2::new(Point2::new(Scalar::INFINITY, Scalar::INFINITY), Point2::new(Scalar::NEG_INFINITY, Scalar::NEG_INFINITY))
    }
    /// Returns an infinite `BoundingBox2` which contains all of 2D space.
    #[inline]
    pub fn infinite() -> BoundingBox2 {
        BoundingBox2::new(Point2::new(Scalar::NEG_INFINITY, Scalar::NEG_INFINITY), Point2::new(Scalar::INFINITY, Scalar::INFINITY))
    }
    /// Returns the width (extent in the X dimension) of this bounding box.
    #[inline]
    pub fn width(&self) -> Scalar {
        self.max.x - self.min.x
    }
    /// Returns the height (extent in the Y dimension) of this bounding box.
    #[inline]
    pub fn height(&self) -> Scalar {
        self.max.y - self.min.y
    }
    /// Returns the extent of this bounding box in a dimension.
    #[inline]
    pub fn extent(&self, dim: Dimension2) -> Scalar {
        match dim {
            Dimension2::X => self.width(),
            Dimension2::Y => self.height(),
        }
    }
    /// Returns the dimension with the smallest extent of this bounding box (ties resolve to X).
    #[inline]
    pub fn min_dimension(&self) -> Dimension2 {
        let d = self.diagonal();
        if d.x <= d.y { Dimension2::X } else { Dimension2::Y }
    }
    /// Returns the dimension with the largest extent of this bounding box (ties resolve to Y).
    #[inline]
    pub fn max_dimension(&self) -> Dimension2 {
        let d = self.diagonal();
        if d.x > d.y { Dimension2::X } else { Dimension2::Y }
    }
    /// Returns the area (width times height) of this bounding box.
    #[inline]
    pub fn area(&self) -> Scalar {
        let d = self.diagonal();
        d.x * d.y
    }
    /// Returns the center point of this bounding box.
    #[inline]
    pub fn center(&self) -> Point2 {
        self.min + self.diagonal() * 0.5
    }
    /// Returns a corner point of this bounding box, indicated by an index (which must be between 0 and 3 inclusive).
    ///
    /// Bit 0 of the index selects min/max for x, bit 1 selects min/max for y.
    #[inline]
    pub fn corner(&self, index: usize) -> Point2 {
        debug_assert!(index < 4, "Invalid corner index: {}", index);
        let x = if index & 0b01 == 0 { self.min.x } else { self.max.x };
        let y = if index & 0b10 == 0 { self.min.y } else { self.max.y };
        Point2::new(x, y)
    }
    /// Returns the diagonal of this bounding box as a vector.
    #[inline]
    pub fn diagonal(&self) -> Vector2 {
        self.max - self.min
    }
    /// Checks if two bounding boxes overlap.
    ///
    /// Boundaries count as overlapping (comparisons are inclusive).
    #[inline]
    pub fn overlaps(&self, bb: &BoundingBox2) -> bool {
        //@formatter:off
        self.max.x >= bb.min.x && self.min.x <= bb.max.x &&
        self.max.y >= bb.min.y && self.min.y <= bb.max.y
        //@formatter:on
    }
    /// Checks if a point is inside this bounding box (boundaries inclusive).
    #[inline]
    pub fn is_inside(&self, p: Point2) -> bool {
        //@formatter:off
        p.x >= self.min.x && p.x <= self.max.x &&
        p.y >= self.min.y && p.y <= self.max.y
        //@formatter:on
    }
    /// Computes the closest intersection of this bounding box with a ray within a range.
    ///
    /// Returns a `Some` containing the closest intersection, or `None` if the ray does not intersect the bounding box within the range.
    ///
    /// Uses the "slab" method: the ray is clipped against the X slab first, then the Y slab,
    /// shrinking [start, end] at each step. The returned distance may equal `range.start`
    /// when the ray origin lies inside the box.
    ///
    /// NOTE(review): a direction component of 0 produces infinite slab distances, which IEEE
    /// min/max normally handle; if the origin lies exactly on a slab plane the division is
    /// 0/0 = NaN -- confirm callers never hit that case.
    pub fn intersect_ray(&self, ray: &Ray2, range: &Range<Scalar>) -> Option<Scalar> {
        let (start, end) = (range.start, range.end);
        // Clip against the X slab.
        let d1 = (self.min.x - ray.origin.x) / ray.direction.x;
        let d2 = (self.max.x - ray.origin.x) / ray.direction.x;
        let start = max(start, min(d1, d2));
        let end = min(end, max(d1, d2));
        if start > end {
            return None;
        }
        // Clip against the Y slab.
        let d1 = (self.min.y - ray.origin.y) / ray.direction.y;
        let d2 = (self.max.y - ray.origin.y) / ray.direction.y;
        let start = max(start, min(d1, d2));
        let end = min(end, max(d1, d2));
        if start <= end {
            Some(start)
        } else {
            None
        }
    }
}
impl Union<&BoundingBox2> for BoundingBox2 {
    type Output = BoundingBox2;
    /// Computes and returns the union between two bounding boxes.
    ///
    /// The union is the smallest bounding box that contains both bounding boxes.
    #[inline]
    fn union(self, bb: &BoundingBox2) -> BoundingBox2 {
        let lower = min(self.min, bb.min);
        let upper = max(self.max, bb.max);
        BoundingBox2::new(lower, upper)
    }
}
impl Union<Point2> for BoundingBox2 {
    type Output = BoundingBox2;
    /// Computes and returns the union between this bounding box and a point.
    ///
    /// The union is the smallest bounding box that contains both the bounding box and the point.
    #[inline]
    fn union(self, p: Point2) -> BoundingBox2 {
        let lower = min(self.min, p);
        let upper = max(self.max, p);
        BoundingBox2::new(lower, upper)
    }
}
impl Intersection<&BoundingBox2> for BoundingBox2 {
    type Output = BoundingBox2;
    /// Computes and returns the intersection between two bounding boxes.
    ///
    /// The intersection is the largest bounding box that contains the region where the two bounding boxes overlap.
    ///
    /// Returns `Some` when the bounding boxes overlap; `None` if the bounding boxes do not overlap.
    #[inline]
    fn intersection(self, bb: &BoundingBox2) -> Option<BoundingBox2> {
        self.overlaps(bb)
            .then(|| BoundingBox2::new(max(self.min, bb.min), min(self.max, bb.max)))
    }
}
impl Transform<&BoundingBox2> for Transform2 {
    type Output = BoundingBox2;
    /// Transforms a bounding box.
    ///
    /// Transforms the min corner (as a point) and the diagonal (as a vector), reconstructs
    /// all four corners from them, and returns the axis-aligned box that bounds those
    /// transformed corners.
    ///
    /// NOTE(review): reconstructing corners as `transform(min) + parts of transform(diagonal)`
    /// is only equivalent to transforming each corner when the forward matrix is affine
    /// (no perspective row) -- confirm that assumption holds for `Transform2`.
    fn transform(&self, bb: &BoundingBox2) -> BoundingBox2 {
        let o = self.transform(bb.min);
        let d = self.transform(bb.diagonal());
        let (mut min_corner, mut max_corner) = (o, o);
        // Enumerate corners 1..4; bit 0 adds the x extent, bit 1 adds the y extent
        // (corner 0 is `o` itself, already accounted for by the initial values).
        for i in 1..4 {
            let mut corner = o;
            if i & 0b01 != 0 { corner.x += d.x; }
            if i & 0b10 != 0 { corner.y += d.y; }
            min_corner = min(min_corner, corner);
            max_corner = max(max_corner, corner);
        }
        BoundingBox2::new(min_corner, max_corner)
    }
}
// ===== Matrix3x3 =============================================================================================================================================
impl Matrix3x3 {
    /// Creates and returns a new `Matrix3x3` with the specified elements, given in row-major order.
    #[inline]
    pub fn new(m: [Scalar; 9]) -> Matrix3x3 {
        Matrix3x3 { m }
    }
    /// Returns a `Matrix3x3` which represents the identity matrix.
    #[inline]
    pub fn identity() -> Matrix3x3 {
        Matrix3x3::new([
            1.0, 0.0, 0.0,
            0.0, 1.0, 0.0,
            0.0, 0.0, 1.0,
        ])
    }
    /// Returns a translation matrix which translates over a vector.
    #[inline]
    pub fn translate(v: Vector2) -> Matrix3x3 {
        Matrix3x3::new([
            1.0, 0.0, v.x,
            0.0, 1.0, v.y,
            0.0, 0.0, 1.0,
        ])
    }
    /// Returns a rotation matrix which rotates around the origin.
    #[inline]
    pub fn rotate(angle: Scalar) -> Matrix3x3 {
        let (sin, cos) = angle.sin_cos();
        Matrix3x3::new([
            cos, -sin, 0.0,
            sin, cos, 0.0,
            0.0, 0.0, 1.0,
        ])
    }
    /// Returns a matrix which scales by factors in the X and Y dimensions.
    #[inline]
    pub fn scale(sx: Scalar, sy: Scalar) -> Matrix3x3 {
        Matrix3x3::new([
            sx, 0.0, 0.0,
            0.0, sy, 0.0,
            0.0, 0.0, 1.0,
        ])
    }
    /// Returns a matrix which scales uniformly in all dimensions by a factor.
    #[inline]
    pub fn scale_uniform(s: Scalar) -> Matrix3x3 {
        Matrix3x3::new([
            s, 0.0, 0.0,
            0.0, s, 0.0,
            0.0, 0.0, 1.0,
        ])
    }
    /// Returns an element at a row and column of the matrix.
    #[inline]
    pub fn get(&self, row: usize, col: usize) -> Scalar {
        debug_assert!(row < 3, "Invalid row index: {}", row);
        // Bug fix: the message previously printed `row` for an invalid column.
        debug_assert!(col < 3, "Invalid column index: {}", col);
        self.m[row * 3 + col]
    }
    /// Returns a mutable reference to an element at a row and column of the matrix.
    #[inline]
    pub fn get_mut(&mut self, row: usize, col: usize) -> &mut Scalar {
        debug_assert!(row < 3, "Invalid row index: {}", row);
        // Bug fix: the message previously printed `row` for an invalid column.
        debug_assert!(col < 3, "Invalid column index: {}", col);
        &mut self.m[row * 3 + col]
    }
    /// Sets the value of an element at a row and column of the matrix.
    #[inline]
    pub fn set(&mut self, row: usize, col: usize, value: Scalar) {
        debug_assert!(row < 3, "Invalid row index: {}", row);
        // Bug fix: the message previously printed `row` for an invalid column.
        debug_assert!(col < 3, "Invalid column index: {}", col);
        self.m[row * 3 + col] = value;
    }
    /// Returns the transpose of this matrix.
    #[inline]
    pub fn transpose(&self) -> Matrix3x3 {
        Matrix3x3::new([
            self.m[0], self.m[3], self.m[6],
            self.m[1], self.m[4], self.m[7],
            self.m[2], self.m[5], self.m[8],
        ])
    }
    /// Computes and returns the inverse of this matrix.
    ///
    /// The inverse is the adjugate (transposed cofactor matrix) divided by the determinant.
    ///
    /// If this matrix is singular, a `NonInvertibleMatrixError` is returned.
    pub fn inverse(&self) -> Result<Matrix3x3, NonInvertibleMatrixError> {
        // Determinant via the rule of Sarrus.
        let det = self.m[0] * self.m[4] * self.m[8] + self.m[1] * self.m[5] * self.m[6] + self.m[2] * self.m[3] * self.m[7]
            - self.m[2] * self.m[4] * self.m[6] - self.m[1] * self.m[3] * self.m[8] - self.m[0] * self.m[5] * self.m[7];
        // Note: exact comparison with 0.0; near-singular matrices still produce an
        // (ill-conditioned) inverse rather than an error.
        if det != 0.0 {
            let inv_det = det.recip();
            Ok(Matrix3x3::new([
                (self.m[4] * self.m[8] - self.m[5] * self.m[7]) * inv_det,
                (self.m[2] * self.m[7] - self.m[1] * self.m[8]) * inv_det,
                (self.m[1] * self.m[5] - self.m[2] * self.m[4]) * inv_det,
                (self.m[5] * self.m[6] - self.m[3] * self.m[8]) * inv_det,
                (self.m[0] * self.m[8] - self.m[2] * self.m[6]) * inv_det,
                (self.m[2] * self.m[3] - self.m[0] * self.m[5]) * inv_det,
                (self.m[3] * self.m[7] - self.m[4] * self.m[6]) * inv_det,
                (self.m[1] * self.m[6] - self.m[0] * self.m[7]) * inv_det,
                (self.m[0] * self.m[4] - self.m[1] * self.m[3]) * inv_det,
            ]))
        } else {
            Err(NonInvertibleMatrixError)
        }
    }
}
impl Mul<Scalar> for &Matrix3x3 {
    type Output = Matrix3x3;
    /// Multiplies every element of the matrix by a scalar.
    #[inline]
    fn mul(self, s: Scalar) -> Matrix3x3 {
        // `array!` builds the 9-element array from the index closure; presumably a
        // helper macro defined elsewhere in this crate -- confirm.
        Matrix3x3::new(array![|i| self.m[i] * s; 9])
    }
}
impl Mul<&Matrix3x3> for Scalar {
    type Output = Matrix3x3;
    /// Multiplies every element of a matrix by this scalar (scalar on the left).
    #[inline]
    fn mul(self, m: &Matrix3x3) -> Matrix3x3 {
        m * self
    }
}
impl MulAssign<Scalar> for &mut Matrix3x3 {
    // NOTE(review): this impl targets `&mut Matrix3x3` rather than `Matrix3x3`, so
    // `m *= s` only resolves through a mutable `&mut Matrix3x3` binding -- confirm
    // this target is intentional and matches how callers use it.
    /// Multiplies every element of this matrix in place by a scalar.
    #[inline]
    fn mul_assign(&mut self, s: Scalar) {
        for m in &mut self.m { *m *= s; }
    }
}
impl Div<Scalar> for &Matrix3x3 {
    type Output = Matrix3x3;
    /// Divides every element of the matrix by a scalar.
    #[inline]
    fn div(self, s: Scalar) -> Matrix3x3 {
        Matrix3x3::new(array![|i| self.m[i] / s; 9])
    }
}
impl DivAssign<Scalar> for &mut Matrix3x3 {
    // NOTE(review): same unusual `&mut Matrix3x3` target as `MulAssign` above -- confirm.
    /// Divides every element of this matrix in place by a scalar.
    #[inline]
    fn div_assign(&mut self, s: Scalar) {
        for m in &mut self.m { *m /= s; }
    }
}
impl Mul<Point2> for &Matrix3x3 {
    type Output = Point2;
    /// Multiplies this matrix with a point treated as a column vector (x, y, 1),
    /// performing the homogeneous divide by w.
    #[inline]
    fn mul(self, p: Point2) -> Point2 {
        let x = self.m[0] * p.x + self.m[1] * p.y + self.m[2];
        let y = self.m[3] * p.x + self.m[4] * p.y + self.m[5];
        let w = self.m[6] * p.x + self.m[7] * p.y + self.m[8];
        Point2::new(x / w, y / w)
    }
}
impl Mul<&Matrix3x3> for Point2 {
    type Output = Point2;
    /// Multiplies a point treated as a row vector (x, y, 1) with this matrix
    /// (equivalent to multiplying by the transpose), performing the homogeneous divide by w.
    #[inline]
    fn mul(self, m: &Matrix3x3) -> Point2 {
        let x = self.x * m.m[0] + self.y * m.m[3] + m.m[6];
        let y = self.x * m.m[1] + self.y * m.m[4] + m.m[7];
        let w = self.x * m.m[2] + self.y * m.m[5] + m.m[8];
        Point2::new(x / w, y / w)
    }
}
impl Mul<Vector2> for &Matrix3x3 {
    type Output = Vector2;
    /// Multiplies this matrix with a vector treated as a column vector (x, y, 0);
    /// only the upper-left 2x2 linear part applies (no translation, no divide).
    #[inline]
    fn mul(self, v: Vector2) -> Vector2 {
        let x = self.m[0] * v.x + self.m[1] * v.y;
        let y = self.m[3] * v.x + self.m[4] * v.y;
        Vector2::new(x, y)
    }
}
impl Mul<&Matrix3x3> for Vector2 {
    type Output = Vector2;
    /// Multiplies a vector treated as a row vector (x, y, 0) with this matrix
    /// (equivalent to multiplying by the transpose of the linear part).
    #[inline]
    fn mul(self, m: &Matrix3x3) -> Vector2 {
        let x = self.x * m.m[0] + self.y * m.m[3];
        let y = self.x * m.m[1] + self.y * m.m[4];
        Vector2::new(x, y)
    }
}
impl Mul<&Matrix3x3> for &Matrix3x3 {
    type Output = Matrix3x3;
    /// Computes the matrix product `self * m` (row-major storage; element (r, c) of the
    /// result is the dot product of row r of `self` with column c of `m`).
    #[inline]
    fn mul(self, m: &Matrix3x3) -> Matrix3x3 {
        Matrix3x3::new([
            self.m[0] * m.m[0] + self.m[1] * m.m[3] + self.m[2] * m.m[6],
            self.m[0] * m.m[1] + self.m[1] * m.m[4] + self.m[2] * m.m[7],
            self.m[0] * m.m[2] + self.m[1] * m.m[5] + self.m[2] * m.m[8],
            self.m[3] * m.m[0] + self.m[4] * m.m[3] + self.m[5] * m.m[6],
            self.m[3] * m.m[1] + self.m[4] * m.m[4] + self.m[5] * m.m[7],
            self.m[3] * m.m[2] + self.m[4] * m.m[5] + self.m[5] * m.m[8],
            self.m[6] * m.m[0] + self.m[7] * m.m[3] + self.m[8] * m.m[6],
            self.m[6] * m.m[1] + self.m[7] * m.m[4] + self.m[8] * m.m[7],
            self.m[6] * m.m[2] + self.m[7] * m.m[5] + self.m[8] * m.m[8],
        ])
    }
}
// ===== Transform2 ============================================================================================================================================
impl Transform2 {
    /// Creates and returns a new `Transform2` with a transformation matrix and its inverse.
    ///
    /// The caller is responsible for `inverse` actually being the inverse of `forward`;
    /// this is not checked here.
    #[inline]
    pub fn new(forward: Arc<Matrix3x3>, inverse: Arc<Matrix3x3>) -> Transform2 {
        Transform2 { forward, inverse }
    }
    /// Returns a `Transform2` which represents the identity transform.
    #[inline]
    pub fn identity() -> Transform2 {
        // The identity is its own inverse, so both Arcs share one matrix.
        let forward = Arc::new(Matrix3x3::identity());
        let inverse = forward.clone();
        Transform2::new(forward, inverse)
    }
    /// Returns a translation transform over a vector.
    #[inline]
    pub fn translate(v: Vector2) -> Transform2 {
        // Inverse of translating by v is translating by -v.
        Transform2::new(Arc::new(Matrix3x3::translate(v)), Arc::new(Matrix3x3::translate(-v)))
    }
    /// Returns a rotation transform which rotates around the origin.
    #[inline]
    pub fn rotate(angle: Scalar) -> Transform2 {
        // A rotation matrix is orthogonal, so its inverse is its transpose.
        let forward = Matrix3x3::rotate(angle);
        let inverse = forward.transpose();
        Transform2::new(Arc::new(forward), Arc::new(inverse))
    }
    /// Returns a transform which scales by factors in the X and Y dimensions.
    #[inline]
    pub fn scale(sx: Scalar, sy: Scalar) -> Transform2 {
        // Inverse scales by the reciprocal factors; a zero factor yields infinities.
        Transform2::new(Arc::new(Matrix3x3::scale(sx, sy)), Arc::new(Matrix3x3::scale(sx.recip(), sy.recip())))
    }
    /// Returns a transform which scales uniformly in all dimensions by a factor.
    #[inline]
    pub fn scale_uniform(s: Scalar) -> Transform2 {
        Transform2::new(Arc::new(Matrix3x3::scale_uniform(s)), Arc::new(Matrix3x3::scale_uniform(s.recip())))
    }
    /// Computes and returns a composite transform, which first applies this and then the other transform.
    ///
    /// The composite forward matrix is `transform.forward * self.forward` (so `self` applies
    /// first), and the composite inverse is the product in the opposite order.
    #[inline]
    pub fn and_then(&self, transform: &Transform2) -> Transform2 {
        Transform2::new(Arc::new(&*transform.forward * &*self.forward), Arc::new(&*self.inverse * &*transform.inverse))
    }
    /// Returns the inverse of this transform.
    ///
    /// This is cheap: it just swaps the (shared) forward and inverse matrices.
    #[inline]
    pub fn inverse(&self) -> Transform2 {
        Transform2::new(self.inverse.clone(), self.forward.clone())
    }
}
impl TryFrom<Matrix3x3> for Transform2 {
    type Error = NonInvertibleMatrixError;
    /// Builds a `Transform2` from a forward matrix by computing its inverse.
    ///
    /// Fails with `NonInvertibleMatrixError` when the matrix is singular.
    #[inline]
    fn try_from(forward: Matrix3x3) -> Result<Transform2, NonInvertibleMatrixError> {
        match forward.inverse() {
            Ok(inverse) => Ok(Transform2::new(Arc::new(forward), Arc::new(inverse))),
            Err(e) => Err(e),
        }
    }
}
// ===== Point3 ================================================================================================================================================
impl Point3 {
    /// Creates and returns a new `Point3` with x, y and z coordinates.
    #[inline]
    pub fn new(x: Scalar, y: Scalar, z: Scalar) -> Point3 {
        Point3 { x, y, z }
    }
    /// Returns a `Point3` which represents the origin (x = 0, y = 0 and z = 0).
    #[inline]
    pub fn origin() -> Point3 {
        Point3::new(0.0, 0.0, 0.0)
    }
    /// Returns the dimension with the smallest extent of this point.
    ///
    /// Extents are compared by absolute value; ties resolve to the earlier dimension (X before Y before Z).
    #[inline]
    pub fn min_dimension(self) -> Dimension3 {
        let Point3 { x, y, z } = self.abs();
        if x <= y && x <= z { Dimension3::X } else if y <= z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the dimension with the largest extent of this point.
    ///
    /// Extents are compared by absolute value; ties resolve to the later dimension (Z before Y before X).
    #[inline]
    pub fn max_dimension(self) -> Dimension3 {
        let Point3 { x, y, z } = self.abs();
        if x > y && x > z { Dimension3::X } else if y > z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the element-wise floor of this point.
    #[inline]
    pub fn floor(self) -> Point3 {
        Point3::new(self.x.floor(), self.y.floor(), self.z.floor())
    }
    /// Returns the element-wise ceiling of this point.
    #[inline]
    pub fn ceil(self) -> Point3 {
        Point3::new(self.x.ceil(), self.y.ceil(), self.z.ceil())
    }
    /// Returns the element-wise rounded value of this point.
    #[inline]
    pub fn round(self) -> Point3 {
        Point3::new(self.x.round(), self.y.round(), self.z.round())
    }
    /// Returns the element-wise truncated value of this point.
    #[inline]
    pub fn trunc(self) -> Point3 {
        Point3::new(self.x.trunc(), self.y.trunc(), self.z.trunc())
    }
    /// Returns the element-wise fractional value of this point.
    #[inline]
    pub fn fract(self) -> Point3 {
        Point3::new(self.x.fract(), self.y.fract(), self.z.fract())
    }
    /// Returns the element-wise absolute value of this point.
    #[inline]
    pub fn abs(self) -> Point3 {
        Point3::new(self.x.abs(), self.y.abs(), self.z.abs())
    }
    /// Returns a point with a permutation of the elements of this point.
    ///
    /// The result's x, y and z elements are this point's `dim_x`, `dim_y` and `dim_z` elements.
    #[inline]
    pub fn permute(self, dim_x: Dimension3, dim_y: Dimension3, dim_z: Dimension3) -> Point3 {
        Point3::new(self[dim_x], self[dim_y], self[dim_z])
    }
}
impl MinMax for Point3 {
    /// Returns the element-wise minimum of two points.
    #[inline]
    fn min(self, p: Point3) -> Point3 {
        let (x, y, z) = (min(self.x, p.x), min(self.y, p.y), min(self.z, p.z));
        Point3::new(x, y, z)
    }
    /// Returns the element-wise maximum of two points.
    #[inline]
    fn max(self, p: Point3) -> Point3 {
        let (x, y, z) = (max(self.x, p.x), max(self.y, p.y), max(self.z, p.z));
        Point3::new(x, y, z)
    }
}
impl Distance for Point3 {
    type Output = Scalar;
    /// Computes and returns the distance between two points.
    #[inline]
    fn distance(self, p: Point3) -> Scalar {
        let delta = p - self;
        delta.length()
    }
}
impl RelativeDistance for Point3 {
    /// Checks which of the points `p1` and `p2` is closest to this point and returns the closest one.
    ///
    /// This is more computationally efficient than computing the distance between this point and the points `p1` and `p2` and comparing the distances,
    /// because square root operations that are needed for computing the distances are avoided.
    #[inline]
    fn closest(self, p1: Point3, p2: Point3) -> Point3 {
        // Compare squared distances; ties resolve to `p1`.
        let d1 = p1 - self;
        let d2 = p2 - self;
        if dot(d1, d1) <= dot(d2, d2) { p1 } else { p2 }
    }
    /// Checks which of the points `p1` and `p2` is farthest from this point and returns the farthest one.
    ///
    /// This is more computationally efficient than computing the distance between this point and the points `p1` and `p2` and comparing the distances,
    /// because square root operations that are needed for computing the distances are avoided.
    #[inline]
    fn farthest(self, p1: Point3, p2: Point3) -> Point3 {
        // Compare squared distances; ties resolve to `p2`.
        let d1 = p1 - self;
        let d2 = p2 - self;
        if dot(d1, d1) > dot(d2, d2) { p1 } else { p2 }
    }
}
impl Index<Dimension3> for Point3 {
    type Output = Scalar;
    /// Returns a reference to the coordinate of this point selected by `dim`.
    #[inline]
    fn index(&self, dim: Dimension3) -> &Scalar {
        match dim {
            Dimension3::X => &self.x,
            Dimension3::Y => &self.y,
            Dimension3::Z => &self.z,
        }
    }
}
impl IndexMut<Dimension3> for Point3 {
    /// Returns a mutable reference to the coordinate of this point selected by `dim`.
    #[inline]
    fn index_mut(&mut self, dim: Dimension3) -> &mut Scalar {
        match dim {
            Dimension3::X => &mut self.x,
            Dimension3::Y => &mut self.y,
            Dimension3::Z => &mut self.z,
        }
    }
}
impl Add<Vector3> for Point3 {
    type Output = Point3;
    /// Adds a vector to this point, returning the translated point.
    #[inline]
    fn add(self, v: Vector3) -> Point3 {
        Point3::new(self.x + v.x, self.y + v.y, self.z + v.z)
    }
}
impl AddAssign<Vector3> for Point3 {
    /// Translates this point in place by a vector.
    #[inline]
    fn add_assign(&mut self, v: Vector3) {
        self.x += v.x;
        self.y += v.y;
        self.z += v.z;
    }
}
impl Sub<Vector3> for Point3 {
    type Output = Point3;
    /// Subtracts a vector from this point, returning the translated point.
    #[inline]
    fn sub(self, v: Vector3) -> Point3 {
        Point3::new(self.x - v.x, self.y - v.y, self.z - v.z)
    }
}
impl SubAssign<Vector3> for Point3 {
    /// Translates this point in place by the negation of a vector.
    #[inline]
    fn sub_assign(&mut self, v: Vector3) {
        self.x -= v.x;
        self.y -= v.y;
        self.z -= v.z;
    }
}
impl Sub<Point3> for Point3 {
    type Output = Vector3;
    /// Subtracts two points, returning the vector that points from `p` to `self`.
    #[inline]
    fn sub(self, p: Point3) -> Vector3 {
        Vector3::new(self.x - p.x, self.y - p.y, self.z - p.z)
    }
}
impl Neg for Point3 {
    type Output = Point3;
    /// Returns this point mirrored through the origin.
    #[inline]
    fn neg(self) -> Point3 {
        Point3::new(-self.x, -self.y, -self.z)
    }
}
impl Mul<Scalar> for Point3 {
    type Output = Point3;
    /// Scales all coordinates of this point by a scalar.
    #[inline]
    fn mul(self, s: Scalar) -> Point3 {
        Point3::new(self.x * s, self.y * s, self.z * s)
    }
}
impl Mul<Point3> for Scalar {
    type Output = Point3;
    /// Scales all coordinates of a point by this scalar (scalar on the left).
    #[inline]
    fn mul(self, p: Point3) -> Point3 {
        p * self
    }
}
impl MulAssign<Scalar> for Point3 {
    /// Scales all coordinates of this point in place by a scalar.
    #[inline]
    fn mul_assign(&mut self, s: Scalar) {
        self.x *= s;
        self.y *= s;
        self.z *= s;
    }
}
impl Div<Scalar> for Point3 {
    type Output = Point3;
    /// Divides all coordinates of this point by a scalar.
    #[inline]
    fn div(self, s: Scalar) -> Point3 {
        Point3::new(self.x / s, self.y / s, self.z / s)
    }
}
impl DivAssign<Scalar> for Point3 {
    /// Divides all coordinates of this point in place by a scalar.
    #[inline]
    fn div_assign(&mut self, s: Scalar) {
        self.x /= s;
        self.y /= s;
        self.z /= s;
    }
}
impl Transform<Point3> for Transform3 {
    type Output = Point3;
    /// Transforms a point.
    #[inline]
    fn transform(&self, p: Point3) -> Point3 {
        // Applies the forward matrix (which includes translation for points).
        &*self.forward * p
    }
}
impl From<Vector3> for Point3 {
    /// Reinterprets a vector as the point with the same coordinates.
    #[inline]
    fn from(v: Vector3) -> Point3 {
        Point3::new(v.x, v.y, v.z)
    }
}
// ===== Vector3 ===============================================================================================================================================
impl Vector3 {
    /// Creates and returns a new `Vector3` with x, y and z coordinates.
    #[inline]
    pub fn new(x: Scalar, y: Scalar, z: Scalar) -> Vector3 {
        Vector3 { x, y, z }
    }
    /// Returns a `Vector3` which represents the zero vector (x = 0, y = 0 and z = 0).
    #[inline]
    pub fn zero() -> Vector3 {
        Vector3::new(0.0, 0.0, 0.0)
    }
    /// Returns a `Vector3` of length 1 which represents the X axis (x = 1, y = 0 and z = 0).
    #[inline]
    pub fn x_axis() -> Vector3 {
        Vector3::new(1.0, 0.0, 0.0)
    }
    /// Returns a `Vector3` of length 1 which represents the Y axis (x = 0, y = 1 and z = 0).
    #[inline]
    pub fn y_axis() -> Vector3 {
        Vector3::new(0.0, 1.0, 0.0)
    }
    /// Returns a `Vector3` of length 1 which represents the Z axis (x = 0, y = 0 and z = 1).
    #[inline]
    pub fn z_axis() -> Vector3 {
        Vector3::new(0.0, 0.0, 1.0)
    }
    /// Returns a `Vector3` of length 1 which represents the axis specified by a dimension.
    #[inline]
    pub fn axis(dim: Dimension3) -> Vector3 {
        match dim {
            Dimension3::X => Vector3::x_axis(),
            Dimension3::Y => Vector3::y_axis(),
            Dimension3::Z => Vector3::z_axis(),
        }
    }
    /// Creates and returns a new `Vector3` which points in the same direction as this vector, but with length 1.
    ///
    /// Note: if this vector has length 0, the division yields NaN components.
    #[inline]
    pub fn normalize(self) -> Vector3 {
        self / self.length()
    }
    /// Returns the dimension with the smallest extent of this vector.
    ///
    /// Extents are compared by absolute value; ties resolve to the earlier dimension (X before Y before Z).
    #[inline]
    pub fn min_dimension(self) -> Dimension3 {
        let Vector3 { x, y, z } = self.abs();
        if x <= y && x <= z { Dimension3::X } else if y <= z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the dimension with the largest extent of this vector.
    ///
    /// Extents are compared by absolute value; ties resolve to the later dimension (Z before Y before X).
    #[inline]
    pub fn max_dimension(self) -> Dimension3 {
        let Vector3 { x, y, z } = self.abs();
        if x > y && x > z { Dimension3::X } else if y > z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the element-wise floor of this vector.
    #[inline]
    pub fn floor(self) -> Vector3 {
        Vector3::new(self.x.floor(), self.y.floor(), self.z.floor())
    }
    /// Returns the element-wise ceiling of this vector.
    #[inline]
    pub fn ceil(self) -> Vector3 {
        Vector3::new(self.x.ceil(), self.y.ceil(), self.z.ceil())
    }
    /// Returns the element-wise rounded value of this vector.
    #[inline]
    pub fn round(self) -> Vector3 {
        Vector3::new(self.x.round(), self.y.round(), self.z.round())
    }
    /// Returns the element-wise truncated value of this vector.
    #[inline]
    pub fn trunc(self) -> Vector3 {
        Vector3::new(self.x.trunc(), self.y.trunc(), self.z.trunc())
    }
    /// Returns the element-wise fractional value of this vector.
    #[inline]
    pub fn fract(self) -> Vector3 {
        Vector3::new(self.x.fract(), self.y.fract(), self.z.fract())
    }
    /// Returns the element-wise absolute value of this vector.
    #[inline]
    pub fn abs(self) -> Vector3 {
        Vector3::new(self.x.abs(), self.y.abs(), self.z.abs())
    }
    /// Returns a vector with a permutation of the elements of this vector.
    ///
    /// The result's x, y and z elements are this vector's `dim_x`, `dim_y` and `dim_z` elements.
    #[inline]
    pub fn permute(self, dim_x: Dimension3, dim_y: Dimension3, dim_z: Dimension3) -> Vector3 {
        Vector3::new(self[dim_x], self[dim_y], self[dim_z])
    }
}
impl MinMax for Vector3 {
    /// Returns the element-wise minimum of two vectors.
    #[inline]
    fn min(self, v: Vector3) -> Vector3 {
        let (x, y, z) = (min(self.x, v.x), min(self.y, v.y), min(self.z, v.z));
        Vector3::new(x, y, z)
    }
    /// Returns the element-wise maximum of two vectors.
    #[inline]
    fn max(self, v: Vector3) -> Vector3 {
        let (x, y, z) = (max(self.x, v.x), max(self.y, v.y), max(self.z, v.z));
        Vector3::new(x, y, z)
    }
}
impl Length for Vector3 {
    type Output = Scalar;
    /// Computes and returns the length of a vector (the square root of its squared length).
    #[inline]
    fn length(self) -> Scalar {
        dot(self, self).sqrt()
    }
}
impl RelativeLength for Vector3 {
    /// Returns the shortest of two vectors.
    ///
    /// This is more computationally efficient than computing the lengths of the vectors and comparing them,
    /// because square root operations that are needed for computing the lengths are avoided.
    #[inline]
    fn shortest(self, v: Vector3) -> Vector3 {
        // Compare squared lengths; ties resolve to `self`.
        let (a, b) = (dot(self, self), dot(v, v));
        if a <= b { self } else { v }
    }
    /// Returns the longest of two vectors.
    ///
    /// This is more computationally efficient than computing the lengths of the vectors and comparing them,
    /// because square root operations that are needed for computing the lengths are avoided.
    #[inline]
    fn longest(self, v: Vector3) -> Vector3 {
        // Compare squared lengths; ties resolve to `v`.
        let (a, b) = (dot(self, self), dot(v, v));
        if a > b { self } else { v }
    }
}
impl DotProduct<Vector3> for Vector3 {
type Output = Scalar;
/// Computes and returns the dot product between two vectors.
#[inline]
fn dot(self, v: Vector3) -> Scalar {
self.x * v.x + self.y * v.y + self.z * v.z
}
}
impl DotProduct<Normal3> for Vector3 {
type Output = Scalar;
/// Computes and returns the dot product between this vector and a normal.
#[inline]
fn dot(self, n: Normal3) -> Scalar {
self.x * n.x + self.y * n.y + self.z * n.z
}
}
impl CrossProduct<Vector3> for Vector3 {
type Output = Vector3;
/// Computes and returns the cross product between two vectors.
#[inline]
fn cross(self, v: Vector3) -> Vector3 {
Vector3::new(self.y * v.z - self.z * v.y, self.z * v.x - self.x * v.z, self.x * v.y - self.y * v.x)
}
}
impl Index<Dimension3> for Vector3 {
type Output = Scalar;
#[inline]
fn index(&self, dim: Dimension3) -> &Scalar {
match dim {
Dimension3::X => &self.x,
Dimension3::Y => &self.y,
Dimension3::Z => &self.z,
}
}
}
impl IndexMut<Dimension3> for Vector3 {
#[inline]
fn index_mut(&mut self, dim: Dimension3) -> &mut Scalar {
match dim {
Dimension3::X => &mut self.x,
Dimension3::Y => &mut self.y,
Dimension3::Z => &mut self.z,
}
}
}
impl Add<Vector3> for Vector3 {
type Output = Vector3;
#[inline]
fn add(self, v: Vector3) -> Vector3 {
Vector3::new(self.x + v.x, self.y + v.y, self.z + v.z)
}
}
impl AddAssign<Vector3> for Vector3 {
#[inline]
fn add_assign(&mut self, v: Vector3) {
self.x += v.x;
self.y += v.y;
self.z += v.z;
}
}
impl Sub<Vector3> for Vector3 {
type Output = Vector3;
#[inline]
fn sub(self, v: Vector3) -> Vector3 {
Vector3::new(self.x - v.x, self.y - v.y, self.z - v.z)
}
}
impl SubAssign<Vector3> for Vector3 {
#[inline]
fn sub_assign(&mut self, v: Vector3) {
self.x -= v.x;
self.y -= v.y;
self.z -= v.z;
}
}
impl Neg for Vector3 {
type Output = Vector3;
#[inline]
fn neg(self) -> Vector3 {
Vector3::new(-self.x, -self.y, -self.z)
}
}
impl Mul<Scalar> for Vector3 {
type Output = Vector3;
#[inline]
fn mul(self, s: Scalar) -> Vector3 {
Vector3::new(self.x * s, self.y * s, self.z * s)
}
}
impl Mul<Vector3> for Scalar {
type Output = Vector3;
#[inline]
fn mul(self, v: Vector3) -> Vector3 {
v * self
}
}
impl MulAssign<Scalar> for Vector3 {
#[inline]
fn mul_assign(&mut self, s: Scalar) {
self.x *= s;
self.y *= s;
self.z *= s;
}
}
impl Div<Scalar> for Vector3 {
type Output = Vector3;
#[inline]
fn div(self, s: Scalar) -> Vector3 {
Vector3::new(self.x / s, self.y / s, self.z / s)
}
}
impl DivAssign<Scalar> for Vector3 {
#[inline]
fn div_assign(&mut self, s: Scalar) {
self.x /= s;
self.y /= s;
self.z /= s;
}
}
impl Transform<Vector3> for Transform3 {
type Output = Vector3;
/// Transforms a vector.
#[inline]
fn transform(&self, v: Vector3) -> Vector3 {
&*self.forward * v
}
}
impl From<Point3> for Vector3 {
#[inline]
fn from(p: Point3) -> Vector3 {
Vector3::new(p.x, p.y, p.z)
}
}
impl From<Normal3> for Vector3 {
#[inline]
fn from(n: Normal3) -> Self {
Vector3::new(n.x, n.y, n.z)
}
}
// ===== Normal3 ===============================================================================================================================================
impl Normal3 {
    /// Creates and returns a new `Normal3` with x, y and z coordinates.
    #[inline]
    pub fn new(x: Scalar, y: Scalar, z: Scalar) -> Normal3 {
        Normal3 { x, y, z }
    }
    /// Returns a `Normal3` which represents the zero normal (x = 0, y = 0 and z = 0).
    #[inline]
    pub fn zero() -> Normal3 {
        Normal3::new(0.0, 0.0, 0.0)
    }
    /// Returns a `Normal3` of length 1 which represents the X axis (x = 1, y = 0 and z = 0).
    #[inline]
    pub fn x_axis() -> Normal3 {
        Normal3::new(1.0, 0.0, 0.0)
    }
    /// Returns a `Normal3` of length 1 which represents the Y axis (x = 0, y = 1 and z = 0).
    #[inline]
    pub fn y_axis() -> Normal3 {
        Normal3::new(0.0, 1.0, 0.0)
    }
    /// Returns a `Normal3` of length 1 which represents the Z axis (x = 0, y = 0 and z = 1).
    #[inline]
    pub fn z_axis() -> Normal3 {
        Normal3::new(0.0, 0.0, 1.0)
    }
    /// Returns a `Normal3` of length 1 which represents the axis specified by a dimension.
    #[inline]
    pub fn axis(dim: Dimension3) -> Normal3 {
        match dim {
            Dimension3::X => Normal3::x_axis(),
            Dimension3::Y => Normal3::y_axis(),
            Dimension3::Z => Normal3::z_axis(),
        }
    }
    /// Creates and returns a new `Normal3` which points in the same direction as this normal, but with length 1.
    ///
    /// NOTE(review): a zero-length normal divides by zero here and yields non-finite components —
    /// confirm callers never normalize the zero normal.
    #[inline]
    pub fn normalize(self) -> Normal3 {
        self / self.length()
    }
    /// Returns the dimension with the smallest extent of this normal.
    #[inline]
    pub fn min_dimension(self) -> Dimension3 {
        // Compare absolute values; ties resolve in X, Y, Z order.
        let Normal3 { x, y, z } = self.abs();
        if x <= y && x <= z { Dimension3::X } else if y <= z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the dimension with the largest extent of this normal.
    #[inline]
    pub fn max_dimension(self) -> Dimension3 {
        // Compare absolute values; ties resolve in Z, Y, X order (strict comparisons).
        let Normal3 { x, y, z } = self.abs();
        if x > y && x > z { Dimension3::X } else if y > z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the element-wise floor of this normal.
    #[inline]
    pub fn floor(self) -> Normal3 {
        Normal3::new(self.x.floor(), self.y.floor(), self.z.floor())
    }
    /// Returns the element-wise ceiling of this normal.
    #[inline]
    pub fn ceil(self) -> Normal3 {
        Normal3::new(self.x.ceil(), self.y.ceil(), self.z.ceil())
    }
    /// Returns the element-wise rounded value of this normal.
    #[inline]
    pub fn round(self) -> Normal3 {
        Normal3::new(self.x.round(), self.y.round(), self.z.round())
    }
    /// Returns the element-wise truncated value of this normal.
    #[inline]
    pub fn trunc(self) -> Normal3 {
        Normal3::new(self.x.trunc(), self.y.trunc(), self.z.trunc())
    }
    /// Returns the element-wise fractional value of this normal.
    #[inline]
    pub fn fract(self) -> Normal3 {
        Normal3::new(self.x.fract(), self.y.fract(), self.z.fract())
    }
    /// Returns the element-wise absolute value of this normal.
    #[inline]
    pub fn abs(self) -> Normal3 {
        Normal3::new(self.x.abs(), self.y.abs(), self.z.abs())
    }
    /// Returns a normal with a permutation of the elements of this normal.
    ///
    /// Each output coordinate is selected from this normal by the corresponding dimension argument.
    #[inline]
    pub fn permute(self, dim_x: Dimension3, dim_y: Dimension3, dim_z: Dimension3) -> Normal3 {
        Normal3::new(self[dim_x], self[dim_y], self[dim_z])
    }
}
impl MinMax for Normal3 {
    /// Returns the element-wise minimum of two normals.
    #[inline]
    fn min(self, n: Normal3) -> Normal3 {
        Normal3::new(min(self.x, n.x), min(self.y, n.y), min(self.z, n.z))
    }
    /// Returns the element-wise maximum of two normals.
    #[inline]
    fn max(self, n: Normal3) -> Normal3 {
        Normal3::new(max(self.x, n.x), max(self.y, n.y), max(self.z, n.z))
    }
}
impl Length for Normal3 {
    type Output = Scalar;
    /// Computes and returns the length of a normal.
    #[inline]
    fn length(self) -> Scalar {
        Scalar::sqrt(dot(self, self))
    }
}
impl RelativeLength for Normal3 {
    /// Returns the shortest of two normals.
    ///
    /// This is more computationally efficient than computing the lengths of the normals and comparing them,
    /// because square root operations that are needed for computing the lengths are avoided.
    #[inline]
    fn shortest(self, n: Normal3) -> Normal3 {
        if dot(self, self) <= dot(n, n) { self } else { n }
    }
    /// Returns the longest of two normals.
    ///
    /// This is more computationally efficient than computing the lengths of the normals and comparing them,
    /// because square root operations that are needed for computing the lengths are avoided.
    #[inline]
    fn longest(self, n: Normal3) -> Normal3 {
        if dot(self, self) > dot(n, n) { self } else { n }
    }
}
impl DotProduct<Normal3> for Normal3 {
    type Output = Scalar;
    /// Computes and returns the dot product between two normals.
    #[inline]
    fn dot(self, n: Normal3) -> Scalar {
        self.x * n.x + self.y * n.y + self.z * n.z
    }
}
impl DotProduct<Vector3> for Normal3 {
    type Output = Scalar;
    /// Computes and returns the dot product between this normal and a vector.
    #[inline]
    fn dot(self, v: Vector3) -> Scalar {
        self.x * v.x + self.y * v.y + self.z * v.z
    }
}
impl Index<Dimension3> for Normal3 {
    type Output = Scalar;
    /// Returns a reference to the coordinate of this normal selected by `dim`.
    #[inline]
    fn index(&self, dim: Dimension3) -> &Scalar {
        match dim {
            Dimension3::X => &self.x,
            Dimension3::Y => &self.y,
            Dimension3::Z => &self.z,
        }
    }
}
impl IndexMut<Dimension3> for Normal3 {
    /// Returns a mutable reference to the coordinate of this normal selected by `dim`.
    #[inline]
    fn index_mut(&mut self, dim: Dimension3) -> &mut Scalar {
        match dim {
            Dimension3::X => &mut self.x,
            Dimension3::Y => &mut self.y,
            Dimension3::Z => &mut self.z,
        }
    }
}
impl Add<Normal3> for Normal3 {
    type Output = Normal3;
    /// Adds two normals element-wise.
    #[inline]
    fn add(self, n: Normal3) -> Normal3 {
        Normal3::new(self.x + n.x, self.y + n.y, self.z + n.z)
    }
}
impl AddAssign<Normal3> for Normal3 {
    /// Adds a normal to this normal element-wise, in place.
    #[inline]
    fn add_assign(&mut self, n: Normal3) {
        self.x += n.x;
        self.y += n.y;
        self.z += n.z;
    }
}
impl Sub<Normal3> for Normal3 {
    type Output = Normal3;
    /// Subtracts a normal from this normal element-wise.
    #[inline]
    fn sub(self, n: Normal3) -> Normal3 {
        Normal3::new(self.x - n.x, self.y - n.y, self.z - n.z)
    }
}
impl SubAssign<Normal3> for Normal3 {
    /// Subtracts a normal from this normal element-wise, in place.
    #[inline]
    fn sub_assign(&mut self, n: Normal3) {
        self.x -= n.x;
        self.y -= n.y;
        self.z -= n.z;
    }
}
impl Neg for Normal3 {
    type Output = Normal3;
    /// Returns the negation (opposite direction) of this normal.
    #[inline]
    fn neg(self) -> Normal3 {
        Normal3::new(-self.x, -self.y, -self.z)
    }
}
impl Mul<Scalar> for Normal3 {
    type Output = Normal3;
    /// Scales this normal by a scalar factor.
    #[inline]
    fn mul(self, s: Scalar) -> Normal3 {
        Normal3::new(self.x * s, self.y * s, self.z * s)
    }
}
impl Mul<Normal3> for Scalar {
    type Output = Normal3;
    /// Scales a normal by a scalar factor (commutative form; delegates to `Normal3 * Scalar`).
    #[inline]
    fn mul(self, n: Normal3) -> Normal3 {
        n * self
    }
}
impl MulAssign<Scalar> for Normal3 {
    /// Scales this normal by a scalar factor, in place.
    #[inline]
    fn mul_assign(&mut self, s: Scalar) {
        self.x *= s;
        self.y *= s;
        self.z *= s;
    }
}
impl Div<Scalar> for Normal3 {
    type Output = Normal3;
    /// Divides this normal by a scalar factor element-wise.
    #[inline]
    fn div(self, s: Scalar) -> Normal3 {
        Normal3::new(self.x / s, self.y / s, self.z / s)
    }
}
impl DivAssign<Scalar> for Normal3 {
    /// Divides this normal by a scalar factor element-wise, in place.
    #[inline]
    fn div_assign(&mut self, s: Scalar) {
        self.x /= s;
        self.y /= s;
        self.z /= s;
    }
}
impl Transform<Normal3> for Transform3 {
    type Output = Normal3;
    /// Transforms a normal.
    ///
    /// Note that transforming a normal is different from transforming a vector; normals are transformed by applying the transpose of the inverse
    /// transformation matrix. This difference is the main reason why there is a separate type for normals, which should be used instead of `Vector3`.
    #[inline]
    fn transform(&self, n: Normal3) -> Normal3 {
        // Normals are transformed by the transpose of the inverse.
        // Multiplying a row vector on the left (`Vector3 * &Matrix4x4`) is equivalent to
        // multiplying the transposed matrix by a column vector, so no explicit transpose is built.
        Normal3::from(Vector3::from(n) * &*self.inverse)
    }
}
impl From<Vector3> for Normal3 {
    /// Converts a vector to a normal with the same coordinates.
    #[inline]
    fn from(v: Vector3) -> Self {
        Normal3::new(v.x, v.y, v.z)
    }
}
// ===== Ray3 ==================================================================================================================================================
impl Ray3 {
    /// Creates a new `Ray3` from an origin point and a direction vector.
    #[inline]
    pub fn new(origin: Point3, direction: Vector3) -> Ray3 {
        Ray3 { origin, direction }
    }
    /// Evaluates this ray at a parametric distance, returning `origin + direction * distance`.
    #[inline]
    pub fn at(&self, distance: Scalar) -> Point3 {
        let offset = self.direction * distance;
        self.origin + offset
    }
}
impl Transform<&Ray3> for Transform3 {
    type Output = Ray3;
    /// Transforms a ray.
    ///
    /// The origin is transformed as a point (translation applies) and the direction as a vector
    /// (translation is ignored), via the respective `Transform` impls.
    #[inline]
    fn transform(&self, ray: &Ray3) -> Ray3 {
        Ray3::new(self.transform(ray.origin), self.transform(ray.direction))
    }
}
// ===== BoundingBox3 ==========================================================================================================================================
impl BoundingBox3 {
    /// Creates and returns a new `BoundingBox3` with minimum and maximum corner points.
    #[inline]
    pub fn new(min: Point3, max: Point3) -> BoundingBox3 {
        BoundingBox3 { min, max }
    }
    /// Returns an empty `BoundingBox3`.
    ///
    /// The corners are inverted (min = +inf, max = -inf) so that taking the union with any
    /// point or box yields that point or box.
    #[inline]
    pub fn empty() -> BoundingBox3 {
        BoundingBox3::new(
            Point3::new(Scalar::INFINITY, Scalar::INFINITY, Scalar::INFINITY),
            Point3::new(Scalar::NEG_INFINITY, Scalar::NEG_INFINITY, Scalar::NEG_INFINITY),
        )
    }
    /// Returns an infinite `BoundingBox3` which contains all of 3D space.
    #[inline]
    pub fn infinite() -> BoundingBox3 {
        BoundingBox3::new(
            Point3::new(Scalar::NEG_INFINITY, Scalar::NEG_INFINITY, Scalar::NEG_INFINITY),
            Point3::new(Scalar::INFINITY, Scalar::INFINITY, Scalar::INFINITY),
        )
    }
    /// Returns the width (extent in the X dimension) of this bounding box.
    #[inline]
    pub fn width(&self) -> Scalar {
        self.max.x - self.min.x
    }
    /// Returns the height (extent in the Y dimension) of this bounding box.
    #[inline]
    pub fn height(&self) -> Scalar {
        self.max.y - self.min.y
    }
    /// Returns the depth (extent in the Z dimension) of this bounding box.
    #[inline]
    pub fn depth(&self) -> Scalar {
        self.max.z - self.min.z
    }
    /// Returns the extent of this bounding box in a dimension.
    #[inline]
    pub fn extent(&self, dim: Dimension3) -> Scalar {
        match dim {
            Dimension3::X => self.width(),
            Dimension3::Y => self.height(),
            Dimension3::Z => self.depth(),
        }
    }
    /// Returns the dimension with the smallest extent of this bounding box.
    #[inline]
    pub fn min_dimension(&self) -> Dimension3 {
        let d = self.diagonal();
        if d.x <= d.y && d.x <= d.z { Dimension3::X } else if d.y <= d.z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the dimension with the largest extent of this bounding box.
    #[inline]
    pub fn max_dimension(&self) -> Dimension3 {
        let d = self.diagonal();
        if d.x > d.y && d.x > d.z { Dimension3::X } else if d.y > d.z { Dimension3::Y } else { Dimension3::Z }
    }
    /// Returns the surface area of this bounding box.
    #[inline]
    pub fn surface_area(&self) -> Scalar {
        let d = self.diagonal();
        2.0 * (d.x * d.y + d.x * d.z + d.y * d.z)
    }
    /// Returns the volume (width times height times depth) of this bounding box.
    #[inline]
    pub fn volume(&self) -> Scalar {
        let d = self.diagonal();
        d.x * d.y * d.z
    }
    /// Returns the center point of this bounding box.
    #[inline]
    pub fn center(&self) -> Point3 {
        self.min + self.diagonal() * 0.5
    }
    /// Returns a corner point of this bounding box, indicated by an index (which must be between 0 and 7 inclusive).
    ///
    /// Bits 0, 1 and 2 of the index select the X, Y and Z coordinate respectively:
    /// a zero bit picks the minimum corner's coordinate, a one bit the maximum corner's.
    #[inline]
    pub fn corner(&self, index: usize) -> Point3 {
        debug_assert!(index < 8, "Invalid corner index: {}", index);
        let x = if index & 0b001 == 0 { self.min.x } else { self.max.x };
        let y = if index & 0b010 == 0 { self.min.y } else { self.max.y };
        let z = if index & 0b100 == 0 { self.min.z } else { self.max.z };
        Point3::new(x, y, z)
    }
    /// Returns the diagonal of this bounding box as a vector.
    #[inline]
    pub fn diagonal(&self) -> Vector3 {
        self.max - self.min
    }
    /// Checks if two bounding boxes overlap.
    #[inline]
    pub fn overlaps(&self, bb: &BoundingBox3) -> bool {
        //@formatter:off
        self.max.x >= bb.min.x && self.min.x <= bb.max.x &&
        self.max.y >= bb.min.y && self.min.y <= bb.max.y &&
        self.max.z >= bb.min.z && self.min.z <= bb.max.z
        //@formatter:on
    }
    /// Checks if a point is inside this bounding box.
    ///
    /// Boundaries are inclusive: a point exactly on a face counts as inside.
    #[inline]
    pub fn is_inside(&self, p: Point3) -> bool {
        //@formatter:off
        p.x >= self.min.x && p.x <= self.max.x &&
        p.y >= self.min.y && p.y <= self.max.y &&
        p.z >= self.min.z && p.z <= self.max.z
        //@formatter:on
    }
    /// Computes the closest intersection of this bounding box with a ray within a range.
    ///
    /// Uses the "slab" method: for each axis, the parametric interval in which the ray is between
    /// the two planes of that axis is intersected with the running `[start, end]` interval;
    /// the ray hits the box iff the interval never becomes empty.
    ///
    /// NOTE(review): a zero direction component makes `d1`/`d2` infinite, which still clamps
    /// correctly — unless the origin lies exactly on a slab plane, where 0/0 yields NaN and the
    /// result depends on how the file-local `min`/`max` order NaN. Confirm that is acceptable.
    ///
    /// Returns a `Some` containing the closest intersection, or `None` if the ray does not intersect the bounding box within the range.
    pub fn intersect_ray(&self, ray: &Ray3, range: &Range<Scalar>) -> Option<Scalar> {
        let (start, end) = (range.start, range.end);
        // X slab.
        let d1 = (self.min.x - ray.origin.x) / ray.direction.x;
        let d2 = (self.max.x - ray.origin.x) / ray.direction.x;
        let start = max(start, min(d1, d2));
        let end = min(end, max(d1, d2));
        if start > end {
            return None;
        }
        // Y slab.
        let d1 = (self.min.y - ray.origin.y) / ray.direction.y;
        let d2 = (self.max.y - ray.origin.y) / ray.direction.y;
        let start = max(start, min(d1, d2));
        let end = min(end, max(d1, d2));
        if start > end {
            return None;
        }
        // Z slab.
        let d1 = (self.min.z - ray.origin.z) / ray.direction.z;
        let d2 = (self.max.z - ray.origin.z) / ray.direction.z;
        let start = max(start, min(d1, d2));
        let end = min(end, max(d1, d2));
        if start <= end {
            Some(start)
        } else {
            None
        }
    }
}
impl Union<&BoundingBox3> for BoundingBox3 {
    type Output = BoundingBox3;
    /// Computes and returns the union between two bounding boxes.
    ///
    /// The union is the smallest bounding box that contains both bounding boxes.
    #[inline]
    fn union(self, bb: &BoundingBox3) -> BoundingBox3 {
        BoundingBox3::new(min(self.min, bb.min), max(self.max, bb.max))
    }
}
impl Union<Point3> for BoundingBox3 {
    type Output = BoundingBox3;
    /// Computes and returns the union between this bounding box and a point.
    ///
    /// The union is the smallest bounding box that contains both the bounding box and the point.
    #[inline]
    fn union(self, p: Point3) -> BoundingBox3 {
        BoundingBox3::new(min(self.min, p), max(self.max, p))
    }
}
impl Intersection<&BoundingBox3> for BoundingBox3 {
    type Output = BoundingBox3;
    /// Computes and returns the intersection between two bounding boxes.
    ///
    /// The intersection is the largest bounding box that contains the region where the two bounding boxes overlap.
    ///
    /// Returns `Some` when the bounding boxes overlap; `None` if the bounding boxes do not overlap.
    #[inline]
    fn intersection(self, bb: &BoundingBox3) -> Option<BoundingBox3> {
        if self.overlaps(bb) {
            Some(BoundingBox3::new(max(self.min, bb.min), min(self.max, bb.max)))
        } else {
            None
        }
    }
}
impl Transform<&BoundingBox3> for Transform3 {
    type Output = BoundingBox3;
    /// Transforms a bounding box.
    ///
    /// Transforms the minimum corner and the diagonal once, reconstructs all eight transformed
    /// corners from them (bit k of `i` selects whether the corner includes the diagonal's k-th
    /// axis component), and returns the axis-aligned box bounding those corners.
    ///
    /// NOTE(review): this reconstruction is exact for affine transforms; for matrices with a
    /// projective component (the w-divide in `Mul<Point3> for &Matrix4x4`) it differs from
    /// transforming each corner individually — confirm only affine transforms are used here.
    fn transform(&self, bb: &BoundingBox3) -> BoundingBox3 {
        let o = self.transform(bb.min);
        let d = self.transform(bb.diagonal());
        let (mut min_corner, mut max_corner) = (o, o);
        for i in 1..8 {
            let mut corner = o;
            if i & 0b001 != 0 { corner.x += d.x; }
            if i & 0b010 != 0 { corner.y += d.y; }
            if i & 0b100 != 0 { corner.z += d.z; }
            min_corner = min(min_corner, corner);
            max_corner = max(max_corner, corner);
        }
        BoundingBox3::new(min_corner, max_corner)
    }
}
// ===== Matrix4x4 =============================================================================================================================================
impl Matrix4x4 {
    /// Creates and returns a new `Matrix4x4` with the specified elements, in row-major order.
    #[inline]
    pub fn new(m: [Scalar; 16]) -> Matrix4x4 {
        Matrix4x4 { m }
    }
    /// Returns a `Matrix4x4` which represents the identity matrix.
    #[inline]
    pub fn identity() -> Matrix4x4 {
        Matrix4x4::new([
            1.0, 0.0, 0.0, 0.0,
            0.0, 1.0, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a translation matrix which translates over a vector.
    #[inline]
    pub fn translate(v: Vector3) -> Matrix4x4 {
        Matrix4x4::new([
            1.0, 0.0, 0.0, v.x,
            0.0, 1.0, 0.0, v.y,
            0.0, 0.0, 1.0, v.z,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a rotation matrix which rotates around the X axis.
    #[inline]
    pub fn rotate_x(angle: Scalar) -> Matrix4x4 {
        let (sin, cos) = angle.sin_cos();
        Matrix4x4::new([
            1.0, 0.0, 0.0, 0.0,
            0.0, cos, -sin, 0.0,
            0.0, sin, cos, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a rotation matrix which rotates around the Y axis.
    #[inline]
    pub fn rotate_y(angle: Scalar) -> Matrix4x4 {
        let (sin, cos) = angle.sin_cos();
        Matrix4x4::new([
            cos, 0.0, sin, 0.0,
            0.0, 1.0, 0.0, 0.0,
            -sin, 0.0, cos, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a rotation matrix which rotates around the Z axis.
    #[inline]
    pub fn rotate_z(angle: Scalar) -> Matrix4x4 {
        let (sin, cos) = angle.sin_cos();
        Matrix4x4::new([
            cos, -sin, 0.0, 0.0,
            sin, cos, 0.0, 0.0,
            0.0, 0.0, 1.0, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a rotation matrix which rotates around an axis.
    #[inline]
    pub fn rotate_axis(axis: Vector3, angle: Scalar) -> Matrix4x4 {
        // Rodrigues' rotation formula in matrix form; the axis is normalized first.
        let a = axis.normalize();
        let (s, c) = angle.sin_cos();
        let cc = 1.0 - c;
        let (t1, t2, t3) = (a.x * a.y * cc, a.x * a.z * cc, a.y * a.z * cc);
        let (u1, u2, u3) = (a.x * s, a.y * s, a.z * s);
        Matrix4x4::new([
            a.x * a.x * cc + c, t1 - u3, t2 + u2, 0.0,
            t1 + u3, a.y * a.y * cc + c, t3 - u1, 0.0,
            t2 - u2, t3 + u1, a.z * a.z * cc + c, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a matrix which scales by factors in the X, Y and Z dimensions.
    #[inline]
    pub fn scale(sx: Scalar, sy: Scalar, sz: Scalar) -> Matrix4x4 {
        Matrix4x4::new([
            sx, 0.0, 0.0, 0.0,
            0.0, sy, 0.0, 0.0,
            0.0, 0.0, sz, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns a matrix which scales uniformly in all dimensions by a factor.
    #[inline]
    pub fn scale_uniform(s: Scalar) -> Matrix4x4 {
        Matrix4x4::new([
            s, 0.0, 0.0, 0.0,
            0.0, s, 0.0, 0.0,
            0.0, 0.0, s, 0.0,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns the inverse of a look-at transformation matrix which looks from a point at a target, with an 'up' direction.
    #[inline]
    pub fn inverse_look_at(from: Point3, target: Point3, up: Vector3) -> Matrix4x4 {
        // Build an orthonormal basis (right, new_up, direction) and place it in the columns,
        // with the eye position as the translation column.
        let direction = (target - from).normalize();
        let right = up.normalize().cross(direction).normalize();
        let new_up = direction.cross(right);
        Matrix4x4::new([
            right.x, new_up.x, direction.x, from.x,
            right.y, new_up.y, direction.y, from.y,
            right.z, new_up.z, direction.z, from.z,
            0.0, 0.0, 0.0, 1.0,
        ])
    }
    /// Returns an element at a row and column of the matrix.
    #[inline]
    pub fn get(&self, row: usize, col: usize) -> Scalar {
        debug_assert!(row < 4, "Invalid row index: {}", row);
        // BUG FIX: the message previously interpolated `row` instead of `col`.
        debug_assert!(col < 4, "Invalid column index: {}", col);
        self.m[row * 4 + col]
    }
    /// Returns a mutable reference to an element at a row and column of the matrix.
    #[inline]
    pub fn get_mut(&mut self, row: usize, col: usize) -> &mut Scalar {
        debug_assert!(row < 4, "Invalid row index: {}", row);
        // BUG FIX: the message previously interpolated `row` instead of `col`.
        debug_assert!(col < 4, "Invalid column index: {}", col);
        &mut self.m[row * 4 + col]
    }
    /// Sets the value of an element at a row and column of the matrix.
    #[inline]
    pub fn set(&mut self, row: usize, col: usize, value: Scalar) {
        debug_assert!(row < 4, "Invalid row index: {}", row);
        // BUG FIX: the message previously interpolated `row` instead of `col`.
        debug_assert!(col < 4, "Invalid column index: {}", col);
        self.m[row * 4 + col] = value;
    }
    /// Returns the transpose of this matrix.
    #[inline]
    pub fn transpose(&self) -> Matrix4x4 {
        Matrix4x4::new([
            self.m[0], self.m[4], self.m[8], self.m[12],
            self.m[1], self.m[5], self.m[9], self.m[13],
            self.m[2], self.m[6], self.m[10], self.m[14],
            self.m[3], self.m[7], self.m[11], self.m[15],
        ])
    }
    /// Computes and returns the inverse of this matrix.
    ///
    /// If this matrix is singular, a `NonInvertibleMatrixError` is returned.
    pub fn inverse(&self) -> Result<Matrix4x4, NonInvertibleMatrixError> {
        // cofactor(i, j) is the signed determinant of the 3x3 submatrix obtained by
        // deleting row i and column j.
        let cofactor = |i, j| {
            let sub = |row, col| self.get(if row < i { row } else { row + 1 }, if col < j { col } else { col + 1 });
            let sign = if (i + j) % 2 == 0 { 1.0 } else { -1.0 };
            sign * (sub(0, 0) * sub(1, 1) * sub(2, 2) + sub(0, 1) * sub(1, 2) * sub(2, 0) + sub(0, 2) * sub(1, 0) * sub(2, 1)
                - sub(0, 0) * sub(1, 2) * sub(2, 1) - sub(0, 1) * sub(1, 0) * sub(2, 2) - sub(0, 2) * sub(1, 1) * sub(2, 0))
        };
        // The adjugate is the transpose of the cofactor matrix (note the swapped indices).
        let adjugate = Matrix4x4::new([
            cofactor(0, 0), cofactor(1, 0), cofactor(2, 0), cofactor(3, 0),
            cofactor(0, 1), cofactor(1, 1), cofactor(2, 1), cofactor(3, 1),
            cofactor(0, 2), cofactor(1, 2), cofactor(2, 2), cofactor(3, 2),
            cofactor(0, 3), cofactor(1, 3), cofactor(2, 3), cofactor(3, 3),
        ]);
        // Determinant via Laplace expansion along the first row, reusing the adjugate's first column.
        let det = self.m[0] * adjugate.m[0] + self.m[1] * adjugate.m[4] + self.m[2] * adjugate.m[8] + self.m[3] * adjugate.m[12];
        if det != 0.0 {
            Ok(&adjugate * det.recip())
        } else {
            Err(NonInvertibleMatrixError)
        }
    }
}
impl Mul<Scalar> for &Matrix4x4 {
    type Output = Matrix4x4;
    /// Multiplies every element of the matrix by a scalar.
    #[inline]
    fn mul(self, s: Scalar) -> Matrix4x4 {
        Matrix4x4::new(array![|i| self.m[i] * s; 16])
    }
}
impl Mul<&Matrix4x4> for Scalar {
    type Output = Matrix4x4;
    /// Multiplies every element of the matrix by a scalar (commutative form).
    #[inline]
    fn mul(self, m: &Matrix4x4) -> Matrix4x4 {
        m * self
    }
}
// NOTE(review): this impl is on `&mut Matrix4x4`, so `*=` applies to `&mut Matrix4x4` values
// rather than to `Matrix4x4` directly — confirm that is the intended receiver.
impl MulAssign<Scalar> for &mut Matrix4x4 {
    /// Multiplies every element of the matrix by a scalar, in place.
    #[inline]
    fn mul_assign(&mut self, s: Scalar) {
        for m in &mut self.m { *m *= s; }
    }
}
impl Div<Scalar> for &Matrix4x4 {
    type Output = Matrix4x4;
    /// Divides every element of the matrix by a scalar.
    #[inline]
    fn div(self, s: Scalar) -> Matrix4x4 {
        Matrix4x4::new(array![|i| self.m[i] / s; 16])
    }
}
// NOTE(review): like `MulAssign` above, this impl is on `&mut Matrix4x4` — confirm intended.
impl DivAssign<Scalar> for &mut Matrix4x4 {
    /// Divides every element of the matrix by a scalar, in place.
    #[inline]
    fn div_assign(&mut self, s: Scalar) {
        for m in &mut self.m { *m /= s; }
    }
}
impl Mul<Point3> for &Matrix4x4 {
    type Output = Point3;
    /// Transforms a point, treating it as a homogeneous column vector (w = 1) and applying the
    /// perspective divide by the resulting w.
    ///
    /// NOTE(review): if the transformed w is zero the divide yields non-finite coordinates —
    /// confirm callers only use invertible affine/projective matrices where this cannot occur.
    #[inline]
    fn mul(self, p: Point3) -> Point3 {
        let x = self.m[0] * p.x + self.m[1] * p.y + self.m[2] * p.z + self.m[3];
        let y = self.m[4] * p.x + self.m[5] * p.y + self.m[6] * p.z + self.m[7];
        let z = self.m[8] * p.x + self.m[9] * p.y + self.m[10] * p.z + self.m[11];
        let w = self.m[12] * p.x + self.m[13] * p.y + self.m[14] * p.z + self.m[15];
        Point3::new(x / w, y / w, z / w)
    }
}
impl Mul<&Matrix4x4> for Point3 {
    type Output = Point3;
    /// Transforms a point as a homogeneous row vector (point on the left), which is equivalent
    /// to multiplying by the transposed matrix, including the perspective divide by w.
    #[inline]
    fn mul(self, m: &Matrix4x4) -> Point3 {
        let x = self.x * m.m[0] + self.y * m.m[4] + self.z * m.m[8] + m.m[12];
        let y = self.x * m.m[1] + self.y * m.m[5] + self.z * m.m[9] + m.m[13];
        let z = self.x * m.m[2] + self.y * m.m[6] + self.z * m.m[10] + m.m[14];
        let w = self.x * m.m[3] + self.y * m.m[7] + self.z * m.m[11] + m.m[15];
        Point3::new(x / w, y / w, z / w)
    }
}
impl Mul<Vector3> for &Matrix4x4 {
    type Output = Vector3;
    /// Transforms a vector as a column vector using only the upper-left 3x3 part of the matrix;
    /// the translation column and the bottom row are ignored (vectors are directions).
    #[inline]
    fn mul(self, v: Vector3) -> Vector3 {
        let x = self.m[0] * v.x + self.m[1] * v.y + self.m[2] * v.z;
        let y = self.m[4] * v.x + self.m[5] * v.y + self.m[6] * v.z;
        let z = self.m[8] * v.x + self.m[9] * v.y + self.m[10] * v.z;
        Vector3::new(x, y, z)
    }
}
impl Mul<&Matrix4x4> for Vector3 {
    type Output = Vector3;
    /// Transforms a vector as a row vector (vector on the left), equivalent to multiplying by
    /// the transposed upper-left 3x3 part; used by the normal transform to avoid building an
    /// explicit transpose.
    #[inline]
    fn mul(self, m: &Matrix4x4) -> Vector3 {
        let x = self.x * m.m[0] + self.y * m.m[4] + self.z * m.m[8];
        let y = self.x * m.m[1] + self.y * m.m[5] + self.z * m.m[9];
        let z = self.x * m.m[2] + self.y * m.m[6] + self.z * m.m[10];
        Vector3::new(x, y, z)
    }
}
impl Mul<&Matrix4x4> for &Matrix4x4 {
    type Output = Matrix4x4;
    /// Computes the matrix product `self * m` (row-major, fully unrolled).
    #[inline]
    fn mul(self, m: &Matrix4x4) -> Matrix4x4 {
        Matrix4x4::new([
            self.m[0] * m.m[0] + self.m[1] * m.m[4] + self.m[2] * m.m[8] + self.m[3] * m.m[12],
            self.m[0] * m.m[1] + self.m[1] * m.m[5] + self.m[2] * m.m[9] + self.m[3] * m.m[13],
            self.m[0] * m.m[2] + self.m[1] * m.m[6] + self.m[2] * m.m[10] + self.m[3] * m.m[14],
            self.m[0] * m.m[3] + self.m[1] * m.m[7] + self.m[2] * m.m[11] + self.m[3] * m.m[15],
            self.m[4] * m.m[0] + self.m[5] * m.m[4] + self.m[6] * m.m[8] + self.m[7] * m.m[12],
            self.m[4] * m.m[1] + self.m[5] * m.m[5] + self.m[6] * m.m[9] + self.m[7] * m.m[13],
            self.m[4] * m.m[2] + self.m[5] * m.m[6] + self.m[6] * m.m[10] + self.m[7] * m.m[14],
            self.m[4] * m.m[3] + self.m[5] * m.m[7] + self.m[6] * m.m[11] + self.m[7] * m.m[15],
            self.m[8] * m.m[0] + self.m[9] * m.m[4] + self.m[10] * m.m[8] + self.m[11] * m.m[12],
            self.m[8] * m.m[1] + self.m[9] * m.m[5] + self.m[10] * m.m[9] + self.m[11] * m.m[13],
            self.m[8] * m.m[2] + self.m[9] * m.m[6] + self.m[10] * m.m[10] + self.m[11] * m.m[14],
            self.m[8] * m.m[3] + self.m[9] * m.m[7] + self.m[10] * m.m[11] + self.m[11] * m.m[15],
            self.m[12] * m.m[0] + self.m[13] * m.m[4] + self.m[14] * m.m[8] + self.m[15] * m.m[12],
            self.m[12] * m.m[1] + self.m[13] * m.m[5] + self.m[14] * m.m[9] + self.m[15] * m.m[13],
            self.m[12] * m.m[2] + self.m[13] * m.m[6] + self.m[14] * m.m[10] + self.m[15] * m.m[14],
            self.m[12] * m.m[3] + self.m[13] * m.m[7] + self.m[14] * m.m[11] + self.m[15] * m.m[15],
        ])
    }
}
// ===== Transform3 ============================================================================================================================================
impl Transform3 {
    /// Creates and returns a new `Transform3` with a transformation matrix and its inverse.
    ///
    /// The matrices are shared via `Arc` so that derived transforms (e.g. `inverse()`) can reuse
    /// them without copying.
    #[inline]
    pub fn new(forward: Arc<Matrix4x4>, inverse: Arc<Matrix4x4>) -> Transform3 {
        Transform3 { forward, inverse }
    }
    /// Returns a `Transform3` which represents the identity transform.
    #[inline]
    pub fn identity() -> Transform3 {
        // The identity is its own inverse, so both directions share one matrix allocation.
        let forward = Arc::new(Matrix4x4::identity());
        let inverse = forward.clone();
        Transform3::new(forward, inverse)
    }
    /// Returns a translation transform over a vector.
    #[inline]
    pub fn translate(v: Vector3) -> Transform3 {
        // The inverse of translating by v is translating by -v.
        Transform3::new(Arc::new(Matrix4x4::translate(v)), Arc::new(Matrix4x4::translate(-v)))
    }
    /// Returns a rotation transform which rotates around the X axis.
    #[inline]
    pub fn rotate_x(angle: Scalar) -> Transform3 {
        // The inverse of a pure rotation matrix is its transpose.
        let forward = Matrix4x4::rotate_x(angle);
        let inverse = forward.transpose();
        Transform3::new(Arc::new(forward), Arc::new(inverse))
    }
    /// Returns a rotation transform which rotates around the Y axis.
    #[inline]
    pub fn rotate_y(angle: Scalar) -> Transform3 {
        let forward = Matrix4x4::rotate_y(angle);
        let inverse = forward.transpose();
        Transform3::new(Arc::new(forward), Arc::new(inverse))
    }
    /// Returns a rotation transform which rotates around the Z axis.
    #[inline]
    pub fn rotate_z(angle: Scalar) -> Transform3 {
        let forward = Matrix4x4::rotate_z(angle);
        let inverse = forward.transpose();
        Transform3::new(Arc::new(forward), Arc::new(inverse))
    }
    /// Returns a rotation transform which rotates around an axis.
    #[inline]
    pub fn rotate_axis(axis: Vector3, angle: Scalar) -> Transform3 {
        let forward = Matrix4x4::rotate_axis(axis, angle);
        let inverse = forward.transpose();
        Transform3::new(Arc::new(forward), Arc::new(inverse))
    }
    /// Returns a transform which scales by factors in the X, Y and Z dimensions.
    ///
    /// NOTE(review): a zero scale factor yields a non-finite inverse (`recip` of 0) — confirm
    /// callers never pass zero factors.
    #[inline]
    pub fn scale(sx: Scalar, sy: Scalar, sz: Scalar) -> Transform3 {
        Transform3::new(Arc::new(Matrix4x4::scale(sx, sy, sz)), Arc::new(Matrix4x4::scale(sx.recip(), sy.recip(), sz.recip())))
    }
    /// Returns a transform which scales uniformly in all dimensions by a factor.
    #[inline]
    pub fn scale_uniform(s: Scalar) -> Transform3 {
        Transform3::new(Arc::new(Matrix4x4::scale_uniform(s)), Arc::new(Matrix4x4::scale_uniform(s.recip())))
    }
    // TODO: factory methods for perspective and orthographic projection matrices and transforms
    /// Returns a look-at transform which looks from a point at a target, with an 'up' direction.
    #[inline]
    pub fn look_at(from: Point3, target: Point3, up: Vector3) -> Result<Transform3, NonInvertibleMatrixError> {
        // The inverse matrix is cheap to construct directly; the forward matrix is obtained by inverting it.
        let inverse = Matrix4x4::inverse_look_at(from, target, up);
        let forward = inverse.inverse()?;
        Ok(Transform3::new(Arc::new(forward), Arc::new(inverse)))
    }
    /// Computes and returns a composite transform, which first applies this and then the other transform.
    #[inline]
    pub fn and_then(&self, transform: &Transform3) -> Transform3 {
        // Column-vector convention: (T2 ∘ T1) x = M2 * M1 * x, so the later transform is on the left.
        // The composite inverse applies the inverses in reverse order.
        Transform3::new(Arc::new(&*transform.forward * &*self.forward), Arc::new(&*self.inverse * &*transform.inverse))
    }
    /// Returns the inverse of this transform.
    #[inline]
    pub fn inverse(&self) -> Transform3 {
        // Swapping the shared matrices inverts the transform without any computation.
        Transform3::new(self.inverse.clone(), self.forward.clone())
    }
}
impl TryFrom<Matrix4x4> for Transform3 {
    type Error = NonInvertibleMatrixError;
    /// Creates a `Transform3` from a forward matrix, computing its inverse.
    ///
    /// Returns a `NonInvertibleMatrixError` if the matrix is singular.
    #[inline]
    fn try_from(forward: Matrix4x4) -> Result<Transform3, NonInvertibleMatrixError> {
        let inverse = forward.inverse()?;
        Ok(Transform3::new(Arc::new(forward), Arc::new(inverse)))
    }
}
// ===== Tests =================================================================================================================================================
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn scalar_min() {
let s1: Scalar = -1.0;
let s2: Scalar = 2.0;
assert_eq!(min(s1, s2), -1.0);
}
#[test]
fn scalar_max() {
let s1: Scalar = -1.0;
let s2: Scalar = 2.0;
assert_eq!(max(s1, s2), 2.0);
}
#[test]
fn point2_new() {
let p = Point2::new(-1.0, 2.0);
assert_eq!(p.x, -1.0);
assert_eq!(p.y, 2.0);
}
#[test]
fn point2_origin() {
let p = Point2::origin();
assert_eq!(p.x, 0.0);
assert_eq!(p.y, 0.0);
}
#[test]
fn point2_min_dimension() {
assert_eq!(Point2::new(-1.0, 2.0).min_dimension(), Dimension2::X);
assert_eq!(Point2::new(-3.0, 2.0).min_dimension(), Dimension2::Y);
}
#[test]
fn point2_max_dimension() {
assert_eq!(Point2::new(-1.0, 2.0).max_dimension(), Dimension2::Y);
assert_eq!(Point2::new(-3.0, 2.0).max_dimension(), Dimension2::X);
}
#[test]
fn point2_floor() {
assert_eq!(Point2::new(-1.3, 2.6).floor(), Point2::new(-2.0, 2.0));
}
#[test]
fn point2_ceil() {
assert_eq!(Point2::new(-1.3, 2.6).ceil(), Point2::new(-1.0, 3.0));
}
#[test]
fn point2_round() {
assert_eq!(Point2::new(-1.6, 2.3).round(), Point2::new(-2.0, 2.0));
}
#[test]
fn point2_trunc() {
assert_eq!(Point2::new(-1.3, 2.6).trunc(), Point2::new(-1.0, 2.0));
}
#[test]
fn point2_fract() {
assert_eq!(Point2::new(-1.25, 2.5).fract(), Point2::new(-0.25, 0.5));
}
#[test]
fn point2_abs() {
assert_eq!(Point2::new(-1.3, 2.6).abs(), Point2::new(1.3, 2.6));
}
#[test]
fn point2_permute() {
assert_eq!(Point2::new(1.0, 2.0).permute(Dimension2::X, Dimension2::X), Point2::new(1.0, 1.0));
assert_eq!(Point2::new(1.0, 2.0).permute(Dimension2::X, Dimension2::Y), Point2::new(1.0, 2.0));
assert_eq!(Point2::new(1.0, 2.0).permute(Dimension2::Y, Dimension2::X), Point2::new(2.0, 1.0));
assert_eq!(Point2::new(1.0, 2.0).permute(Dimension2::Y, Dimension2::Y), Point2::new(2.0, 2.0));
}
#[test]
fn point2_min() {
assert_eq!(min(Point2::new(-1.0, 2.0), Point2::new(-3.0, 2.5)), Point2::new(-3.0, 2.0));
}
#[test]
fn point2_max() {
assert_eq!(max(Point2::new(-1.0, 2.0), Point2::new(-3.0, 2.5)), Point2::new(-1.0, 2.5));
}
#[test]
fn point2_distance() {
assert_eq!(distance(Point2::new(4.0, 1.0), Point2::new(1.0, 5.0)), 5.0);
}
#[test]
fn point2_closest() {
let p1 = Point2::new(4.0, 1.0);
let p2 = Point2::new(1.0, 5.0);
assert_eq!(Point2::new(-1.0, 2.0).closest(p1, p2), p2);
}
#[test]
fn point2_farthest() {
let p1 = Point2::new(4.0, 1.0);
let p2 = Point2::new(1.0, 5.0);
assert_eq!(Point2::new(-1.0, 2.0).farthest(p1, p2), p1);
}
#[test]
fn point2_index() {
let p = Point2::new(1.0, 2.0);
assert_eq!(p[Dimension2::X], 1.0);
assert_eq!(p[Dimension2::Y], 2.0);
}
#[test]
fn point2_index_mut() {
let mut p = Point2::new(1.0, 2.0);
p[Dimension2::X] = 3.0;
p[Dimension2::Y] = -1.0;
assert_eq!(p, Point2::new(3.0, -1.0));
}
#[test]
fn point2_add_vector2() {
let p = Point2::new(1.0, 2.0);
let v = Vector2::new(-0.5, 1.5);
assert_eq!(p + v, Point2::new(0.5, 3.5));
}
#[test]
fn point2_add_assign_vector2() {
let mut p = Point2::new(1.0, 2.0);
let v = Vector2::new(-0.5, 1.5);
p += v;
assert_eq!(p, Point2::new(0.5, 3.5));
}
#[test]
fn point2_sub_vector2() {
let p = Point2::new(1.0, 2.0);
let v = Vector2::new(-0.5, 1.5);
assert_eq!(p - v, Point2::new(1.5, 0.5));
}
#[test]
fn point2_sub_assign_vector2() {
let mut p = Point2::new(1.0, 2.0);
let v = Vector2::new(-0.5, 1.5);
p -= v;
assert_eq!(p, Point2::new(1.5, 0.5));
}
#[test]
fn point2_sub_point2() {
let p1 = Point2::new(4.0, 2.0);
let p2 = Point2::new(1.0, 5.0);
assert_eq!(p1 - p2, Vector2::new(3.0, -3.0));
}
#[test]
fn point2_neg() {
assert_eq!(-Point2::new(1.0, -2.0), Point2::new(-1.0, 2.0));
}
#[test]
fn point2_mul_scalar() {
assert_eq!(Point2::new(2.5, -1.5) * 2.0, Point2::new(5.0, -3.0));
}
#[test]
fn scalar_mul_point2() {
assert_eq!(2.0 * Point2::new(2.5, -1.5), Point2::new(5.0, -3.0));
}
#[test]
fn point2_mul_assign_scalar() {
let mut p = Point2::new(2.5, -1.5);
p *= 2.0;
assert_eq!(p, Point2::new(5.0, -3.0));
}
#[test]
fn point2_div_scalar() {
assert_eq!(Point2::new(2.5, -1.5) / 2.0, Point2::new(1.25, -0.75));
}
#[test]
fn point2_div_assign_scalar() {
let mut p = Point2::new(2.5, -1.5);
p /= 2.0;
assert_eq!(p, Point2::new(1.25, -0.75));
}
#[test]
fn point2_from_vector2() {
let p = Point2::from(Vector2::new(1.0, 2.0));
assert_eq!(p, Point2::new(1.0, 2.0));
}
#[test]
fn vector2_new() {
let v = Vector2::new(-1.0, 2.0);
assert_eq!(v.x, -1.0);
assert_eq!(v.y, 2.0);
}
#[test]
fn vector2_zero() {
let v = Vector2::zero();
assert_eq!(v.x, 0.0);
assert_eq!(v.y, 0.0);
}
#[test]
fn vector2_x_axis() {
let v = Vector2::x_axis();
assert_eq!(v.x, 1.0);
assert_eq!(v.y, 0.0);
}
#[test]
fn vector2_y_axis() {
let v = Vector2::y_axis();
assert_eq!(v.x, 0.0);
assert_eq!(v.y, 1.0);
}
#[test]
fn vector2_axis() {
assert_eq!(Vector2::axis(Dimension2::X), Vector2::x_axis());
assert_eq!(Vector2::axis(Dimension2::Y), Vector2::y_axis());
}
#[test]
fn vector2_normalize() {
let v = Vector2::new(3.0, -2.0);
assert_eq!(v.normalize(), v / Scalar::sqrt(13.0));
}
#[test]
fn vector2_min_dimension() {
assert_eq!(Vector2::new(-1.0, 2.0).min_dimension(), Dimension2::X);
assert_eq!(Vector2::new(-3.0, 2.0).min_dimension(), Dimension2::Y);
}
#[test]
fn vector2_max_dimension() {
assert_eq!(Vector2::new(-1.0, 2.0).max_dimension(), Dimension2::Y);
assert_eq!(Vector2::new(-3.0, 2.0).max_dimension(), Dimension2::X);
}
#[test]
fn vector2_floor() {
assert_eq!(Vector2::new(-1.3, 2.6).floor(), Vector2::new(-2.0, 2.0));
}
#[test]
fn vector2_ceil() {
assert_eq!(Vector2::new(-1.3, 2.6).ceil(), Vector2::new(-1.0, 3.0));
}
#[test]
fn vector2_round() {
assert_eq!(Vector2::new(-1.6, 2.3).round(), Vector2::new(-2.0, 2.0));
}
#[test]
fn vector2_trunc() {
assert_eq!(Vector2::new(-1.3, 2.6).trunc(), Vector2::new(-1.0, 2.0));
}
#[test]
fn vector2_fract() {
assert_eq!(Vector2::new(-1.25, 2.5).fract(), Vector2::new(-0.25, 0.5));
}
#[test]
fn vector2_abs() {
assert_eq!(Vector2::new(-1.3, 2.6).abs(), Vector2::new(1.3, 2.6));
}
#[test]
fn vector2_permute() {
assert_eq!(Vector2::new(1.0, 2.0).permute(Dimension2::X, Dimension2::X), Vector2::new(1.0, 1.0));
assert_eq!(Vector2::new(1.0, 2.0).permute(Dimension2::X, Dimension2::Y), Vector2::new(1.0, 2.0));
assert_eq!(Vector2::new(1.0, 2.0).permute(Dimension2::Y, Dimension2::X), Vector2::new(2.0, 1.0));
assert_eq!(Vector2::new(1.0, 2.0).permute(Dimension2::Y, Dimension2::Y), Vector2::new(2.0, 2.0));
}
#[test]
fn vector2_min() {
assert_eq!(min(Vector2::new(-1.0, 2.0), Vector2::new(-3.0, 2.5)), Vector2::new(-3.0, 2.0));
}
#[test]
fn vector2_max() {
assert_eq!(max(Vector2::new(-1.0, 2.0), Vector2::new(-3.0, 2.5)), Vector2::new(-1.0, 2.5));
}
#[test]
fn vector2_length() {
assert_eq!(Vector2::new(3.0, 4.0).length(), 5.0);
}
#[test]
fn vector2_shortest() {
let v1 = Vector2::new(-1.0, -3.0);
let v2 = Vector2::new(2.0, 1.5);
assert_eq!(shortest(v1, v2), v2);
}
#[test]
fn vector2_longest() {
let v1 = Vector2::new(-1.0, -3.0);
let v2 = Vector2::new(2.0, 1.5);
assert_eq!(longest(v1, v2), v1);
}
#[test]
fn vector2_dot_vector2() {
let v1 = Vector2::new(-1.0, -3.0);
let v2 = Vector2::new(2.0, 1.5);
assert_eq!(dot(v1, v2), -6.5);
}
#[test]
fn vector2_index() {
let v = Vector2::new(1.0, 2.0);
assert_eq!(v[Dimension2::X], 1.0);
assert_eq!(v[Dimension2::Y], 2.0);
}
#[test]
fn vector2_index_mut() {
let mut v = Vector2::new(1.0, 2.0);
v[Dimension2::X] = 3.0;
v[Dimension2::Y] = -1.0;
assert_eq!(v, Vector2::new(3.0, -1.0));
}
#[test]
fn vector2_add_vector2() {
let v1 = Vector2::new(1.0, 2.0);
let v2 = Vector2::new(-0.5, 1.5);
assert_eq!(v1 + v2, Vector2::new(0.5, 3.5));
}
#[test]
fn vector2_add_assign_vector2() {
let mut v1 = Vector2::new(1.0, 2.0);
let v2 = Vector2::new(-0.5, 1.5);
v1 += v2;
assert_eq!(v1, Vector2::new(0.5, 3.5));
}
#[test]
fn vector2_sub_vector2() {
let v1 = Vector2::new(1.0, 2.0);
let v2 = Vector2::new(-0.5, 1.5);
assert_eq!(v1 - v2, Vector2::new(1.5, 0.5));
}
#[test]
fn vector2_sub_assign_vector2() {
let mut v1 = Vector2::new(1.0, 2.0);
let v2 = Vector2::new(-0.5, 1.5);
v1 -= v2;
assert_eq!(v1, Vector2::new(1.5, 0.5));
}
#[test]
fn vector2_neg() {
assert_eq!(-Vector2::new(1.0, -2.0), Vector2::new(-1.0, 2.0));
}
#[test]
fn vector2_mul_scalar() {
assert_eq!(Vector2::new(2.5, -1.5) * 2.0, Vector2::new(5.0, -3.0));
}
#[test]
fn scalar_mul_vector2() {
assert_eq!(2.0 * Vector2::new(2.5, -1.5), Vector2::new(5.0, -3.0));
}
#[test]
fn vector2_mul_assign_scalar() {
let mut v = Vector2::new(2.5, -1.5);
v *= 2.0;
assert_eq!(v, Vector2::new(5.0, -3.0));
}
#[test]
fn vector2_div_scalar() {
assert_eq!(Vector2::new(2.5, -1.5) / 2.0, Vector2::new(1.25, -0.75));
}
#[test]
fn vector2_div_assign_scalar() {
let mut v = Vector2::new(2.5, -1.5);
v /= 2.0;
assert_eq!(v, Vector2::new(1.25, -0.75));
}
#[test]
fn vector2_from_point2() {
let v = Vector2::from(Point2::new(1.0, 2.0));
assert_eq!(v, Vector2::new(1.0, 2.0));
}
#[test]
fn ray2_new() {
let r = Ray2::new(Point2::new(1.0, 2.0), Vector2::new(-1.5, 0.5));
assert_eq!(r.origin, Point2::new(1.0, 2.0));
assert_eq!(r.direction, Vector2::new(-1.5, 0.5));
}
#[test]
fn ray2_at() {
let r = Ray2::new(Point2::new(1.0, 2.0), Vector2::new(-1.5, 0.5));
assert_eq!(r.at(2.5), Point2::new(-2.75, 3.25));
}
#[test]
fn bounding_box2_new() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.min, Point2::new(1.0, -0.5));
assert_eq!(bb.max, Point2::new(5.0, 4.0));
}
#[test]
fn bounding_box2_empty() {
let bb = BoundingBox2::empty();
assert_eq!(bb.min, Point2::new(Scalar::INFINITY, Scalar::INFINITY));
assert_eq!(bb.max, Point2::new(Scalar::NEG_INFINITY, Scalar::NEG_INFINITY));
}
#[test]
fn bounding_box2_infinite() {
let bb = BoundingBox2::infinite();
assert_eq!(bb.min, Point2::new(Scalar::NEG_INFINITY, Scalar::NEG_INFINITY));
assert_eq!(bb.max, Point2::new(Scalar::INFINITY, Scalar::INFINITY));
}
#[test]
fn bounding_box2_width() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.width(), 4.0);
}
#[test]
fn bounding_box2_height() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.height(), 4.5);
}
#[test]
fn bounding_box2_extent() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.extent(Dimension2::X), 4.0);
assert_eq!(bb.extent(Dimension2::Y), 4.5);
}
#[test]
fn bounding_box2_min_dimension() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.min_dimension(), Dimension2::X);
}
#[test]
fn bounding_box2_max_dimension() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.max_dimension(), Dimension2::Y);
}
#[test]
fn bounding_box2_area() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.area(), 18.0);
}
#[test]
fn bounding_box2_center() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.center(), Point2::new(3.0, 1.75));
}
#[test]
fn bounding_box2_corner() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.corner(0), Point2::new(1.0, -0.5));
assert_eq!(bb.corner(1), Point2::new(5.0, -0.5));
assert_eq!(bb.corner(2), Point2::new(1.0, 4.0));
assert_eq!(bb.corner(3), Point2::new(5.0, 4.0));
}
#[test]
fn bounding_box2_diagonal() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert_eq!(bb.diagonal(), Vector2::new(4.0, 4.5));
}
#[test]
fn bounding_box2_overlaps() {
let bb1 = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
let bb2 = BoundingBox2::new(Point2::new(-1.0, 2.0), Point2::new(3.0, 6.0));
let bb3 = BoundingBox2::new(Point2::new(3.5, 1.5), Point2::new(6.0, 5.0));
assert!(bb1.overlaps(&bb2));
assert!(!bb2.overlaps(&bb3));
}
#[test]
fn bounding_box2_is_inside() {
let bb = BoundingBox2::new(Point2::new(1.0, -0.5), Point2::new(5.0, 4.0));
assert!(!bb.is_inside(Point2::new(0.0, 2.0)));
assert!(bb.is_inside(Point2::new(2.0, 3.0)));
assert!(!bb.is_inside(Point2::new(4.0, 5.0)));
}
#[test]
fn bounding_box2_intersect_ray() {
// TODO: Needs more elaborate tests, with rays going in different directions and different hit and miss cases
}
#[test]
fn bounding_box2_union_bounding_box2() {
// TODO: Test different cases with and without overlap
}
#[test]
fn bounding_box2_union_point2() {
// TODO: Test different cases with point inside and outside bounding box
}
#[test]
fn bounding_box2_intersection_bounding_box2() {
// TODO: Test different cases with and without overlap
}
#[test]
fn matrix3x3_new() {
let m = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
//@formatter:off
assert_eq!(m.get(0, 0), 1.0); assert_eq!(m.get(0, 1), 2.0); assert_eq!(m.get(0, 2), 3.0);
assert_eq!(m.get(1, 0), 4.0); assert_eq!(m.get(1, 1), 5.0); assert_eq!(m.get(1, 2), 6.0);
assert_eq!(m.get(2, 0), 7.0); assert_eq!(m.get(2, 1), 8.0); assert_eq!(m.get(2, 2), 9.0);
//@formatter:on
}
#[test]
fn matrix3x3_identity() {
let m = Matrix3x3::identity();
//@formatter:off
assert_eq!(m.get(0, 0), 1.0); assert_eq!(m.get(0, 1), 0.0); assert_eq!(m.get(0, 2), 0.0);
assert_eq!(m.get(1, 0), 0.0); assert_eq!(m.get(1, 1), 1.0); assert_eq!(m.get(1, 2), 0.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 1.0);
//@formatter:on
}
#[test]
fn matrix3x3_translate() {
let m = Matrix3x3::translate(Vector2::new(-2.0, 3.0));
//@formatter:off
assert_eq!(m.get(0, 0), 1.0); assert_eq!(m.get(0, 1), 0.0); assert_eq!(m.get(0, 2), -2.0);
assert_eq!(m.get(1, 0), 0.0); assert_eq!(m.get(1, 1), 1.0); assert_eq!(m.get(1, 2), 3.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 1.0);
//@formatter:on
}
#[test]
fn matrix3x3_rotate() {
let angle = 0.52359877559829887307710723054658381;
let m = Matrix3x3::rotate(angle);
//@formatter:off
assert_eq!(m.get(0, 0), angle.cos()); assert_eq!(m.get(0, 1), -angle.sin()); assert_eq!(m.get(0, 2), 0.0);
assert_eq!(m.get(1, 0), angle.sin()); assert_eq!(m.get(1, 1), angle.cos()); assert_eq!(m.get(1, 2), 0.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 1.0);
//@formatter:on
}
#[test]
fn matrix3x3_scale() {
let m = Matrix3x3::scale(-2.0, 2.0);
//@formatter:off
assert_eq!(m.get(0, 0), -2.0); assert_eq!(m.get(0, 1), 0.0); assert_eq!(m.get(0, 2), 0.0);
assert_eq!(m.get(1, 0), 0.0); assert_eq!(m.get(1, 1), 2.0); assert_eq!(m.get(1, 2), 0.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 1.0);
//@formatter:on
}
#[test]
fn matrix3x3_scale_uniform() {
let m = Matrix3x3::scale_uniform(2.5);
//@formatter:off
assert_eq!(m.get(0, 0), 2.5); assert_eq!(m.get(0, 1), 0.0); assert_eq!(m.get(0, 2), 0.0);
assert_eq!(m.get(1, 0), 0.0); assert_eq!(m.get(1, 1), 2.5); assert_eq!(m.get(1, 2), 0.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 1.0);
//@formatter:on
}
#[test]
fn matrix3x3_get_mut() {
let mut m = Matrix3x3::identity();
*m.get_mut(0, 0) = 2.0;
*m.get_mut(0, 1) = 3.0;
*m.get_mut(1, 0) = -2.0;
*m.get_mut(2, 2) = 4.0;
//@formatter:off
assert_eq!(m.get(0, 0), 2.0); assert_eq!(m.get(0, 1), 3.0); assert_eq!(m.get(0, 2), 0.0);
assert_eq!(m.get(1, 0), -2.0); assert_eq!(m.get(1, 1), 1.0); assert_eq!(m.get(1, 2), 0.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 4.0);
//@formatter:on
}
#[test]
fn matrix3x3_set() {
let mut m = Matrix3x3::identity();
m.set(0, 0, 2.0);
m.set(0, 1, 3.0);
m.set(1, 0, -2.0);
m.set(2, 2, 4.0);
//@formatter:off
assert_eq!(m.get(0, 0), 2.0); assert_eq!(m.get(0, 1), 3.0); assert_eq!(m.get(0, 2), 0.0);
assert_eq!(m.get(1, 0), -2.0); assert_eq!(m.get(1, 1), 1.0); assert_eq!(m.get(1, 2), 0.0);
assert_eq!(m.get(2, 0), 0.0); assert_eq!(m.get(2, 1), 0.0); assert_eq!(m.get(2, 2), 4.0);
//@formatter:on
}
#[test]
fn matrix3x3_transpose() {
let m = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]).transpose();
//@formatter:off
assert_eq!(m.get(0, 0), 1.0); assert_eq!(m.get(0, 1), 4.0); assert_eq!(m.get(0, 2), 7.0);
assert_eq!(m.get(1, 0), 2.0); assert_eq!(m.get(1, 1), 5.0); assert_eq!(m.get(1, 2), 8.0);
assert_eq!(m.get(2, 0), 3.0); assert_eq!(m.get(2, 1), 6.0); assert_eq!(m.get(2, 2), 9.0);
//@formatter:on
}
#[test]
fn matrix3x3_inverse() {
// TODO: Implement test
}
#[test]
fn matrix3x3_mul_scalar() {
let m = &Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]) * 2.5;
//@formatter:off
assert_eq!(m.get(0, 0), 2.5); assert_eq!(m.get(0, 1), 5.0); assert_eq!(m.get(0, 2), 7.5);
assert_eq!(m.get(1, 0), 10.0); assert_eq!(m.get(1, 1), 12.5); assert_eq!(m.get(1, 2), 15.0);
assert_eq!(m.get(2, 0), 17.5); assert_eq!(m.get(2, 1), 20.0); assert_eq!(m.get(2, 2), 22.5);
//@formatter:on
}
#[test]
fn scalar_mul_matrix3x3() {
let m = 2.5 * &Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
//@formatter:off
assert_eq!(m.get(0, 0), 2.5); assert_eq!(m.get(0, 1), 5.0); assert_eq!(m.get(0, 2), 7.5);
assert_eq!(m.get(1, 0), 10.0); assert_eq!(m.get(1, 1), 12.5); assert_eq!(m.get(1, 2), 15.0);
assert_eq!(m.get(2, 0), 17.5); assert_eq!(m.get(2, 1), 20.0); assert_eq!(m.get(2, 2), 22.5);
//@formatter:on
}
#[test]
fn matrix3x3_mul_assign_scalar() {
let mut m = &mut Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
m *= 2.5;
//@formatter:off
assert_eq!(m.get(0, 0), 2.5); assert_eq!(m.get(0, 1), 5.0); assert_eq!(m.get(0, 2), 7.5);
assert_eq!(m.get(1, 0), 10.0); assert_eq!(m.get(1, 1), 12.5); assert_eq!(m.get(1, 2), 15.0);
assert_eq!(m.get(2, 0), 17.5); assert_eq!(m.get(2, 1), 20.0); assert_eq!(m.get(2, 2), 22.5);
//@formatter:on
}
#[test]
fn matrix3x3_div_scalar() {
let m = &Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]) / 2.5;
//@formatter:off
assert_eq!(m.get(0, 0), 0.4); assert_eq!(m.get(0, 1), 0.8); assert_eq!(m.get(0, 2), 1.2);
assert_eq!(m.get(1, 0), 1.6); assert_eq!(m.get(1, 1), 2.0); assert_eq!(m.get(1, 2), 2.4);
assert_eq!(m.get(2, 0), 2.8); assert_eq!(m.get(2, 1), 3.2); assert_eq!(m.get(2, 2), 3.6);
//@formatter:on
}
#[test]
fn matrix3x3_div_assign_scalar() {
let mut m = &mut Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
m /= 2.5;
//@formatter:off
assert_eq!(m.get(0, 0), 0.4); assert_eq!(m.get(0, 1), 0.8); assert_eq!(m.get(0, 2), 1.2);
assert_eq!(m.get(1, 0), 1.6); assert_eq!(m.get(1, 1), 2.0); assert_eq!(m.get(1, 2), 2.4);
assert_eq!(m.get(2, 0), 2.8); assert_eq!(m.get(2, 1), 3.2); assert_eq!(m.get(2, 2), 3.6);
//@formatter:on
}
#[test]
fn matrix3x3_mul_point2() {
let m = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
assert_eq!(&m * Point2::new(-1.0, -2.0), Point2::new(2.0 / 14.0, 8.0 / 14.0));
}
#[test]
fn point2_mul_matrix3x3() {
let m = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
assert_eq!(Point2::new(-1.0, -2.0) * &m, Point2::new(2.0 / 6.0, 4.0 / 6.0));
}
#[test]
fn matrix3x3_mul_vector2() {
let m = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
assert_eq!(&m * Vector2::new(-1.0, -2.0), Vector2::new(-5.0, -14.0));
}
#[test]
fn vector2_mul_matrix3x3() {
let m = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
assert_eq!(Vector2::new(-1.0, -2.0) * &m, Vector2::new(-9.0, -12.0));
}
#[test]
fn matrix3x3_mul_matrix3x3() {
let m1 = Matrix3x3::new([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
let m2 = Matrix3x3::new([0.25, -0.75, 0.5, -0.5, 0.25, -0.75, 1.25, -1.75, 1.5]);
let m = &m1 * &m2;
//@formatter:off
assert_eq!(m.get(0, 0), 3.0); assert_eq!(m.get(0, 1), -5.5); assert_eq!(m.get(0, 2), 3.5);
assert_eq!(m.get(1, 0), 6.0); assert_eq!(m.get(1, 1), -12.25); assert_eq!(m.get(1, 2), 7.25);
assert_eq!(m.get(2, 0), 9.0); assert_eq!(m.get(2, 1), -19.0); assert_eq!(m.get(2, 2), 11.0);
//@formatter:on
}
#[test]
fn transform2_identity() {
let t = Transform2::identity();
assert_eq!(*t.forward, Matrix3x3::identity());
assert_eq!(*t.inverse, Matrix3x3::identity());
}
#[test]
fn transform2_translate() {
let v = Vector2::new(-3.0, 4.0);
let t = Transform2::translate(v);
assert_eq!(*t.forward, Matrix3x3::translate(v));
assert_eq!(*t.inverse, Matrix3x3::translate(-v));
}
#[test]
fn transform2_rotate() {
let angle = 0.52359877559829887307710723054658381;
let t = Transform2::rotate(angle);
assert_eq!(*t.forward, Matrix3x3::rotate(angle));
assert_eq!(*t.inverse, Matrix3x3::rotate(-angle));
}
#[test]
fn transform2_scale() {
let t = Transform2::scale(3.25, 2.5);
assert_eq!(*t.forward, Matrix3x3::scale(3.25, 2.5));
assert_eq!(*t.inverse, Matrix3x3::scale(1.0 / 3.25, 1.0 / 2.5));
}
#[test]
fn transform2_scale_uniform() {
let t = Transform2::scale_uniform(4.0);
assert_eq!(*t.forward, Matrix3x3::scale_uniform(4.0));
assert_eq!(*t.inverse, Matrix3x3::scale_uniform(1.0 / 4.0));
}
#[test]
fn transform2_and_then() {
let angle = 0.39269908169872415480783042290993786;
let v = Vector2::new(-2.0, 3.0);
let t = Transform2::rotate(angle).and_then(&Transform2::translate(v));
assert_eq!(*t.forward, &Matrix3x3::translate(v) * &Matrix3x3::rotate(angle));
assert_eq!(*t.inverse, &Matrix3x3::rotate(-angle) * &Matrix3x3::translate(-v));
}
#[test]
fn transform2_inverse() {
let t1 = Transform2::rotate(0.39269908169872415480783042290993786).and_then(&Transform2::translate(Vector2::new(-2.0, 3.0)));
let t2 = t1.inverse();
assert_eq!(*t1.forward, *t2.inverse);
assert_eq!(*t1.inverse, *t2.forward);
}
#[test]
fn transform2_from_matrix() {
let m = Matrix3x3::translate(Vector2::new(-2.0, 3.0));
let t = Transform2::try_from(m.clone()).unwrap();
assert_eq!(*t.forward, m);
assert_eq!(*t.inverse, m.inverse().unwrap());
}
#[test]
fn transform_point2() {
let angle = 0.39269908169872415480783042290993786;
let v = Vector2::new(-2.0, 3.0);
let t = Transform2::rotate(angle).and_then(&Transform2::translate(v));
let p = t.transform(Point2::new(-1.0, 1.0));
assert_eq!(p, &(&Matrix3x3::translate(v) * &Matrix3x3::rotate(angle)) * Point2::new(-1.0, 1.0));
}
#[test]
fn transform_vector2() {
let angle = 0.39269908169872415480783042290993786;
let scale = 2.25;
let t = Transform2::scale_uniform(scale).and_then(&Transform2::rotate(angle));
let v = t.transform(Vector2::new(-1.0, 1.0));
assert_eq!(v, &(&Matrix3x3::rotate(angle) * &Matrix3x3::scale_uniform(scale)) * Vector2::new(-1.0, 1.0));
}
#[test]
fn transform_ray2() {
let angle = 0.39269908169872415480783042290993786;
let v = Vector2::new(-2.0, 3.0);
let t = Transform2::rotate(angle).and_then(&Transform2::translate(v));
let origin = Point2::new(-2.0, 1.5);
let direction = Vector2::new(3.5, 2.25);
let r = t.transform(&Ray2::new(origin, direction));
assert_eq!(r, Ray2::new(t.transform(origin), t.transform(direction)));
}
#[test]
fn transform_bounding_box2() {
// TODO
}
#[test]
fn point3_new() {
let p = Point3::new(-1.0, 2.0, 3.0);
assert_eq!(p.x, -1.0);
assert_eq!(p.y, 2.0);
assert_eq!(p.z, 3.0);
}
#[test]
fn point3_origin() {
let p = Point3::origin();
assert_eq!(p.x, 0.0);
assert_eq!(p.y, 0.0);
assert_eq!(p.z, 0.0);
}
#[test]
fn point3_min_dimension() {
assert_eq!(Point3::new(-1.0, 2.0, 3.0).min_dimension(), Dimension3::X);
assert_eq!(Point3::new(-3.0, 2.0, 3.0).min_dimension(), Dimension3::Y);
assert_eq!(Point3::new(-1.0, 2.0, 0.5).min_dimension(), Dimension3::Z);
}
#[test]
fn point3_max_dimension() {
assert_eq!(Point3::new(-1.0, 2.0, 0.0).max_dimension(), Dimension3::Y);
assert_eq!(Point3::new(-3.0, 2.0, 0.0).max_dimension(), Dimension3::X);
assert_eq!(Point3::new(-1.0, 2.0, -2.5).max_dimension(), Dimension3::Z);
}
#[test]
fn point3_floor() {
assert_eq!(Point3::new(-1.3, 2.6, 4.5).floor(), Point3::new(-2.0, 2.0, 4.0));
}
#[test]
fn point3_ceil() {
assert_eq!(Point3::new(-1.3, 2.6, 4.5).ceil(), Point3::new(-1.0, 3.0, 5.0));
}
#[test]
fn point3_round() {
assert_eq!(Point3::new(-1.6, 2.3, 4.5).round(), Point3::new(-2.0, 2.0, 5.0));
}
#[test]
fn point3_trunc() {
assert_eq!(Point3::new(-1.3, 2.6, 4.5).trunc(), Point3::new(-1.0, 2.0, 4.0));
}
#[test]
fn point3_fract() {
assert_eq!(Point3::new(-1.25, 2.5, 4.75).fract(), Point3::new(-0.25, 0.5, 0.75));
}
#[test]
fn point3_abs() {
assert_eq!(Point3::new(-1.3, 2.6, -4.5).abs(), Point3::new(1.3, 2.6, 4.5));
}
#[test]
fn point3_permute() {
assert_eq!(Point3::new(1.0, 2.0, 3.0).permute(Dimension3::X, Dimension3::X, Dimension3::X), Point3::new(1.0, 1.0, 1.0));
assert_eq!(Point3::new(1.0, 2.0, 3.0).permute(Dimension3::X, Dimension3::Y, Dimension3::Z), Point3::new(1.0, 2.0, 3.0));
assert_eq!(Point3::new(1.0, 2.0, 3.0).permute(Dimension3::Y, Dimension3::Z, Dimension3::X), Point3::new(2.0, 3.0, 1.0));
assert_eq!(Point3::new(1.0, 2.0, 3.0).permute(Dimension3::Z, Dimension3::X, Dimension3::Y), Point3::new(3.0, 1.0, 2.0));
}
#[test]
fn point3_min() {
assert_eq!(min(Point3::new(-1.0, 2.0, 3.0), Point3::new(-3.0, 2.5, 1.5)), Point3::new(-3.0, 2.0, 1.5));
}
#[test]
fn point3_max() {
assert_eq!(max(Point3::new(-1.0, 2.0, 3.0), Point3::new(-3.0, 2.5, 1.5)), Point3::new(-1.0, 2.5, 3.0));
}
#[test]
fn point3_distance() {
assert_eq!(distance(Point3::new(2.0, 7.0, 1.0), Point3::new(3.0, 4.0, -1.0)), Scalar::sqrt(14.0));
}
#[test]
fn point3_closest() {
let p1 = Point3::new(4.0, 1.0, 3.0);
let p2 = Point3::new(1.0, 5.0, -2.0);
assert_eq!(Point3::new(-1.0, 2.0, 0.0).closest(p1, p2), p2);
}
#[test]
fn point3_farthest() {
let p1 = Point3::new(4.0, 1.0, 3.0);
let p2 = Point3::new(1.0, 5.0, -2.0);
assert_eq!(Point3::new(-1.0, 2.0, 0.0).farthest(p1, p2), p1);
}
#[test]
fn point3_index() {
let p = Point3::new(1.0, 2.0, 3.0);
assert_eq!(p[Dimension3::X], 1.0);
assert_eq!(p[Dimension3::Y], 2.0);
assert_eq!(p[Dimension3::Z], 3.0);
}
#[test]
fn point3_index_mut() {
let mut p = Point3::new(1.0, 2.0, 3.0);
p[Dimension3::X] = 3.0;
p[Dimension3::Y] = -1.0;
p[Dimension3::Z] = 2.0;
assert_eq!(p, Point3::new(3.0, -1.0, 2.0));
}
#[test]
fn point3_add_vector3() {
let p = Point3::new(1.0, 2.0, 3.0);
let v = Vector3::new(-0.5, 1.5, 2.5);
assert_eq!(p + v, Point3::new(0.5, 3.5, 5.5));
}
#[test]
fn point3_add_assign_vector3() {
let mut p = Point3::new(1.0, 2.0, 3.0);
let v = Vector3::new(-0.5, 1.5, 2.5);
p += v;
assert_eq!(p, Point3::new(0.5, 3.5, 5.5));
}
#[test]
fn point3_sub_vector3() {
let p = Point3::new(1.0, 2.0, 3.0);
let v = Vector3::new(-0.5, 1.5, 2.75);
assert_eq!(p - v, Point3::new(1.5, 0.5, 0.25));
}
#[test]
fn point3_sub_assign_vector3() {
let mut p = Point3::new(1.0, 2.0, 3.0);
let v = Vector3::new(-0.5, 1.5, 2.75);
p -= v;
assert_eq!(p, Point3::new(1.5, 0.5, 0.25));
}
#[test]
fn point3_sub_point3() {
let p1 = Point3::new(4.0, 2.0, 1.0);
let p2 = Point3::new(1.0, 5.0, 2.0);
assert_eq!(p1 - p2, Vector3::new(3.0, -3.0, -1.0));
}
#[test]
fn point3_neg() {
assert_eq!(-Point3::new(1.0, -2.0, 3.0), Point3::new(-1.0, 2.0, -3.0));
}
#[test]
fn point3_mul_scalar() {
assert_eq!(Point3::new(2.5, -1.5, 3.0) * 2.0, Point3::new(5.0, -3.0, 6.0));
}
#[test]
fn scalar_mul_point3() {
assert_eq!(2.0 * Point3::new(2.5, -1.5, 3.0), Point3::new(5.0, -3.0, 6.0));
}
#[test]
fn point3_mul_assign_scalar() {
let mut p = Point3::new(2.5, -1.5, 3.0);
p *= 2.0;
assert_eq!(p, Point3::new(5.0, -3.0, 6.0));
}
#[test]
fn point3_div_scalar() {
assert_eq!(Point3::new(2.5, -1.5, 3.0) / 2.0, Point3::new(1.25, -0.75, 1.5));
}
#[test]
fn point3_div_assign_scalar() {
let mut p = Point3::new(2.5, -1.5, 3.0);
p /= 2.0;
assert_eq!(p, Point3::new(1.25, -0.75, 1.5));
}
#[test]
fn point3_from_vector3() {
let p = Point3::from(Vector3::new(1.0, 2.0, 3.0));
assert_eq!(p, Point3::new(1.0, 2.0, 3.0));
}
#[test]
fn vector3_new() {
let v = Vector3::new(-1.0, 2.0, -3.0);
assert_eq!(v.x, -1.0);
assert_eq!(v.y, 2.0);
assert_eq!(v.z, -3.0);
}
#[test]
fn vector3_zero() {
let v = Vector3::zero();
assert_eq!(v.x, 0.0);
assert_eq!(v.y, 0.0);
assert_eq!(v.z, 0.0);
}
#[test]
fn vector3_x_axis() {
let v = Vector3::x_axis();
assert_eq!(v.x, 1.0);
assert_eq!(v.y, 0.0);
assert_eq!(v.z, 0.0);
}
#[test]
fn vector3_y_axis() {
let v = Vector3::y_axis();
assert_eq!(v.x, 0.0);
assert_eq!(v.y, 1.0);
assert_eq!(v.z, 0.0);
}
#[test]
fn vector3_z_axis() {
let v = Vector3::z_axis();
assert_eq!(v.x, 0.0);
assert_eq!(v.y, 0.0);
assert_eq!(v.z, 1.0);
}
#[test]
fn vector3_axis() {
assert_eq!(Vector3::axis(Dimension3::X), Vector3::x_axis());
assert_eq!(Vector3::axis(Dimension3::Y), Vector3::y_axis());
assert_eq!(Vector3::axis(Dimension3::Z), Vector3::z_axis());
}
#[test]
fn vector3_normalize() {
let v = Vector3::new(3.0, -2.0, 1.0);
assert_eq!(v.normalize(), v / Scalar::sqrt(14.0));
}
#[test]
fn vector3_min_dimension() {
assert_eq!(Vector3::new(-1.0, 2.0, 3.0).min_dimension(), Dimension3::X);
assert_eq!(Vector3::new(-3.0, 2.0, 2.5).min_dimension(), Dimension3::Y);
}
#[test]
fn vector3_max_dimension() {
assert_eq!(Vector3::new(-1.0, 2.0, 0.5).max_dimension(), Dimension3::Y);
assert_eq!(Vector3::new(-3.0, 2.0, 0.5).max_dimension(), Dimension3::X);
assert_eq!(Vector3::new(-3.0, 2.0, 4.0).max_dimension(), Dimension3::Z);
}
#[test]
fn vector3_floor() {
assert_eq!(Vector3::new(-1.3, 2.6, 3.75).floor(), Vector3::new(-2.0, 2.0, 3.0));
}
#[test]
fn vector3_ceil() {
assert_eq!(Vector3::new(-1.3, 2.6, 3.75).ceil(), Vector3::new(-1.0, 3.0, 4.0));
}
#[test]
fn vector3_round() {
assert_eq!(Vector3::new(-1.6, 2.3, 3.75).round(), Vector3::new(-2.0, 2.0, 4.0));
}
#[test]
fn vector3_trunc() {
assert_eq!(Vector3::new(-1.3, 2.6, 3.75).trunc(), Vector3::new(-1.0, 2.0, 3.0));
}
#[test]
fn vector3_fract() {
assert_eq!(Vector3::new(-1.25, 2.5, 3.75).fract(), Vector3::new(-0.25, 0.5, 0.75));
}
#[test]
fn vector3_abs() {
assert_eq!(Vector3::new(-1.3, 2.6, -2.0).abs(), Vector3::new(1.3, 2.6, 2.0));
}
#[test]
fn vector3_permute() {
assert_eq!(Vector3::new(1.0, 2.0, 3.0).permute(Dimension3::X, Dimension3::X, Dimension3::X), Vector3::new(1.0, 1.0, 1.0));
assert_eq!(Vector3::new(1.0, 2.0, 3.0).permute(Dimension3::X, Dimension3::Y, Dimension3::Z), Vector3::new(1.0, 2.0, 3.0));
assert_eq!(Vector3::new(1.0, 2.0, 3.0).permute(Dimension3::Y, Dimension3::Z, Dimension3::X), Vector3::new(2.0, 3.0, 1.0));
assert_eq!(Vector3::new(1.0, 2.0, 3.0).permute(Dimension3::Z, Dimension3::X, Dimension3::Y), Vector3::new(3.0, 1.0, 2.0));
}
#[test]
fn vector3_min() {
assert_eq!(min(Vector3::new(-1.0, 2.0, 3.0), Vector3::new(-3.0, 2.5, 3.5)), Vector3::new(-3.0, 2.0, 3.0));
}
#[test]
fn vector3_max() {
assert_eq!(max(Vector3::new(-1.0, 2.0, 3.0), Vector3::new(-3.0, 2.5, 3.5)), Vector3::new(-1.0, 2.5, 3.5));
}
#[test]
fn vector3_length() {
assert_eq!(Vector3::new(2.0, 3.0, 6.0).length(), 7.0);
}
#[test]
fn vector3_shortest() {
let v1 = Vector3::new(-1.0, -3.0, 0.5);
let v2 = Vector3::new(2.0, 1.5, 0.5);
assert_eq!(shortest(v1, v2), v2);
}
#[test]
fn vector3_longest() {
let v1 = Vector3::new(-1.0, -3.0, 0.5);
let v2 = Vector3::new(2.0, 1.5, 0.5);
assert_eq!(longest(v1, v2), v1);
}
#[test]
fn vector3_dot_vector3() {
let v1 = Vector3::new(-1.0, -3.0, 2.5);
let v2 = Vector3::new(2.0, 1.5, 0.5);
assert_eq!(dot(v1, v2), -5.25);
}
#[test]
fn vector3_index() {
let v = Vector3::new(1.0, 2.0, 3.0);
assert_eq!(v[Dimension3::X], 1.0);
assert_eq!(v[Dimension3::Y], 2.0);
assert_eq!(v[Dimension3::Z], 3.0);
}
#[test]
fn vector3_index_mut() {
let mut v = Vector3::new(1.0, 2.0, 3.0);
v[Dimension3::X] = 3.0;
v[Dimension3::Y] = -1.0;
v[Dimension3::Z] = 2.5;
assert_eq!(v, Vector3::new(3.0, -1.0, 2.5));
}
#[test]
fn vector3_add_vector3() {
let v1 = Vector3::new(1.0, 2.0, 3.0);
let v2 = Vector3::new(-0.5, 1.5, 3.0);
assert_eq!(v1 + v2, Vector3::new(0.5, 3.5, 6.0));
}
#[test]
fn vector3_add_assign_vector3() {
let mut v1 = Vector3::new(1.0, 2.0, 3.0);
let v2 = Vector3::new(-0.5, 1.5, 3.0);
v1 += v2;
assert_eq!(v1, Vector3::new(0.5, 3.5, 6.0));
}
#[test]
fn vector3_sub_vector3() {
let v1 = Vector3::new(1.0, 2.0, 3.0);
let v2 = Vector3::new(-0.5, 1.5, 3.5);
assert_eq!(v1 - v2, Vector3::new(1.5, 0.5, -0.5));
}
#[test]
fn vector3_sub_assign_vector3() {
let mut v1 = Vector3::new(1.0, 2.0, 3.0);
let v2 = Vector3::new(-0.5, 1.5, 3.5);
v1 -= v2;
assert_eq!(v1, Vector3::new(1.5, 0.5, -0.5));
}
#[test]
fn vector3_neg() {
assert_eq!(-Vector3::new(1.0, -2.0, 3.0), Vector3::new(-1.0, 2.0, -3.0));
}
#[test]
fn vector3_mul_scalar() {
assert_eq!(Vector3::new(2.5, -1.5, 4.0) * 2.0, Vector3::new(5.0, -3.0, 8.0));
}
#[test]
fn scalar_mul_vector3() {
assert_eq!(2.0 * Vector3::new(2.5, -1.5, 4.0), Vector3::new(5.0, -3.0, 8.0));
}
#[test]
fn vector3_mul_assign_scalar() {
let mut v = Vector3::new(2.5, -1.5, 4.0);
v *= 2.0;
assert_eq!(v, Vector3::new(5.0, -3.0, 8.0));
}
#[test]
fn vector3_div_scalar() {
assert_eq!(Vector3::new(2.5, -1.5, 4.0) / 2.0, Vector3::new(1.25, -0.75, 2.0));
}
#[test]
fn vector3_div_assign_scalar() {
let mut v = Vector3::new(2.5, -1.5, 4.0);
v /= 2.0;
assert_eq!(v, Vector3::new(1.25, -0.75, 2.0));
}
#[test]
fn vector3_from_point3() {
let v = Vector3::from(Point3::new(1.0, 2.0, 3.0));
assert_eq!(v, Vector3::new(1.0, 2.0, 3.0));
}
// TODO: Tests for BoundingBox3, Matrix4x4, Transform3
}
|
// Copyright 2016 functils Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! Rust has really powerful generics that can be used to make it more functional in terms of it's
//! composition. Included in this crate are helper functions often times used in languages like
//! Haskell.
#![allow(dead_code)]
extern crate kinder;
pub mod list;
// ReExporting traits for various stdlib items so that they can be used implicitly
pub use std::iter::FromIterator;
/// Returns the first element of a two-element tuple.
pub fn fst<A, B>(pair: (A, B)) -> A {
    let (first, _) = pair;
    first
}
/// Given a 2-tuple, returns its second element.
pub fn snd<A, B>(x: (A, B)) -> B {
    let (_, second) = x;
    second
}
/// Given a value return itself. This is like the mathematical identity function
pub fn identity<A>(x:A) -> A {
x
}
/// Works like Haskell's `bool`: an inline if-then-else expression.
/// Returns `x` when `b` holds, otherwise `y`.
pub fn ifte<A>(x: A, y: A, b: bool) -> A {
    if !b {
        return y;
    }
    x
}
#[test]
fn fst_works() {
assert_eq!(fst((1,"Hello")),1);
}
#[test]
fn snd_works() {
assert_eq!(snd((1,"Hello")),"Hello");
}
#[test]
fn identity_works() {
    // Identity must hold for any type: int, float, &str.
    assert_eq!(identity(1), 1);
    assert_eq!(identity(1.0), 1.0);
    assert_eq!(identity("Hi"), "Hi");
}
#[test]
fn ifte_works() {
assert_eq!(ifte("Hi","Bye", true), "Hi");
assert_eq!(ifte("Hi","Bye", false), "Bye");
}
|
// `test_stdout!` is a project-defined macro: each invocation generates a test
// named by the first argument and, presumably, asserts that running the
// corresponding fixture prints the given binary rendering to stdout —
// TODO confirm against the macro definition.
test_stdout!(
    with_positive_start_and_positive_length_returns_subbinary,
    "<<1>>\n"
);
test_stdout!(
    with_size_start_and_negative_size_length_returns_binary,
    "<<0,1,2>>\n"
);
test_stdout!(
    with_zero_start_and_size_length_returns_binary,
    "<<0,1,2>>\n"
);
|
use graph::Graph;
use std::hash::Hash;
/// Returns the nodes reachable from `start`, visited depth-first, with the
/// final order reversed so `start` is the last element.
///
/// # Panics
/// Panics if `start` or any referenced neighbor is missing from `g`
/// (because of the `unwrap` on `get_node`).
pub fn nodes_by_depth_from<T: Hash + Copy + Eq + Ord>(g: &Graph<T>, start: T) -> Vec<T> {
    // Depth-first walk recording each node in `visited` on first entry.
    // The previous version returned `visited.clone()` from every recursive
    // call and re-scanned the returned list (a loop that could never add
    // anything, since traverse had already pushed those nodes) — O(n^2)
    // copying for no effect. Mutating the shared Vec directly produces the
    // exact same visit order without the copies.
    fn traverse<T: Hash + Copy + Eq + Ord>(g: &Graph<T>, v: T, visited: &mut Vec<T>) {
        if !visited.contains(&v) {
            visited.push(v);
        }
        let neighbors = g.get_node(&v).unwrap().adjacents();
        // Snapshot the unvisited neighbors before recursing, mirroring the
        // original traversal (and therefore its ordering) exactly.
        let to_visit: Vec<&T> = neighbors.iter().filter(|&n| !visited.contains(n)).collect();
        for n in to_visit {
            traverse(g, *n, visited);
        }
    }
    let mut res = Vec::new();
    traverse(g, start, &mut res);
    res.reverse();
    res
}
#[cfg(test)]
mod tests {
    use super::*;
    use P80::graph_converters::unlabeled;
    #[test]
    fn test_nodes_by_depth_from() {
        // 'e' is disconnected and must not appear; both listed orders are
        // valid since the neighbor iteration order is unspecified.
        let g = unlabeled::from_string("[a-b, b-c, e, a-c, a-d]");
        let nodes = nodes_by_depth_from(&g, 'd');
        assert!(nodes == vec!['c', 'b', 'a', 'd'] || nodes == vec!['b', 'c', 'a', 'd']);
    }
}
|
use rocket::{get, response::NamedFile};
use std::io;
/// Serves the landing page from `resources/index.html`.
#[get("/")]
pub(in crate) fn index() -> io::Result<NamedFile> {
    // failure to read the file is an error, thus we want to return Result,
    // which in failure translates to HTTP 500. If we returned Option, failures
    // would result in HTTP 404, which would be misleading.
    NamedFile::open("resources/index.html")
}
/// Serves the frontend script; `io::Result` is used for the same
/// HTTP-500-on-failure rationale as `index`.
#[get("/script.js")]
pub(in crate) fn script() -> io::Result<NamedFile> {
    NamedFile::open("resources/script.js")
}
#[cfg(test)]
mod tests {
    use crate::rocket;
    use rocket::http::{ContentType, Status};
    use rocket::local::Client;
    #[test]
    fn test_index() {
        test_static_page("/", ContentType::HTML, "<!DOCTYPE html>\n<html>");
    }
    #[test]
    fn test_script() {
        // Query parameters must be ignored when serving the static file.
        test_static_page(
            "/script.js?this=is&ignored",
            ContentType::JavaScript,
            "function inputChanged(",
        );
    }
    // Shared helper: GET `path` and check status, content type, and that the
    // body starts with `expected_start` (avoids pinning the whole file).
    fn test_static_page(path: &str, expected_type: ContentType, expected_start: &str) {
        let client = Client::new(rocket()).unwrap();
        let mut response = client.get(path).dispatch();
        assert_eq!(response.status(), Status::Ok);
        assert_eq!(response.content_type(), Some(expected_type));
        assert_eq!(&response.body_string().unwrap()[..expected_start.len()], expected_start);
    }
}
|
// NOTE(review): this is not standard Rust — `#concat_idents[...]` and
// `#ident_to_str[...]` look like compile-time forms of an experimental
// Rust-like language; confirm the toolchain (and its comment syntax)
// before editing.
fn main() {
    let asdf_fdsa = "<.<";
    // Presumably splices `asd`, `f_f`, `dsa` into the identifier `asdf_fdsa`
    // and reads its value — TODO confirm against the language spec.
    assert(#concat_idents[asd,f_f,dsa] == "<.<");
    // Presumably stringifies the identifier token itself.
    assert(#ident_to_str[use_mention_distinction]
        == "use_mention_distinction");
}
//! This module is just a fake implementation that has the advantages of compiling on every
//! architecture. Hence, we can at least check that the code compiles even if we cannot verify it
//! functionally.
use std::{ffi::CStr, result::Result, sync::RwLock};
// Single process-wide fake device instance handed out by `Device::get`.
static DEVICE: Device = Device {};
/// Fake device buffer: data lives in a host-side `Vec` behind an `RwLock`.
pub struct Buffer<T: Copy> {
    pub len: usize,
    data: RwLock<Vec<T>>,
}
impl<T: Copy> Buffer<T> {
    /// Creates an empty buffer with capacity for `len` elements; the device
    /// handle is ignored by this fake implementation.
    pub fn new(_device: &'static Device, len: usize) -> Self {
        let data = RwLock::new(Vec::with_capacity(len));
        Buffer { len, data }
    }
    /// Raw pointer to the current storage, for FFI-shaped signatures.
    pub fn raw_ptr(&self) -> *const libc::c_void {
        let guard = self.data.read().unwrap();
        guard.as_ptr() as *const libc::c_void
    }
    /// Returns a copy of the buffer contents.
    pub fn read(&self) -> Result<Vec<T>, ()> {
        let guard = self.data.read().unwrap();
        Ok((*guard).clone())
    }
    /// Replaces the buffer contents with `data`.
    pub fn write(&self, data: &[T]) -> Result<(), ()> {
        let mut guard = self.data.write().unwrap();
        *guard = data.to_vec();
        Ok(())
    }
}
/// Fake device handle; every operation succeeds without touching hardware.
pub struct Device {}
impl Device {
    /// Returns the shared fake device instance.
    pub fn get() -> &'static Device {
        &DEVICE
    }
    /// Pretends to build a wrapper from the given sources; always succeeds.
    pub fn build_wrapper(&self, _: &CStr, _: &CStr) -> Result<Wrapper, ()> {
        Ok(Wrapper {})
    }
    /// Compiles a kernel.
    pub fn build_kernel(
        &self,
        _: &CStr,
        _: &CStr,
        _: &CStr,
        _: &Wrapper,
    ) -> Result<Kernel, ()> {
        Ok(Kernel {})
    }
    /// Deliberately panics: the fake backend lets callers compile-check, but
    /// must never actually execute a kernel.
    pub fn execute_kernel(&self, _: &mut Kernel) -> Result<(), ()> {
        panic!("This fake executor is just here to allow compilation")
    }
}
/// Fake memory-information facade.
pub struct Mem {}
impl Mem {
    /// Reports a fixed, tiny memory size for the fake device.
    pub fn get_mem_size() -> usize {
        const FAKE_MEM_SIZE: usize = 8;
        FAKE_MEM_SIZE
    }
}
/// Fake compiled kernel; configuration calls succeed and store nothing.
pub struct Kernel {}
impl Kernel {
    /// Accepts any cluster count.
    pub fn set_num_clusters(&mut self, _: usize) -> Result<(), ()> {
        Ok(())
    }
    /// Accepts any argument sizes/pointers without retaining them.
    pub fn set_args(&mut self, _: &[usize], _: &[*const libc::c_void]) -> Result<(), ()> {
        Ok(())
    }
}
/// Opaque stand-in for a compiled program wrapper.
pub struct Wrapper {}
|
use std::sync::atomic::AtomicBool;
use metrics_exporter_prometheus::PrometheusHandle;
use warp::Filter;
/// Spawns a server which hosts a `/health` endpoint.
///
/// Also serves `/ready` and `/metrics` (see `routes`). The returned handle
/// resolves when the server task finishes.
pub async fn spawn_server(
    addr: impl Into<std::net::SocketAddr> + 'static,
    readiness: std::sync::Arc<AtomicBool>,
    prometheus_handle: PrometheusHandle,
) -> tokio::task::JoinHandle<()> {
    let server = warp::serve(routes(readiness, prometheus_handle));
    // NOTE(review): warp's `bind` presumably panics if the address is
    // unavailable — confirm before using this with user-supplied addresses.
    let server = server.bind(addr);
    tokio::spawn(server)
}
/// Combines the `/health`, `/ready` and `/metrics` filters into one router.
fn routes(
    readiness: std::sync::Arc<AtomicBool>,
    prometheus_handle: PrometheusHandle,
) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
    health_route()
        .or(ready_route(readiness))
        .or(metrics_route(prometheus_handle))
}
/// Always returns `Ok(200)` at `/health` (liveness probe).
fn health_route() -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
    warp::get().and(warp::path!("health")).map(warp::reply)
}
/// Returns `Ok` if `readiness == true`, or `SERVICE_UNAVAILABLE` otherwise.
fn ready_route(
    readiness: std::sync::Arc<AtomicBool>,
) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
    warp::get()
        .and(warp::path!("ready"))
        // Clone the Arc into each request so the async block can be 'static.
        .map(move || -> std::sync::Arc<AtomicBool> { readiness.clone() })
        .and_then(|readiness: std::sync::Arc<AtomicBool>| async move {
            // `if`/`else` instead of `match` on a bool (clippy::match_bool).
            let status = if readiness.load(std::sync::atomic::Ordering::Relaxed) {
                warp::http::StatusCode::OK
            } else {
                warp::http::StatusCode::SERVICE_UNAVAILABLE
            };
            Ok::<_, std::convert::Infallible>(status)
        })
}
/// Returns Prometheus metrics snapshot at `/metrics`.
fn metrics_route(
    handle: PrometheusHandle,
) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
    warp::get()
        .and(warp::path!("metrics"))
        // Clone the handle into each request so the async block can be 'static.
        .map(move || -> PrometheusHandle { handle.clone() })
        .and_then(|handle: PrometheusHandle| async move {
            Ok::<_, std::convert::Infallible>(warp::http::Response::builder().body(handle.render()))
        })
}
#[cfg(test)]
mod tests {
use metrics_exporter_prometheus::PrometheusBuilder;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
#[tokio::test]
async fn health() {
let recorder = PrometheusBuilder::new().build_recorder();
let handle = recorder.handle();
let readiness = Arc::new(AtomicBool::new(false));
let filter = super::routes(readiness, handle);
let response = warp::test::request().path("/health").reply(&filter).await;
assert_eq!(response.status(), http::StatusCode::OK);
}
#[tokio::test]
async fn ready() {
let recorder = PrometheusBuilder::new().build_recorder();
let handle = recorder.handle();
let readiness = Arc::new(AtomicBool::new(false));
let filter = super::routes(readiness.clone(), handle);
let response = warp::test::request().path("/ready").reply(&filter).await;
assert_eq!(response.status(), http::StatusCode::SERVICE_UNAVAILABLE);
readiness.store(true, std::sync::atomic::Ordering::Relaxed);
let response = warp::test::request().path("/ready").reply(&filter).await;
assert_eq!(response.status(), http::StatusCode::OK);
}
#[tokio::test]
async fn metrics() {
use pathfinder_common::test_utils::metrics::ScopedRecorderGuard;
let recorder = PrometheusBuilder::new().build_recorder();
let handle = recorder.handle();
// Automatically deregister the recorder
let _guard = ScopedRecorderGuard::new(recorder);
// We don't care about the recorder being a singleton as the counter name here does not
// interfere with any other "real" counter registered in pathfinder or other tests
let counter = metrics::register_counter!("x");
counter.increment(123);
let readiness = Arc::new(AtomicBool::new(false));
let filter = super::routes(readiness.clone(), handle);
let response = warp::test::request().path("/metrics").reply(&filter).await;
assert_eq!(response.status(), http::StatusCode::OK);
assert_eq!(response.body(), "# TYPE x counter\nx 123\n\n");
}
}
|
use diesel;
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;
use iron::typemap::Key;
use ruma_identifiers::{RoomId, UserId};
use slog::Logger;
use api::{MatrixApi, RocketchatApi};
use config::Config;
use errors::*;
use handlers::matrix::CommandHandler;
use models::schema::{rocketchat_servers, users_on_rocketchat_servers};
use models::{Room, UserOnRocketchatServer};
/// A Rocket.Chat server.
#[derive(Associations, Debug, Identifiable, Queryable)]
#[table_name = "rocketchat_servers"]
pub struct RocketchatServer {
    /// The unique identifier for the Rocket.Chat server
    pub id: String,
    /// The URL to connect to the Rocket.Chat server
    pub rocketchat_url: String,
    /// The token to identify requests from the Rocket.Chat server
    /// (`None` until the server is connected — see `find_connected_servers`)
    pub rocketchat_token: Option<String>,
    /// created timestamp
    pub created_at: String,
    /// updated timestamp
    pub updated_at: String,
}
/// A new `RocketchatServer`, not yet saved.
#[derive(Insertable)]
#[table_name = "rocketchat_servers"]
pub struct NewRocketchatServer<'a> {
    /// The unique identifier for the Rocket.Chat server
    pub id: &'a str,
    /// The URL to connect to the Rocket.Chat server
    pub rocketchat_url: &'a str,
    /// The token to identify requests from the Rocket.Chat server
    pub rocketchat_token: Option<&'a str>,
}
/// Credentials to perform a login on the Rocket.Chat server. The `user_id` is used to find
/// the corresponding matrix user.
#[derive(Serialize, Deserialize)]
pub struct Credentials {
    /// The users unique id on the Matrix homeserver
    pub user_id: UserId,
    /// The username on the Rocket.Chat server
    pub rocketchat_username: String,
    /// The password on the Rocket.Chat server
    pub password: String,
    /// The URL of the Rocket.Chat server on which the user wants to login
    pub rocketchat_url: String,
}
impl RocketchatServer {
    /// Insert a `RocketchatServer`.
    pub fn insert(connection: &SqliteConnection, new_rocketchat_server: &NewRocketchatServer) -> Result<RocketchatServer> {
        diesel::insert_into(rocketchat_servers::table)
            .values(new_rocketchat_server)
            .execute(connection)
            .chain_err(|| ErrorKind::DBInsertError)?;
        // Re-read the row by its URL to hand back the stored record.
        let server = RocketchatServer::find(connection, new_rocketchat_server.rocketchat_url)?;
        Ok(server)
    }
    /// Find a `RocketchatServer` by its URL, return an error if the `RocketchatServer` is not
    /// found.
    pub fn find(connection: &SqliteConnection, url: &str) -> Result<RocketchatServer> {
        let server = rocketchat_servers::table
            .filter(rocketchat_servers::rocketchat_url.eq(url))
            .first(connection)
            .chain_err(|| ErrorKind::DBSelectError)?;
        Ok(server)
    }
    /// Find a `RocketchatServer` by its ID; `Ok(None)` when no row matches.
    pub fn find_by_id(connection: &SqliteConnection, id: &str) -> Result<Option<RocketchatServer>> {
        let rocketchat_servers = rocketchat_servers::table
            .filter(rocketchat_servers::id.eq(id))
            .load(connection)
            .chain_err(|| ErrorKind::DBSelectError)?;
        Ok(rocketchat_servers.into_iter().next())
    }
    /// Find a `RocketchatServer` by its URL; `Ok(None)` when no row matches.
    pub fn find_by_url(connection: &SqliteConnection, url: &str) -> Result<Option<RocketchatServer>> {
        let rocketchat_servers = rocketchat_servers::table
            .filter(rocketchat_servers::rocketchat_url.eq(url))
            .load(connection)
            .chain_err(|| ErrorKind::DBSelectError)?;
        Ok(rocketchat_servers.into_iter().next())
    }
    /// Find a `RocketchatServer` by its token; `Ok(None)` when no row matches.
    pub fn find_by_token(connection: &SqliteConnection, token: &str) -> Result<Option<RocketchatServer>> {
        let rocketchat_servers = rocketchat_servers::table
            .filter(rocketchat_servers::rocketchat_token.eq(Some(token)))
            .load(connection)
            .chain_err(|| ErrorKind::DBSelectError)?;
        Ok(rocketchat_servers.into_iter().next())
    }
    /// Get all connected servers.
    pub fn find_connected_servers(connection: &SqliteConnection) -> Result<Vec<RocketchatServer>> {
        // A server counts as "connected" once it has a stored token.
        let rocketchat_servers = rocketchat_servers::table
            .filter(rocketchat_servers::rocketchat_token.is_not_null())
            .load::<RocketchatServer>(connection)
            .chain_err(|| ErrorKind::DBSelectError)?;
        Ok(rocketchat_servers)
    }
    /// Perform a login request on the Rocket.Chat server.
    /// Stores the credentials if the login is successful and an error if it fails.
    pub fn login(
        &self,
        config: &Config,
        connection: &SqliteConnection,
        logger: &Logger,
        matrix_api: &MatrixApi,
        credentials: &Credentials,
        admin_room_id: Option<RoomId>,
    ) -> Result<()> {
        let mut user_on_rocketchat_server = UserOnRocketchatServer::find(connection, &credentials.user_id, self.id.clone())?;
        let rocketchat_api = RocketchatApi::new(self.rocketchat_url.clone(), logger.clone())?;
        let (user_id, auth_token) = rocketchat_api.login(&credentials.rocketchat_username, &credentials.password)?;
        user_on_rocketchat_server.set_credentials(connection, Some(user_id.clone()), Some(auth_token.clone()))?;
        // When the login was triggered from an admin room, reply there with
        // the help message so the user sees the available commands.
        if let Some(room_id) = admin_room_id {
            let room = Room::new(config, logger, matrix_api, room_id.clone());
            let bot_user_id = config.matrix_bot_user_id()?;
            let as_url = config.as_url.clone();
            let message = CommandHandler::build_help_message(connection, &room, as_url, &credentials.user_id)?;
            matrix_api.send_text_message(room_id, bot_user_id, message)?;
        }
        info!(logger, "Successfully executed login command on Rocket.Chat server {}", self.rocketchat_url);
        Ok(())
    }
    /// Get all users that are connected to this Rocket.Chat server.
    pub fn logged_in_users_on_rocketchat_server(&self, connection: &SqliteConnection) -> Result<Vec<UserOnRocketchatServer>> {
        let users_on_rocketchat_server: Vec<UserOnRocketchatServer> = users_on_rocketchat_servers::table
            .filter(users_on_rocketchat_servers::rocketchat_server_id.eq(&self.id))
            .filter(users_on_rocketchat_servers::rocketchat_auth_token.is_not_null())
            .load(connection)
            .chain_err(|| ErrorKind::DBSelectError)?;
        Ok(users_on_rocketchat_server)
    }
}
// Allows storing a `RocketchatServer` in an iron `typemap`.
impl Key for RocketchatServer {
    type Value = RocketchatServer;
}
|
use super::UnicodeDots;
use unicode_width::UnicodeWidthChar;
/// Returns the displayed width in columns of `text`.
///
/// Tabs advance to the next multiple of `tabstop`; a char with no known
/// width (`width()` returns `None`) is counted as 2 columns.
fn display_width(text: &str, tabstop: usize) -> usize {
    text.chars().fold(0, |cols, ch| {
        cols + if ch == '\t' {
            tabstop - (cols % tabstop)
        } else {
            ch.width().unwrap_or(2)
        }
    })
}
/// Returns an array where `arr[i]` is the display width of `text` up to and
/// including `char[i]` (same width rules as `display_width`).
fn accumulate_text_width(text: &str, tabstop: usize) -> Vec<usize> {
    let mut total = 0;
    text.chars()
        .map(|ch| {
            total += if ch == '\t' {
                tabstop - (total % tabstop)
            } else {
                ch.width().unwrap_or(2)
            };
            total
        })
        .collect()
}
/// Returns `value` without its first char ("" for empty or 1-char input),
/// always slicing on a char boundary.
fn remove_first_char(value: &str) -> &str {
    match value.char_indices().nth(1) {
        Some((idx, _)) => &value[idx..],
        None => "",
    }
}
/// `String` -> `..ring`.
///
/// Returns the original text with the left part trimmed and the length of trimmed text in chars.
///
/// First drops a bulk estimate of chars (assuming each char is at least one
/// column wide), then removes chars one by one until the rendered width fits.
fn trim_left(text: &str, width: usize, tabstop: usize) -> (&str, usize) {
    // Assume each char takes at least one column
    let chars_count = text.chars().count();
    let (mut text, mut trimmed_chars_len) = if chars_count > width + UnicodeDots::CHAR_LEN {
        let diff = chars_count - width - UnicodeDots::CHAR_LEN;
        // 292 tracing::error!(error = ?e, "💔 Error at initializing GTAGS, attempting to recreate...");, 56, 4
        // thread 'main' panicked at 'byte index 62 is not a char boundary; it is inside '💔' (bytes 61..65) of `292 tracing::error!(error = ?e, "💔 Error at initializing GTAGS, attempting to recreate...");`', library/core/src/str/mod.rs:127:5
        //
        // Can not use `(String::from(&text[diff..]), diff)` due to diff could not a char boundary.
        let mut chars = text.chars();
        (0..diff).for_each(|_| {
            chars.next();
        });
        (chars.as_str(), diff)
    } else {
        (text, 0)
    };
    // Wide chars (and tabs) can make the estimate insufficient: keep trimming
    // char by char until the remainder fits in `width` columns.
    let mut current_width = display_width(text, tabstop);
    while current_width > width && !text.is_empty() {
        text = remove_first_char(text);
        trimmed_chars_len += 1;
        current_width = display_width(text, tabstop);
    }
    (text, trimmed_chars_len)
}
/// `String` -> `Stri..`: keeps at most `width` byte-columns from the left,
/// backing the cut up to the nearest char boundary. Text that already fits
/// is returned unchanged.
fn trim_right(text: &str, width: usize, tabstop: usize) -> &str {
    if display_width(text, tabstop) <= width {
        return text;
    }
    // `width` is used as a byte index here; step back until it is a valid
    // char boundary before slicing.
    let mut cut = width;
    while !text.is_char_boundary(cut) {
        cut -= 1;
    }
    &text[..cut]
}
/// Which side(s) of the original text were trimmed away.
#[derive(Debug)]
pub enum TrimInfo {
    /// `..ring` — `start` chars were cut from the left.
    Left { start: usize },
    /// `Stri..`
    Right,
    /// `..ri..`
    Both { start: usize },
}
impl TrimInfo {
    /// The left-trim offset, when the left side was trimmed at all.
    pub fn left_trim_start(&self) -> Option<usize> {
        if let Self::Left { start } | Self::Both { start } = self {
            Some(*start)
        } else {
            None
        }
    }
}
/// Result of `trim_text`: the shortened line plus remapped highlights.
#[derive(Debug)]
pub struct TrimmedText {
    // Trimmed text with dots.
    pub trimmed_text: String,
    // Highlight positions re-expressed as char indices into `trimmed_text`.
    pub indices: Vec<usize>,
    // Which side(s) were cut, for mapping positions back to the original.
    pub trim_info: TrimInfo,
}
/// Returns the potential trimmed text.
///
/// In order to make the highlights of matches visible in the container as much as possible,
/// both the left and right of the original text can be trimmed.
///
/// For example, if the matches appear in the end of a long string, we should trim the left and
/// only show the right part.
///
/// ```text
/// xxxxxxxxxxxxxxxxxxxxxxxxxxMMxxxxxMxxxxx
/// shift ->| |
/// ```
///
/// container_width = winwidth - prefix_length
///
/// # Arguments
///
/// - `text`: original untruncated text.
/// - `indices`: highlights in char-positions.
/// - `container_width`: the width of window to display the text.
///
/// Returns `None` when the text already fits (nothing to trim).
///
/// NOTE(review): `indices[0]` panics on an empty slice — callers must pass at
/// least one highlight; confirm at the call sites.
pub fn trim_text(
    text: &str,
    indices: &[usize],
    container_width: usize,
    tabstop: usize,
) -> Option<TrimmedText> {
    let match_start = indices[0];
    let match_end = *indices.last()?;
    let acc_width = accumulate_text_width(text, tabstop);
    // Width needed for displaying the whole text.
    let full_width = *acc_width.last()?;
    if full_width <= container_width {
        return None;
    }
    // xxxxxxxxxxxxxxxxxxxxMMxxxxxMxxxxxMMMxxxxxxxxxxxx
    // |<- w1 ->|<- w2 ->|<- w3 ->|
    //
    // w1, w2, w3 = len_before_matched, len_matched, len_after_matched
    let w1 = if match_start == 0 {
        0
    } else {
        acc_width[match_start - 1]
    };
    let w2 = if match_end >= acc_width.len() {
        full_width - w1
    } else {
        acc_width[match_end] - w1
    };
    let w3 = full_width - w1 - w2;
    if (w1 > w3 && w2 + w3 <= container_width) || (w3 <= 2) {
        // right-fixed, ..ring
        let (trimmed_text, trimmed_len) =
            trim_left(text, container_width - UnicodeDots::CHAR_LEN, tabstop);
        let trimmed_text = format!("{}{trimmed_text}", UnicodeDots::DOTS);
        // Shift highlights right by the dots and left by the trimmed chars;
        // `checked_sub` drops highlights that fell off the left edge.
        let indices = indices
            .iter()
            .filter_map(|x| (x + UnicodeDots::CHAR_LEN).checked_sub(trimmed_len))
            .filter(|x| *x > UnicodeDots::CHAR_LEN - 1) // Ignore the highlights in `..`
            .collect();
        Some(TrimmedText {
            trimmed_text,
            indices,
            trim_info: TrimInfo::Left { start: trimmed_len },
        })
    } else if w1 <= w3 && w1 + w2 <= container_width {
        // left-fixed, Stri..
        let trimmed_text = trim_right(text, container_width - UnicodeDots::CHAR_LEN, tabstop);
        let trimmed_text = format!("{trimmed_text}{}", UnicodeDots::DOTS);
        let indices = indices
            .iter()
            .filter(|x| *x + UnicodeDots::CHAR_LEN < container_width) // Ignore the highlights in `..`
            .copied()
            .collect::<Vec<_>>();
        Some(TrimmedText {
            trimmed_text,
            indices,
            trim_info: TrimInfo::Right,
        })
    } else {
        // Convert the char-position to byte-position.
        let match_start_byte_idx = text.char_indices().nth(match_start)?.0;
        // left-right, ..Stri..
        let left_truncated_text = &text[match_start_byte_idx..];
        let trimmed_text = trim_right(
            left_truncated_text,
            container_width - UnicodeDots::CHAR_LEN - UnicodeDots::CHAR_LEN,
            tabstop,
        );
        let trimmed_text = format!("{}{trimmed_text}{}", UnicodeDots::DOTS, UnicodeDots::DOTS);
        let indices = indices
            .iter()
            .map(|x| x - match_start + UnicodeDots::CHAR_LEN)
            .filter(|x| *x + UnicodeDots::CHAR_LEN < container_width) // Ignore the highlights in `..`
            .collect::<Vec<_>>();
        Some(TrimmedText {
            trimmed_text,
            indices,
            trim_info: TrimInfo::Both {
                start: match_start_byte_idx,
            },
        })
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::filter_single_line;
use types::MatchedItem;
const DOTS: char = UnicodeDots::DOTS;
#[test]
fn test_trim_left() {
    // 16 ASCII chars, keep 5 columns -> the first 11 chars are trimmed.
    let text = "0123456789abcdef";
    let width = 5;
    let (trimmed, offset) = trim_left(text, width, 4);
    assert_eq!(trimmed, "bcdef");
    assert_eq!(offset, 11);
}
#[test]
fn test_trim_right() {
    // 16 ASCII chars, keep 5 columns -> only the leading 5 chars survive.
    let text = "0123456789abcdef";
    let width = 5;
    let trimmed = trim_right(text, width, 4);
    assert_eq!(trimmed, "01234");
}
#[test]
fn test_trim_text() {
    // End-to-end: run the real matcher, trim, and verify both the displayed
    // line and that the remapped indices still pick out the highlight chars.
    // raw_line, query, highlighted, container_width, display_line
    let test_cases = vec![
        (
            "directories/are/nested/a/lot/then/the/matched/items/will/be/invisible/file.scss",
            "files",
            "files",
            50usize,
            format!("{DOTS}hen/the/matched/items/will/be/invisible/file.scss"),
        ),
        (
            "directories/are/nested/a/lot/then/the/matched/items/will/be/invisible/another-file.scss",
            "files",
            "files",
            50usize,
            format!("{DOTS}matched/items/will/be/invisible/another-file.scss"),
        ),
        (
            "directories/are/nested/a/lot/then/the/matched/items/will/be/invisible/file.js",
            "files",
            "files",
            50usize,
            format!("{DOTS}/then/the/matched/items/will/be/invisible/file.js"),
        ),
        (
            "directories/are/nested/a/lot/then/the/matched/items/will/be/invisible/another-file.js",
            "files",
            "files",
            50usize,
            format!("{DOTS}e/matched/items/will/be/invisible/another-file.js"),
        ),
        (
            "/Users/xuliucheng/Library/Caches/Homebrew/universal-ctags--git/Units/afl-fuzz.r/github-issue-625-r.d/input.r",
            "srcggithub",
            "srcg",
            50usize,
            format!("{DOTS}s/Homebrew/universal-ctags--git/Units/afl-fuzz.r{DOTS}"),
        ),
        (
            " // Wait until propagation delay period after block we plan to mine on",
            "bmine",
            "bmine",
            58usize,
            format!("{DOTS}l propagation delay period after block we plan to mine on"),
        ),
        (
            "fuzzy-filter/target/debug/deps/librustversion-b273394e6c9c64f6.dylib.dSYM/Contents/Resources/DWARF/librustversion-b273394e6c9c64f6.dylib",
            "srlisresource",
            "srlisR",
            50usize,
            format!("{DOTS}stversion-b273394e6c9c64f6.dylib.dSYM/Contents/R{DOTS}"),
        ),
        (
            "target/debug/deps/libstructopt_derive-3921fbf02d8d2ffe.dylib.dSYM/Contents/Resources/DWARF/libstructopt_derive-3921fbf02d8d2ffe.dylib",
            "srlisresource",
            "srli",
            50usize,
            format!("{DOTS}structopt_derive-3921fbf02d8d2ffe.dylib.dSYM/Con{DOTS}")
        ),
        (
            "fuzzy-filter/target/debug/deps/librustversion-15764ff2535f190d.dylib.dSYM/Contents/Resources/DWARF/librustversion-15764ff2535f190d.dylib",
            "srlisresource",
            "srlisR",
            50usize,
            format!("{DOTS}stversion-15764ff2535f190d.dylib.dSYM/Contents/R{DOTS}")
        ),
        (
            "crates/readtags/sys/libreadtags/autom4te.cache/requests",
            "srlisrs",
            "lisrs",
            42usize,
            format!("{DOTS}s/sys/libreadtags/autom4te.cache/requests")
        ),
        (
            "crates/maple_cli/src/dumb_analyzer/find_usages/default_types.rs",
            "srlisrs",
            "lisrs",
            42usize,
            format!("{DOTS}umb_analyzer/find_usages/default_types.rs")
        ),
        (
            r#"crates/printer/src/lib.rs:312:4:" crates/fuzzy_filter/target/debug/deps/librustversion-15764ff2535f190d.dylib.dSYM/Contents/Resources/DWARF/librustversion-15764ff2535f190d.dylib"#,
            "ctagslisr",
            "ctagsli",
            80usize,
            format!("{DOTS}crates/fuzzy_filter/target/debug/deps/librustversion-15764ff2535f190d.dylib.dS{DOTS}")
        ),
    ];
    for (text, query, highlighted, container_width, display_line) in test_cases {
        let ranked = filter_single_line(text.to_string(), query);
        let MatchedItem { indices, .. } = ranked[0].clone();
        // `None` from trim_text means the line already fits: show it as-is.
        let (display_line_got, indices_post) = trim_text(text, &indices, container_width, 4)
            .map(|trimmed| (trimmed.trimmed_text, trimmed.indices))
            .unwrap_or_else(|| (text.into(), indices.clone()));
        let truncated_text_got = display_line_got.clone();
        // Re-derive the highlighted chars from the trimmed output.
        let highlighted_got = indices_post
            .iter()
            .filter_map(|i| truncated_text_got.chars().nth(*i))
            .collect::<String>();
        assert_eq!(display_line, display_line_got);
        assert_eq!(highlighted, highlighted_got);
    }
}
}
|
use anyhow::{Context, Result};
use std::{env, path::PathBuf};
const PROTO_FILE: &str = "src/kubernetes/cri/proto/api.proto";
/// Build script: generates the CRI gRPC bindings from `api.proto` into
/// `src/kubernetes/cri/api`, using the proto file's directory as include path.
fn main() -> Result<()> {
    tonic_build::configure()
        .out_dir("src/kubernetes/cri/api")
        .compile(
            &[PROTO_FILE],
            &[&PathBuf::from(PROTO_FILE)
                .parent()
                .context("no path parent")?
                .display()
                .to_string()],
        )
        .context("compile CRI protobuffers")?;
    Ok(())
}
/// Generated gRPC/protobuf types for the `node` package.
pub mod node {
    tonic::include_proto!("node");
}
pub mod client;
|
use std::process::Command;
use std::str;
/// Reads a PNG image from the X clipboard via `xclip`, if one is present.
///
/// First queries the clipboard's TARGETS list; only when it advertises
/// `image/png` is the image data itself requested. Returns `None` if `xclip`
/// fails, the target list is not UTF-8, or no PNG is offered.
pub fn get_clipboard_image() -> Option<Vec<u8>> {
    // Run `xclip -o -selection clipboard -t <target>` and capture stdout.
    fn xclip_paste(target: &str) -> Option<Vec<u8>> {
        Command::new("xclip")
            .arg("-o")
            .arg("-selection")
            .arg("clipboard")
            .arg("-t")
            .arg(target)
            .output()
            .ok()
            .map(|out| out.stdout)
    }
    let targets = xclip_paste("TARGETS")?;
    let listing = str::from_utf8(&targets).ok()?;
    for line in listing.lines() {
        if line == "image/png" {
            if let Some(png) = xclip_paste("image/png") {
                return Some(png);
            }
        }
    }
    None
}
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::path::{Path, PathBuf};
use std::fmt;
/// An identifier plus the namespace qualifiers preceding it,
/// e.g. `mozilla::dom::PContent`.
#[derive(Debug, Clone)]
pub struct QualifiedId {
    pub base_id: Identifier,
    pub quals: Vec<String>,
}
impl QualifiedId {
    /// Creates an unqualified id.
    pub fn new(base: Identifier) -> QualifiedId {
        QualifiedId { base_id: base, quals: Vec::new() }
    }
    /// Moves the current base id onto the qualifier list and makes `id` the
    /// new base: `a` qualified by `b` reads `a::b`.
    pub fn qualify(mut self, id: Identifier) -> QualifiedId {
        self.quals.push(self.base_id.id);
        self.base_id = id;
        self
    }
    /// Builds a qualified id from path segments, with a dummy `<builtin>`
    /// location on every segment.
    ///
    /// # Panics
    /// Panics if `ids` yields no element.
    pub fn new_from_iter<'a, I>(mut ids: I) -> QualifiedId
        where I: Iterator<Item = &'a str>
    {
        let loc = Location { file_name: PathBuf::from("<builtin>"), lineno: 0, colno: 0 };
        let mut qual_id = QualifiedId::new(Identifier::new(String::from(ids.next().unwrap()), loc.clone()));
        for i in ids {
            qual_id = qual_id.qualify(Identifier::new(String::from(i), loc.clone()));
        }
        qual_id
    }
    /// The base identifier without qualifiers.
    pub fn short_name(&self) -> String {
        self.base_id.to_string()
    }
    /// The fully qualified `a::b::c` form, or `None` when there are no
    /// qualifiers (the short name already is the full name).
    pub fn full_name(&self) -> Option<String> {
        if self.quals.is_empty() {
            None
        } else {
            Some(self.to_string())
        }
    }
    /// Source location of the base identifier.
    pub fn loc(&self) -> &Location {
        &self.base_id.loc
    }
}
impl fmt::Display for QualifiedId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for q in &self.quals {
            // `?` replaces the deprecated `try!` macro (a hard error in the
            // 2018+ editions) with identical semantics.
            write!(f, "{}::", q)?;
        }
        write!(f, "{}", self.base_id)
    }
}
/// A type reference: a (possibly qualified) name plus array/nullable flags.
#[derive(Debug)]
pub struct TypeSpec {
    pub spec: QualifiedId,
    pub array: bool,
    pub nullable: bool,
}
impl TypeSpec {
    /// Plain, non-array, non-nullable type.
    pub fn new(spec: QualifiedId) -> TypeSpec {
        TypeSpec { spec, array: false, nullable: false }
    }
    // XXX Get rid of these setters if the fields are just public anyways?
    /// Builder-style setter for the array flag.
    pub fn set_array(mut self, is_array: bool) -> TypeSpec {
        self.array = is_array;
        self
    }
    /// Builder-style setter for the nullable flag.
    pub fn set_nullable(mut self, is_nullable: bool) -> TypeSpec {
        self.nullable = is_nullable;
        self
    }
    /// Location of the underlying type name.
    pub fn loc(&self) -> &Location {
        self.spec.loc()
    }
}
/// A single message parameter: its name and type.
#[derive(Debug)]
pub struct Param {
    pub name: Identifier,
    pub type_spec: TypeSpec,
}
impl Param {
    pub fn new(type_spec: TypeSpec, name: Identifier) -> Param {
        Param { name, type_spec }
    }
}
/// A field inside a declared struct: its type and name.
#[derive(Debug)]
pub struct StructField {
    pub type_spec: TypeSpec,
    pub name: Identifier,
}
impl StructField {
    pub fn new(ty: TypeSpec, name: Identifier) -> StructField {
        StructField { type_spec: ty, name }
    }
}
/// A name together with its enclosing namespaces, outermost first.
#[derive(Clone, Debug)]
pub struct Namespace {
    pub name: Identifier,
    pub namespaces: Vec<String>,
}
impl Namespace {
    pub fn new(name: Identifier) -> Namespace {
        Namespace { name, namespaces: Vec::new() }
    }
    /// Prepends `namespace` as the new outermost namespace.
    pub fn add_outer_namespace(&mut self, namespace: &str) {
        self.namespaces.insert(0, namespace.to_string());
    }
    /// The fully qualified id `ns0::ns1::name`.
    pub fn qname(&self) -> QualifiedId {
        QualifiedId { base_id: self.name.clone(), quals: self.namespaces.clone() }
    }
}
/// Compression level parsed from a message's `compress` modifier.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Compress {
    None,
    Enabled,
    All,
}
/// A modifier parsed after a message declaration; see
/// `MessageDecl::add_modifiers` for how each variant is applied.
#[derive(Debug)]
pub enum MessageModifier {
    Verify,
    Compress(Compress),
}
/// How a message (or whole protocol) is delivered: async, sync, or interrupt.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum SendSemantics {
    Async,
    Sync,
    Intr,
}
impl SendSemantics {
    pub fn is_async(&self) -> bool {
        matches!(self, SendSemantics::Async)
    }
    pub fn is_sync(&self) -> bool {
        matches!(self, SendSemantics::Sync)
    }
    pub fn is_intr(&self) -> bool {
        matches!(self, SendSemantics::Intr)
    }
}
/// Nesting level of a message; ordered (`PartialOrd`) from `None` upward.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)]
pub enum Nesting {
    None,
    InsideSync,
    InsideCpow,
}
impl Nesting {
    pub fn is_none(&self) -> bool {
        matches!(self, Nesting::None)
    }
    pub fn inside_sync(&self) -> bool {
        matches!(self, Nesting::InsideSync)
    }
    pub fn inside_cpow(&self) -> bool {
        matches!(self, Nesting::InsideCpow)
    }
}
/// Message priority parsed from the declaration.
#[derive(Debug, Clone, Copy)]
pub enum Priority {
    Normal,
    High,
    Input,
}
/// Which side(s) a message may be sent to.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Direction {
    ToParent,
    ToChild,
    ToParentOrChild,
}
impl Direction {
    pub fn is_to_parent(&self) -> bool {
        matches!(self, Direction::ToParent)
    }
    pub fn is_to_child(&self) -> bool {
        matches!(self, Direction::ToChild)
    }
    pub fn is_both(&self) -> bool {
        matches!(self, Direction::ToParentOrChild)
    }
}
/// A source position: file, 1-based line, column.
#[derive(Debug, Clone)]
pub struct Location {
    pub file_name: PathBuf,
    pub lineno: usize,
    pub colno: usize,
}
impl fmt::Display for Location {
    /// Renders as `file line:col`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "{file} {line}:{col}",
            file = self.file_name.display(),
            line = self.lineno,
            col = self.colno
        )
    }
}
/// A name as written in the source, together with where it appeared.
#[derive(Debug, Clone)]
pub struct Identifier {
    pub id: String,
    pub loc: Location,
}
impl Identifier {
    pub fn new(name: String, loc: Location) -> Identifier {
        Identifier { id: name, loc }
    }
}
impl fmt::Display for Identifier {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.id)
    }
}
/// One IPDL message declaration: name, delivery attributes, and parameters.
#[derive(Debug)]
pub struct MessageDecl {
    pub name: Identifier,
    pub send_semantics: SendSemantics,
    pub nested: Nesting,
    pub prio: Priority,
    pub direction: Direction,
    pub in_params: Vec<Param>,
    pub out_params: Vec<Param>,
    pub compress: Compress,
    pub verify: bool,
}
impl MessageDecl {
    /// Creates a message with default attributes: async, not nested, normal
    /// priority, to-parent, no params, no modifiers.
    pub fn new(name: Identifier) -> MessageDecl {
        MessageDecl {
            name,
            send_semantics: SendSemantics::Async,
            nested: Nesting::None,
            prio: Priority::Normal,
            direction: Direction::ToParent,
            in_params: Vec::new(),
            out_params: Vec::new(),
            compress: Compress::None,
            verify: false,
        }
    }
    pub fn add_in_params(&mut self, in_params: Vec<Param>) {
        self.in_params.extend(in_params);
    }
    pub fn add_out_params(&mut self, out_params: Vec<Param>) {
        self.out_params.extend(out_params);
    }
    /// Applies parsed modifiers: a later `Compress` overrides an earlier one,
    /// `Verify` is sticky once seen.
    pub fn add_modifiers(&mut self, modifiers: Vec<MessageModifier>) {
        for modifier in modifiers {
            match modifier {
                MessageModifier::Compress(c) => self.compress = c,
                MessageModifier::Verify => self.verify = true,
            }
        }
    }
}
/// A protocol body: its delivery semantics, manager/managee relations, and
/// the declared messages.
#[derive(Debug)]
pub struct Protocol {
    pub send_semantics: SendSemantics,
    pub nested: Nesting,
    pub managers: Vec<Identifier>,
    pub manages: Vec<Identifier>,
    pub messages: Vec<MessageDecl>,
}
impl Protocol {
    pub fn new(
        send_semantics: SendSemantics,
        nested: Nesting,
        managers: Vec<Identifier>,
        manages: Vec<Identifier>,
        decls: Vec<MessageDecl>,
    ) -> Protocol {
        Protocol { send_semantics, nested, managers, manages, messages: decls }
    }
}
/// Whether an imported C++ type is declared with `struct` or `class`.
#[derive(Debug)]
pub enum CxxTypeKind {
    Struct,
    Class,
}
/// A C++ type imported from a header — presumably from an IPDL `using`
/// statement; confirm against the parser.
#[derive(Debug)]
pub struct UsingStmt {
    pub cxx_type: TypeSpec,
    pub header: String,
    // `None` when the statement did not say struct/class explicitly.
    pub kind: Option<CxxTypeKind>,
}
/// Kind of translation unit, derived from the file extension.
#[derive(Clone, Debug, PartialEq)]
pub enum FileType {
    Protocol,
    Header,
}
impl FileType {
    /// `.ipdlh` → `Header`, any other extension → `Protocol`,
    /// no extension at all → `None`.
    pub fn from_file_path(file_path: &Path) -> Option<FileType> {
        file_path.extension().map(|ext| {
            if ext == "ipdlh" {
                FileType::Header
            } else {
                FileType::Protocol
            }
        })
    }
}
// Translation unit identifier.
pub type TUId = i32;
/// One parsed `.ipdl`/`.ipdlh` file and everything declared in it.
#[derive(Debug)]
pub struct TranslationUnit {
    pub namespace: Namespace,
    pub file_type: FileType,
    pub file_name: PathBuf,
    // Extra C++ headers to include, and the other TUs this one includes.
    pub cxx_includes: Vec<String>,
    pub includes: Vec<TUId>,
    pub using: Vec<UsingStmt>,
    // Struct and union declarations, each with their namespace.
    pub structs: Vec<(Namespace, Vec<StructField>)>,
    pub unions: Vec<(Namespace, Vec<TypeSpec>)>,
    // At most one protocol per file; headers have `None`.
    pub protocol: Option<(Namespace, Protocol)>,
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/// Constructs a suggestion with common fields.
/// Remember to `use crate::models::{Suggestion, DisplayInfo, Intent, AddModInfo};`
///
/// NOTE(review): this macro is `#[macro_export]`ed but expands to
/// `create_suggestion!`, which is not exported — confirm that external
/// callers can actually resolve the inner macro.
#[macro_export]
macro_rules! suggestion {
    // Match without icon
    (
        action = $action:expr,
        title = $title:expr,
        parameters = [$($rest:tt)*],
        story = $story:expr
    ) => {
        create_suggestion!(
            action = $action,
            parameters = [$($rest)*], story = $story,
            display_info = DisplayInfo::new().with_title($title),
        )
    };
    // Match with icon
    (
        action = $action:expr,
        title = $title:expr,
        icon = $icon:expr,
        parameters = [$($rest:tt)*],
        story = $story:expr
    ) => {
        create_suggestion!(
            action = $action,
            parameters = [$($rest)*], story = $story,
            display_info = DisplayInfo::new().with_icon($icon).with_title($title),
        )
    };
}
/// Expands to a `Suggestion::new(AddModInfo::new(Intent..., ...), display_info)`
/// expression. Each `(name = ..., entity_reference = ...)` pair in
/// `parameters` becomes an `.add_parameter(name, reference)` call on the
/// `Intent`; `story` is converted to a `String` and wrapped in `Some`.
macro_rules! create_suggestion {
    (
        action = $action:expr,
        parameters = [
            $((
                name = $name:expr,
                entity_reference = $reference:expr
            )
            ),*
        ],
        story = $story:expr,
        display_info = $display_info:expr,
    ) => {
        Suggestion::new(
            AddModInfo::new(
                Intent::new()
                    .with_action($action)
                    $(.add_parameter($name, $reference))*,
                Some($story.to_string()),
                None,
            ),
            $display_info
        )
    };
}
|
use exonum::crypto::{Hash, PublicKey, hash};
use crate::common_structs;
// 160, 176, 192 and 208
// Tag bytes prepended to generated ids so the id kind is recognizable
// from its first byte (0xa0, 0xb0, 0xc0, 0xd0).
const A_NUMBER: u8 = 160;
const B_NUMBER: u8 = 176;
// NOTE(review): C_NUMBER and D_NUMBER are not used by `gen_id` —
// presumably reserved for future `IdType` variants; confirm.
const C_NUMBER: u8 = 192;
const D_NUMBER: u8 = 208;
/// Source material from which a deterministic id is derived.
pub enum IdType {
    /// An account, identified by its public key.
    Account(PublicKey),
    /// A token, identified by its owner's id bytes and its ticker symbol.
    Token { owner_id: Vec<u8>, symbol: String }
}
/// Derives a deterministic 16-byte id for `source`.
///
/// Layout: one tag byte (`A_NUMBER` for accounts, `B_NUMBER` for tokens)
/// followed by the first 15 bytes of the `exonum::crypto::hash` of the
/// id payload.
fn gen_id(source: &IdType) -> Vec<u8> {
    let (payload, tag) = match source {
        IdType::Account(pk) => (pk.as_ref().to_vec(), A_NUMBER),
        IdType::Token { owner_id, symbol } =>
            ([owner_id.as_slice(), symbol.as_bytes()].concat(), B_NUMBER),
    };
    let digest = hash(&payload);
    // Tag byte first, then 15 digest bytes: 16 bytes total.
    let mut address = Vec::with_capacity(16);
    address.push(tag);
    address.extend_from_slice(&digest.as_ref()[..15]);
    address
}
#[cfg(test)]
mod tests {
    use exonum::crypto::{SEED_LENGTH, Seed, gen_keypair_from_seed};
    use super::*;
    use hex::encode;

    /// An account id is the `A_NUMBER` tag byte (0xa0) followed by the
    /// first 15 bytes of the hash of the public key.
    #[test]
    fn gen_id_account() {
        // The secret key is not needed; the leading underscore silences
        // the unused-variable warning.
        let (public_key, _secret_key) =
            gen_keypair_from_seed(&Seed::new([1; SEED_LENGTH]));
        let id_type = IdType::Account(public_key);
        let id = gen_id(&id_type);
        // Call the imported `encode` directly; the fully-qualified
        // `hex::encode` left the `use hex::encode;` import unused.
        assert_eq!(encode(id), "a034750f98bd59fcfc946da45aaabe93")
    }

    /// A token id is the `B_NUMBER` tag byte (0xb0) followed by the
    /// first 15 bytes of the hash of owner-id bytes + symbol bytes.
    #[test]
    fn gen_token_account() {
        let account_id = vec![1; 16];
        let id_type = IdType::Token {
            owner_id: account_id,
            symbol: String::from("USD")
        };
        let id = gen_id(&id_type);
        assert_eq!(encode(id), "b02abae7735206dd7e0901f2f967eea8")
    }
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
// Raw FFI bindings to the Windows Remote Management (WinRM / WS-Management)
// client and plugin API. Presumably machine-generated from Windows metadata
// (windows-rs style) — NOTE(review): confirm before hand-editing. Items that
// take `PWSTR`/`HANDLE`/`BOOL` are gated on the `Win32_Foundation` feature.
#[link(name = "windows")]
extern "system" {
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManCloseCommand(commandhandle: *mut WSMAN_COMMAND, flags: u32, r#async: *const WSMAN_SHELL_ASYNC);
    pub fn WSManCloseOperation(operationhandle: *mut WSMAN_OPERATION, flags: u32) -> u32;
    pub fn WSManCloseSession(session: *mut WSMAN_SESSION, flags: u32) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManCloseShell(shellhandle: *mut WSMAN_SHELL, flags: u32, r#async: *const WSMAN_SHELL_ASYNC);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManConnectShell(session: *mut WSMAN_SESSION, flags: u32, resourceuri: super::super::Foundation::PWSTR, shellid: super::super::Foundation::PWSTR, options: *const WSMAN_OPTION_SET, connectxml: *const WSMAN_DATA, r#async: *const WSMAN_SHELL_ASYNC, shell: *mut *mut WSMAN_SHELL);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManConnectShellCommand(shell: *mut WSMAN_SHELL, flags: u32, commandid: super::super::Foundation::PWSTR, options: *const WSMAN_OPTION_SET, connectxml: *const WSMAN_DATA, r#async: *const WSMAN_SHELL_ASYNC, command: *mut *mut WSMAN_COMMAND);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManCreateSession(apihandle: *const WSMAN_API, connection: super::super::Foundation::PWSTR, flags: u32, serverauthenticationcredentials: *const WSMAN_AUTHENTICATION_CREDENTIALS, proxyinfo: *const WSMAN_PROXY_INFO, session: *mut *mut WSMAN_SESSION) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManCreateShell(session: *mut WSMAN_SESSION, flags: u32, resourceuri: super::super::Foundation::PWSTR, startupinfo: *const WSMAN_SHELL_STARTUP_INFO_V11, options: *const WSMAN_OPTION_SET, createxml: *const WSMAN_DATA, r#async: *const WSMAN_SHELL_ASYNC, shell: *mut *mut WSMAN_SHELL);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManCreateShellEx(session: *mut WSMAN_SESSION, flags: u32, resourceuri: super::super::Foundation::PWSTR, shellid: super::super::Foundation::PWSTR, startupinfo: *const WSMAN_SHELL_STARTUP_INFO_V11, options: *const WSMAN_OPTION_SET, createxml: *const WSMAN_DATA, r#async: *const WSMAN_SHELL_ASYNC, shell: *mut *mut WSMAN_SHELL);
    pub fn WSManDeinitialize(apihandle: *mut WSMAN_API, flags: u32) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManDisconnectShell(shell: *mut WSMAN_SHELL, flags: u32, disconnectinfo: *const WSMAN_SHELL_DISCONNECT_INFO, r#async: *const WSMAN_SHELL_ASYNC);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManGetErrorMessage(apihandle: *const WSMAN_API, flags: u32, languagecode: super::super::Foundation::PWSTR, errorcode: u32, messagelength: u32, message: super::super::Foundation::PWSTR, messagelengthused: *mut u32) -> u32;
    pub fn WSManGetSessionOptionAsDword(session: *const WSMAN_SESSION, option: WSManSessionOption, value: *mut u32) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManGetSessionOptionAsString(session: *const WSMAN_SESSION, option: WSManSessionOption, stringlength: u32, string: super::super::Foundation::PWSTR, stringlengthused: *mut u32) -> u32;
    pub fn WSManInitialize(flags: u32, apihandle: *mut *mut WSMAN_API) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginAuthzOperationComplete(senderdetails: *const WSMAN_SENDER_DETAILS, flags: u32, userauthorizationcontext: *const ::core::ffi::c_void, errorcode: u32, extendederrorinformation: super::super::Foundation::PWSTR) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginAuthzQueryQuotaComplete(senderdetails: *const WSMAN_SENDER_DETAILS, flags: u32, quota: *const WSMAN_AUTHZ_QUOTA, errorcode: u32, extendederrorinformation: super::super::Foundation::PWSTR) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginAuthzUserComplete(senderdetails: *const WSMAN_SENDER_DETAILS, flags: u32, userauthorizationcontext: *const ::core::ffi::c_void, impersonationtoken: super::super::Foundation::HANDLE, userisadministrator: super::super::Foundation::BOOL, errorcode: u32, extendederrorinformation: super::super::Foundation::PWSTR) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginFreeRequestDetails(requestdetails: *const WSMAN_PLUGIN_REQUEST) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginGetConfiguration(plugincontext: *const ::core::ffi::c_void, flags: u32, data: *mut WSMAN_DATA) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginGetOperationParameters(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, data: *mut WSMAN_DATA) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginOperationComplete(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, errorcode: u32, extendedinformation: super::super::Foundation::PWSTR) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginReceiveResult(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, stream: super::super::Foundation::PWSTR, streamresult: *const WSMAN_DATA, commandstate: super::super::Foundation::PWSTR, exitcode: u32) -> u32;
    pub fn WSManPluginReportCompletion(plugincontext: *const ::core::ffi::c_void, flags: u32) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManPluginReportContext(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, context: *const ::core::ffi::c_void) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManReceiveShellOutput(shell: *mut WSMAN_SHELL, command: *const WSMAN_COMMAND, flags: u32, desiredstreamset: *const WSMAN_STREAM_ID_SET, r#async: *const WSMAN_SHELL_ASYNC, receiveoperation: *mut *mut WSMAN_OPERATION);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManReconnectShell(shell: *mut WSMAN_SHELL, flags: u32, r#async: *const WSMAN_SHELL_ASYNC);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManReconnectShellCommand(commandhandle: *mut WSMAN_COMMAND, flags: u32, r#async: *const WSMAN_SHELL_ASYNC);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManRunShellCommand(shell: *mut WSMAN_SHELL, flags: u32, commandline: super::super::Foundation::PWSTR, args: *const WSMAN_COMMAND_ARG_SET, options: *const WSMAN_OPTION_SET, r#async: *const WSMAN_SHELL_ASYNC, command: *mut *mut WSMAN_COMMAND);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManRunShellCommandEx(shell: *mut WSMAN_SHELL, flags: u32, commandid: super::super::Foundation::PWSTR, commandline: super::super::Foundation::PWSTR, args: *const WSMAN_COMMAND_ARG_SET, options: *const WSMAN_OPTION_SET, r#async: *const WSMAN_SHELL_ASYNC, command: *mut *mut WSMAN_COMMAND);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManSendShellInput(shell: *const WSMAN_SHELL, command: *const WSMAN_COMMAND, flags: u32, streamid: super::super::Foundation::PWSTR, streamdata: *const WSMAN_DATA, endofstream: super::super::Foundation::BOOL, r#async: *const WSMAN_SHELL_ASYNC, sendoperation: *mut *mut WSMAN_OPERATION);
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManSetSessionOption(session: *const WSMAN_SESSION, option: WSManSessionOption, data: *const WSMAN_DATA) -> u32;
    #[cfg(feature = "Win32_Foundation")]
    pub fn WSManSignalShell(shell: *const WSMAN_SHELL, command: *const WSMAN_COMMAND, flags: u32, code: super::super::Foundation::PWSTR, r#async: *const WSMAN_SHELL_ASYNC, signaloperation: *mut *mut WSMAN_OPERATION);
}
// Error codes returned by the WSMan API above. Values fall in the
// HRESULT-style 0x8033xxxx range — presumably generated from Windows
// metadata alongside the bindings; NOTE(review): do not edit by hand.
pub const ERROR_REDIRECT_LOCATION_INVALID: u32 = 2150859191u32;
pub const ERROR_REDIRECT_LOCATION_TOO_LONG: u32 = 2150859190u32;
pub const ERROR_SERVICE_CBT_HARDENING_INVALID: u32 = 2150859192u32;
pub const ERROR_WINRS_CLIENT_CLOSERECEIVEHANDLE_NULL_PARAM: u32 = 2150859058u32;
pub const ERROR_WINRS_CLIENT_CLOSESENDHANDLE_NULL_PARAM: u32 = 2150859061u32;
pub const ERROR_WINRS_CLIENT_CLOSESHELL_NULL_PARAM: u32 = 2150859050u32;
pub const ERROR_WINRS_CLIENT_CREATESHELL_NULL_PARAM: u32 = 2150859049u32;
pub const ERROR_WINRS_CLIENT_FREECREATESHELLRESULT_NULL_PARAM: u32 = 2150859051u32;
pub const ERROR_WINRS_CLIENT_FREEPULLRESULT_NULL_PARAM: u32 = 2150859056u32;
pub const ERROR_WINRS_CLIENT_FREERUNCOMMANDRESULT_NULL_PARAM: u32 = 2150859053u32;
pub const ERROR_WINRS_CLIENT_GET_NULL_PARAM: u32 = 2150859062u32;
pub const ERROR_WINRS_CLIENT_INVALID_FLAG: u32 = 2150859040u32;
pub const ERROR_WINRS_CLIENT_NULL_PARAM: u32 = 2150859041u32;
pub const ERROR_WINRS_CLIENT_PULL_NULL_PARAM: u32 = 2150859057u32;
pub const ERROR_WINRS_CLIENT_PUSH_NULL_PARAM: u32 = 2150859060u32;
pub const ERROR_WINRS_CLIENT_RECEIVE_NULL_PARAM: u32 = 2150859055u32;
pub const ERROR_WINRS_CLIENT_RUNCOMMAND_NULL_PARAM: u32 = 2150859052u32;
pub const ERROR_WINRS_CLIENT_SEND_NULL_PARAM: u32 = 2150859059u32;
pub const ERROR_WINRS_CLIENT_SIGNAL_NULL_PARAM: u32 = 2150859054u32;
pub const ERROR_WINRS_CODE_PAGE_NOT_SUPPORTED: u32 = 2150859072u32;
pub const ERROR_WINRS_CONNECT_RESPONSE_BAD_BODY: u32 = 2150859211u32;
pub const ERROR_WINRS_IDLETIMEOUT_OUTOFBOUNDS: u32 = 2150859250u32;
pub const ERROR_WINRS_RECEIVE_IN_PROGRESS: u32 = 2150859047u32;
pub const ERROR_WINRS_RECEIVE_NO_RESPONSE_DATA: u32 = 2150859048u32;
pub const ERROR_WINRS_SHELLCOMMAND_CLIENTID_NOT_VALID: u32 = 2150859220u32;
pub const ERROR_WINRS_SHELLCOMMAND_CLIENTID_RESOURCE_CONFLICT: u32 = 2150859222u32;
pub const ERROR_WINRS_SHELLCOMMAND_DISCONNECT_OPERATION_NOT_VALID: u32 = 2150859224u32;
pub const ERROR_WINRS_SHELLCOMMAND_RECONNECT_OPERATION_NOT_VALID: u32 = 2150859219u32;
pub const ERROR_WINRS_SHELL_CLIENTID_NOT_VALID: u32 = 2150859221u32;
pub const ERROR_WINRS_SHELL_CLIENTID_RESOURCE_CONFLICT: u32 = 2150859223u32;
pub const ERROR_WINRS_SHELL_CLIENTSESSIONID_MISMATCH: u32 = 2150859206u32;
pub const ERROR_WINRS_SHELL_CONNECTED_TO_DIFFERENT_CLIENT: u32 = 2150859213u32;
pub const ERROR_WINRS_SHELL_DISCONNECTED: u32 = 2150859204u32;
pub const ERROR_WINRS_SHELL_DISCONNECT_NOT_SUPPORTED: u32 = 2150859205u32;
pub const ERROR_WINRS_SHELL_DISCONNECT_OPERATION_NOT_GRACEFUL: u32 = 2150859214u32;
pub const ERROR_WINRS_SHELL_DISCONNECT_OPERATION_NOT_VALID: u32 = 2150859215u32;
pub const ERROR_WINRS_SHELL_RECONNECT_OPERATION_NOT_VALID: u32 = 2150859216u32;
pub const ERROR_WINRS_SHELL_URI_INVALID: u32 = 2150859099u32;
pub const ERROR_WSMAN_ACK_NOT_SUPPORTED: u32 = 2150858853u32;
pub const ERROR_WSMAN_ACTION_MISMATCH: u32 = 2150858801u32;
pub const ERROR_WSMAN_ACTION_NOT_SUPPORTED: u32 = 2150858771u32;
pub const ERROR_WSMAN_ADDOBJECT_MISSING_EPR: u32 = 2150859045u32;
pub const ERROR_WSMAN_ADDOBJECT_MISSING_OBJECT: u32 = 2150859044u32;
pub const ERROR_WSMAN_ALREADY_EXISTS: u32 = 2150858803u32;
pub const ERROR_WSMAN_AMBIGUOUS_SELECTORS: u32 = 2150858846u32;
pub const ERROR_WSMAN_AUTHENTICATION_INVALID_FLAG: u32 = 2150859077u32;
pub const ERROR_WSMAN_AUTHORIZATION_MODE_NOT_SUPPORTED: u32 = 2150858852u32;
pub const ERROR_WSMAN_BAD_METHOD: u32 = 2150858868u32;
pub const ERROR_WSMAN_BATCHSIZE_TOO_SMALL: u32 = 2150858919u32;
pub const ERROR_WSMAN_BATCH_COMPLETE: u32 = 2150858756u32;
pub const ERROR_WSMAN_BOOKMARKS_NOT_SUPPORTED: u32 = 2150858859u32;
pub const ERROR_WSMAN_BOOKMARK_EXPIRED: u32 = 2150858832u32;
pub const ERROR_WSMAN_CANNOT_CHANGE_KEYS: u32 = 2150858989u32;
pub const ERROR_WSMAN_CANNOT_DECRYPT: u32 = 2150859001u32;
pub const ERROR_WSMAN_CANNOT_PROCESS_FILTER: u32 = 2150859042u32;
pub const ERROR_WSMAN_CANNOT_USE_ALLOW_NEGOTIATE_IMPLICIT_CREDENTIALS_FOR_HTTP: u32 = 2150859184u32;
pub const ERROR_WSMAN_CANNOT_USE_CERTIFICATES_FOR_HTTP: u32 = 2150858968u32;
pub const ERROR_WSMAN_CANNOT_USE_PROXY_SETTINGS_FOR_CREDSSP: u32 = 2150859187u32;
pub const ERROR_WSMAN_CANNOT_USE_PROXY_SETTINGS_FOR_HTTP: u32 = 2150859185u32;
pub const ERROR_WSMAN_CANNOT_USE_PROXY_SETTINGS_FOR_KERBEROS: u32 = 2150859186u32;
pub const ERROR_WSMAN_CERTMAPPING_CONFIGLIMIT_EXCEEDED: u32 = 2150859091u32;
pub const ERROR_WSMAN_CERTMAPPING_CREDENTIAL_MANAGEMENT_FAILIED: u32 = 2150859262u32;
pub const ERROR_WSMAN_CERTMAPPING_INVALIDISSUERKEY: u32 = 2150859106u32;
pub const ERROR_WSMAN_CERTMAPPING_INVALIDSUBJECTKEY: u32 = 2150859105u32;
pub const ERROR_WSMAN_CERTMAPPING_INVALIDUSERCREDENTIALS: u32 = 2150859092u32;
pub const ERROR_WSMAN_CERTMAPPING_PASSWORDBLANK: u32 = 2150859115u32;
pub const ERROR_WSMAN_CERTMAPPING_PASSWORDTOOLONG: u32 = 2150859114u32;
pub const ERROR_WSMAN_CERTMAPPING_PASSWORDUSERTUPLE: u32 = 2150859116u32;
pub const ERROR_WSMAN_CERT_INVALID_USAGE: u32 = 2150858990u32;
pub const ERROR_WSMAN_CERT_INVALID_USAGE_CLIENT: u32 = 2150859093u32;
pub const ERROR_WSMAN_CERT_MISSING_AUTH_FLAG: u32 = 2150859094u32;
pub const ERROR_WSMAN_CERT_MULTIPLE_CREDENTIALS_FLAG: u32 = 2150859095u32;
pub const ERROR_WSMAN_CERT_NOT_FOUND: u32 = 2150858882u32;
pub const ERROR_WSMAN_CERT_THUMBPRINT_BLANK: u32 = 2150858983u32;
pub const ERROR_WSMAN_CERT_THUMBPRINT_NOT_BLANK: u32 = 2150858982u32;
pub const ERROR_WSMAN_CHARACTER_SET: u32 = 2150858828u32;
pub const ERROR_WSMAN_CLIENT_ALLOWFRESHCREDENTIALS: u32 = 2150859171u32;
pub const ERROR_WSMAN_CLIENT_ALLOWFRESHCREDENTIALS_NTLMONLY: u32 = 2150859172u32;
pub const ERROR_WSMAN_CLIENT_BASIC_AUTHENTICATION_DISABLED: u32 = 2150858975u32;
pub const ERROR_WSMAN_CLIENT_BATCH_ITEMS_TOO_SMALL: u32 = 2150858946u32;
pub const ERROR_WSMAN_CLIENT_BLANK_ACTION_URI: u32 = 2150858948u32;
pub const ERROR_WSMAN_CLIENT_BLANK_INPUT_XML: u32 = 2150858945u32;
pub const ERROR_WSMAN_CLIENT_BLANK_URI: u32 = 2150858943u32;
pub const ERROR_WSMAN_CLIENT_CERTIFICATES_AUTHENTICATION_DISABLED: u32 = 2150858979u32;
pub const ERROR_WSMAN_CLIENT_CERT_NEEDED: u32 = 2150858932u32;
pub const ERROR_WSMAN_CLIENT_CERT_UNKNOWN_LOCATION: u32 = 2150858934u32;
pub const ERROR_WSMAN_CLIENT_CERT_UNKNOWN_TYPE: u32 = 2150858933u32;
pub const ERROR_WSMAN_CLIENT_CERT_UNNEEDED_CREDS: u32 = 2150858927u32;
pub const ERROR_WSMAN_CLIENT_CERT_UNNEEDED_USERNAME: u32 = 2150858929u32;
pub const ERROR_WSMAN_CLIENT_CLOSECOMMAND_NULL_PARAM: u32 = 2150859135u32;
pub const ERROR_WSMAN_CLIENT_CLOSESHELL_NULL_PARAM: u32 = 2150859134u32;
pub const ERROR_WSMAN_CLIENT_COMPRESSION_INVALID_OPTION: u32 = 2150858957u32;
pub const ERROR_WSMAN_CLIENT_CONNECTCOMMAND_NULL_PARAM: u32 = 2150859210u32;
pub const ERROR_WSMAN_CLIENT_CONNECTSHELL_NULL_PARAM: u32 = 2150859209u32;
pub const ERROR_WSMAN_CLIENT_CONSTRUCTERROR_NULL_PARAM: u32 = 2150858965u32;
pub const ERROR_WSMAN_CLIENT_CREATESESSION_NULL_PARAM: u32 = 2150858938u32;
pub const ERROR_WSMAN_CLIENT_CREATESHELL_NAME_INVALID: u32 = 2150859202u32;
pub const ERROR_WSMAN_CLIENT_CREATESHELL_NULL_PARAM: u32 = 2150859130u32;
pub const ERROR_WSMAN_CLIENT_CREDENTIALS_FLAG_NEEDED: u32 = 2150858931u32;
pub const ERROR_WSMAN_CLIENT_CREDENTIALS_FOR_DEFAULT_AUTHENTICATION: u32 = 2150859078u32;
pub const ERROR_WSMAN_CLIENT_CREDENTIALS_FOR_PROXY_AUTHENTICATION: u32 = 2150859163u32;
pub const ERROR_WSMAN_CLIENT_CREDENTIALS_NEEDED: u32 = 2150858930u32;
pub const ERROR_WSMAN_CLIENT_CREDSSP_AUTHENTICATION_DISABLED: u32 = 2150859170u32;
pub const ERROR_WSMAN_CLIENT_DECODEOBJECT_NULL_PARAM: u32 = 2150858961u32;
pub const ERROR_WSMAN_CLIENT_DELIVERENDSUBSCRIPTION_NULL_PARAM: u32 = 2150858958u32;
pub const ERROR_WSMAN_CLIENT_DELIVEREVENTS_NULL_PARAM: u32 = 2150858959u32;
pub const ERROR_WSMAN_CLIENT_DIGEST_AUTHENTICATION_DISABLED: u32 = 2150858976u32;
pub const ERROR_WSMAN_CLIENT_DISABLE_LOOPBACK_WITH_EXPLICIT_CREDENTIALS: u32 = 2150859073u32;
pub const ERROR_WSMAN_CLIENT_DISCONNECTSHELL_NULL_PARAM: u32 = 2150859207u32;
pub const ERROR_WSMAN_CLIENT_ENCODEOBJECT_NULL_PARAM: u32 = 2150858962u32;
pub const ERROR_WSMAN_CLIENT_ENUMERATE_NULL_PARAM: u32 = 2150858939u32;
pub const ERROR_WSMAN_CLIENT_ENUMERATORADDEVENT_NULL_PARAM: u32 = 2150859043u32;
pub const ERROR_WSMAN_CLIENT_ENUMERATORADDOBJECT_NULL_PARAM: u32 = 2150858963u32;
pub const ERROR_WSMAN_CLIENT_ENUMERATORNEXTOBJECT_NULL_PARAM: u32 = 2150858964u32;
pub const ERROR_WSMAN_CLIENT_ENUM_RECEIVED_TOO_MANY_ITEMS: u32 = 2150859075u32;
pub const ERROR_WSMAN_CLIENT_GETBOOKMARK_NULL_PARAM: u32 = 2150858960u32;
pub const ERROR_WSMAN_CLIENT_GETERRORMESSAGE_NULL_PARAM: u32 = 2150859158u32;
pub const ERROR_WSMAN_CLIENT_GETSESSIONOPTION_DWORD_INVALID_PARAM: u32 = 2150859167u32;
pub const ERROR_WSMAN_CLIENT_GETSESSIONOPTION_DWORD_NULL_PARAM: u32 = 2150859166u32;
pub const ERROR_WSMAN_CLIENT_GETSESSIONOPTION_INVALID_PARAM: u32 = 2150859129u32;
pub const ERROR_WSMAN_CLIENT_GETSESSIONOPTION_STRING_INVALID_PARAM: u32 = 2150859168u32;
pub const ERROR_WSMAN_CLIENT_INITIALIZE_NULL_PARAM: u32 = 2150859124u32;
pub const ERROR_WSMAN_CLIENT_INVALID_CERT: u32 = 2150858935u32;
pub const ERROR_WSMAN_CLIENT_INVALID_CERT_DNS_OR_UPN: u32 = 2150859080u32;
pub const ERROR_WSMAN_CLIENT_INVALID_CLOSE_COMMAND_FLAG: u32 = 2150859133u32;
pub const ERROR_WSMAN_CLIENT_INVALID_CLOSE_SHELL_FLAG: u32 = 2150859132u32;
pub const ERROR_WSMAN_CLIENT_INVALID_CREATE_SHELL_FLAG: u32 = 2150859131u32;
pub const ERROR_WSMAN_CLIENT_INVALID_DEINIT_APPLICATION_FLAG: u32 = 2150859126u32;
pub const ERROR_WSMAN_CLIENT_INVALID_DELIVERY_RETRY: u32 = 2150859108u32;
pub const ERROR_WSMAN_CLIENT_INVALID_DISABLE_LOOPBACK: u32 = 2150859074u32;
pub const ERROR_WSMAN_CLIENT_INVALID_DISCONNECT_SHELL_FLAG: u32 = 2150859226u32;
pub const ERROR_WSMAN_CLIENT_INVALID_FLAG: u32 = 2150858924u32;
pub const ERROR_WSMAN_CLIENT_INVALID_GETERRORMESSAGE_FLAG: u32 = 2150859160u32;
pub const ERROR_WSMAN_CLIENT_INVALID_INIT_APPLICATION_FLAG: u32 = 2150859125u32;
pub const ERROR_WSMAN_CLIENT_INVALID_LANGUAGE_CODE: u32 = 2150859159u32;
pub const ERROR_WSMAN_CLIENT_INVALID_LOCALE: u32 = 2150859156u32;
pub const ERROR_WSMAN_CLIENT_INVALID_RECEIVE_SHELL_FLAG: u32 = 2150859150u32;
pub const ERROR_WSMAN_CLIENT_INVALID_RESOURCE_LOCATOR: u32 = 2150858944u32;
pub const ERROR_WSMAN_CLIENT_INVALID_RUNCOMMAND_FLAG: u32 = 2150859137u32;
pub const ERROR_WSMAN_CLIENT_INVALID_SEND_SHELL_FLAG: u32 = 2150859145u32;
pub const ERROR_WSMAN_CLIENT_INVALID_SEND_SHELL_PARAMETER: u32 = 2150859146u32;
pub const ERROR_WSMAN_CLIENT_INVALID_SHELL_COMMAND_PAIR: u32 = 2150859227u32;
pub const ERROR_WSMAN_CLIENT_INVALID_SIGNAL_SHELL_FLAG: u32 = 2150859143u32;
pub const ERROR_WSMAN_CLIENT_INVALID_UI_LANGUAGE: u32 = 2150859157u32;
pub const ERROR_WSMAN_CLIENT_KERBEROS_AUTHENTICATION_DISABLED: u32 = 2150858978u32;
pub const ERROR_WSMAN_CLIENT_LOCAL_INVALID_CONNECTION_OPTIONS: u32 = 2150858937u32;
pub const ERROR_WSMAN_CLIENT_LOCAL_INVALID_CREDS: u32 = 2150858936u32;
pub const ERROR_WSMAN_CLIENT_MAX_CHARS_TOO_SMALL: u32 = 2150858947u32;
pub const ERROR_WSMAN_CLIENT_MISSING_EXPIRATION: u32 = 2150858953u32;
pub const ERROR_WSMAN_CLIENT_MULTIPLE_AUTH_FLAGS: u32 = 2150858925u32;
pub const ERROR_WSMAN_CLIENT_MULTIPLE_DELIVERY_MODES: u32 = 2150858950u32;
pub const ERROR_WSMAN_CLIENT_MULTIPLE_ENUM_MODE_FLAGS: u32 = 2150859039u32;
pub const ERROR_WSMAN_CLIENT_MULTIPLE_ENVELOPE_POLICIES: u32 = 2150858951u32;
pub const ERROR_WSMAN_CLIENT_MULTIPLE_PROXY_AUTH_FLAGS: u32 = 2150859188u32;
pub const ERROR_WSMAN_CLIENT_NEGOTIATE_AUTHENTICATION_DISABLED: u32 = 2150858977u32;
pub const ERROR_WSMAN_CLIENT_NO_HANDLE: u32 = 2150858942u32;
pub const ERROR_WSMAN_CLIENT_NO_SOURCES: u32 = 2150859111u32;
pub const ERROR_WSMAN_CLIENT_NULL_ISSUERS: u32 = 2150859110u32;
pub const ERROR_WSMAN_CLIENT_NULL_PUBLISHERS: u32 = 2150859109u32;
pub const ERROR_WSMAN_CLIENT_NULL_RESULT_PARAM: u32 = 2150858941u32;
pub const ERROR_WSMAN_CLIENT_PULL_INVALID_FLAGS: u32 = 2150858954u32;
pub const ERROR_WSMAN_CLIENT_PUSH_HOST_TOO_LONG: u32 = 2150858956u32;
pub const ERROR_WSMAN_CLIENT_PUSH_UNSUPPORTED_TRANSPORT: u32 = 2150858955u32;
pub const ERROR_WSMAN_CLIENT_RECEIVE_NULL_PARAM: u32 = 2150859148u32;
pub const ERROR_WSMAN_CLIENT_RECONNECTSHELLCOMMAND_NULL_PARAM: u32 = 2150859218u32;
pub const ERROR_WSMAN_CLIENT_RECONNECTSHELL_NULL_PARAM: u32 = 2150859208u32;
pub const ERROR_WSMAN_CLIENT_RUNCOMMAND_NOTCOMPLETED: u32 = 2150859138u32;
pub const ERROR_WSMAN_CLIENT_RUNCOMMAND_NULL_PARAM: u32 = 2150859136u32;
pub const ERROR_WSMAN_CLIENT_SEND_NULL_PARAM: u32 = 2150859144u32;
pub const ERROR_WSMAN_CLIENT_SESSION_UNUSABLE: u32 = 2150859258u32;
pub const ERROR_WSMAN_CLIENT_SETSESSIONOPTION_INVALID_PARAM: u32 = 2150859128u32;
pub const ERROR_WSMAN_CLIENT_SETSESSIONOPTION_NULL_PARAM: u32 = 2150859127u32;
pub const ERROR_WSMAN_CLIENT_SIGNAL_NULL_PARAM: u32 = 2150859142u32;
pub const ERROR_WSMAN_CLIENT_SPN_WRONG_AUTH: u32 = 2150858926u32;
pub const ERROR_WSMAN_CLIENT_SUBSCRIBE_NULL_PARAM: u32 = 2150858940u32;
pub const ERROR_WSMAN_CLIENT_UNENCRYPTED_DISABLED: u32 = 2150858974u32;
pub const ERROR_WSMAN_CLIENT_UNENCRYPTED_HTTP_ONLY: u32 = 2150858967u32;
pub const ERROR_WSMAN_CLIENT_UNKNOWN_EXPIRATION_TYPE: u32 = 2150858952u32;
pub const ERROR_WSMAN_CLIENT_USERNAME_AND_PASSWORD_NEEDED: u32 = 2150859079u32;
pub const ERROR_WSMAN_CLIENT_USERNAME_PASSWORD_NEEDED: u32 = 2150858928u32;
pub const ERROR_WSMAN_CLIENT_WORKGROUP_NO_KERBEROS: u32 = 2150859020u32;
pub const ERROR_WSMAN_CLIENT_ZERO_HEARTBEAT: u32 = 2150858949u32;
pub const ERROR_WSMAN_COMMAND_ALREADY_CLOSED: u32 = 2150859087u32;
pub const ERROR_WSMAN_COMMAND_TERMINATED: u32 = 2150859212u32;
pub const ERROR_WSMAN_CONCURRENCY: u32 = 2150858802u32;
pub const ERROR_WSMAN_CONFIG_CANNOT_CHANGE_CERTMAPPING_KEYS: u32 = 2150859122u32;
pub const ERROR_WSMAN_CONFIG_CANNOT_CHANGE_GPO_CONTROLLED_SETTING: u32 = 2150858890u32;
pub const ERROR_WSMAN_CONFIG_CANNOT_CHANGE_MUTUAL: u32 = 2150858885u32;
pub const ERROR_WSMAN_CONFIG_CANNOT_SHARE_SSL_CONFIG: u32 = 2150858984u32;
pub const ERROR_WSMAN_CONFIG_CERT_CN_DOES_NOT_MATCH_HOSTNAME: u32 = 2150858985u32;
pub const ERROR_WSMAN_CONFIG_CORRUPTED: u32 = 2150858757u32;
pub const ERROR_WSMAN_CONFIG_GROUP_POLICY_CHANGE_NOTIFICATION_SUBSCRIPTION_FAILED: u32 = 2150859217u32;
pub const ERROR_WSMAN_CONFIG_HOSTNAME_CHANGE_WITHOUT_CERT: u32 = 2150858986u32;
pub const ERROR_WSMAN_CONFIG_PORT_INVALID: u32 = 2150858972u32;
pub const ERROR_WSMAN_CONFIG_READONLY_PROPERTY: u32 = 2150859071u32;
pub const ERROR_WSMAN_CONFIG_SHELLURI_INVALID_OPERATION_ON_KEY: u32 = 2150859119u32;
pub const ERROR_WSMAN_CONFIG_SHELLURI_INVALID_PROCESSPATH: u32 = 2150859098u32;
pub const ERROR_WSMAN_CONFIG_SHELL_URI_CMDSHELLURI_NOTPERMITTED: u32 = 2150859097u32;
pub const ERROR_WSMAN_CONFIG_SHELL_URI_INVALID: u32 = 2150859096u32;
pub const ERROR_WSMAN_CONFIG_THUMBPRINT_SHOULD_BE_EMPTY: u32 = 2150858987u32;
pub const ERROR_WSMAN_CONNECTIONSTR_INVALID: u32 = 2150858969u32;
pub const ERROR_WSMAN_CONNECTOR_GET: u32 = 2150858873u32;
pub const ERROR_WSMAN_CREATESHELL_NULL_ENVIRONMENT_VARIABLE_NAME: u32 = 2150859081u32;
pub const ERROR_WSMAN_CREATESHELL_NULL_STREAMID: u32 = 2150859083u32;
pub const ERROR_WSMAN_CREATESHELL_RUNAS_FAILED: u32 = 2150859231u32;
pub const ERROR_WSMAN_CREATE_RESPONSE_NO_EPR: u32 = 2150858992u32;
pub const ERROR_WSMAN_CREDSSP_USERNAME_PASSWORD_NEEDED: u32 = 2150859169u32;
pub const ERROR_WSMAN_CREDS_PASSED_WITH_NO_AUTH_FLAG: u32 = 2150858923u32;
pub const ERROR_WSMAN_CUSTOMREMOTESHELL_DEPRECATED: u32 = 2150859196u32;
pub const ERROR_WSMAN_DEFAULTAUTH_IPADDRESS: u32 = 2150859195u32;
pub const ERROR_WSMAN_DELIVERY_REFUSED: u32 = 2150858804u32;
pub const ERROR_WSMAN_DELIVERY_RETRIES_NOT_SUPPORTED: u32 = 2150858857u32;
pub const ERROR_WSMAN_DELIVER_IN_PROGRESS: u32 = 2150858821u32;
pub const ERROR_WSMAN_DEPRECATED_CONFIG_SETTING: u32 = 2150859182u32;
pub const ERROR_WSMAN_DESERIALIZE_CLASS: u32 = 2150859244u32;
pub const ERROR_WSMAN_DESTINATION_INVALID: u32 = 2150859256u32;
pub const ERROR_WSMAN_DESTINATION_UNREACHABLE: u32 = 2150858770u32;
pub const ERROR_WSMAN_DIFFERENT_AUTHZ_TOKEN: u32 = 2150859177u32;
pub const ERROR_WSMAN_DIFFERENT_CIM_SELECTOR: u32 = 2150859067u32;
pub const ERROR_WSMAN_DUPLICATE_SELECTORS: u32 = 2150858847u32;
pub const ERROR_WSMAN_ENCODING_LIMIT: u32 = 2150858805u32;
pub const ERROR_WSMAN_ENCODING_TYPE: u32 = 2150859033u32;
pub const ERROR_WSMAN_ENDPOINT_UNAVAILABLE: u32 = 2150858772u32;
pub const ERROR_WSMAN_ENDPOINT_UNAVAILABLE_INVALID_VALUE: u32 = 2150859034u32;
pub const ERROR_WSMAN_ENUMERATE_CANNOT_PROCESS_FILTER: u32 = 2150858778u32;
pub const ERROR_WSMAN_ENUMERATE_FILTERING_NOT_SUPPORTED: u32 = 2150858776u32;
pub const ERROR_WSMAN_ENUMERATE_FILTER_DIALECT_REQUESTED_UNAVAILABLE: u32 = 2150858777u32;
pub const ERROR_WSMAN_ENUMERATE_INVALID_ENUMERATION_CONTEXT: u32 = 2150858779u32;
pub const ERROR_WSMAN_ENUMERATE_INVALID_EXPIRATION_TIME: u32 = 2150858774u32;
pub const ERROR_WSMAN_ENUMERATE_SHELLCOMAMNDS_FILTER_EXPECTED: u32 = 2150859200u32;
pub const ERROR_WSMAN_ENUMERATE_SHELLCOMMANDS_EPRS_NOTSUPPORTED: u32 = 2150859201u32;
pub const ERROR_WSMAN_ENUMERATE_TIMED_OUT: u32 = 2150858780u32;
pub const ERROR_WSMAN_ENUMERATE_UNABLE_TO_RENEW: u32 = 2150858781u32;
pub const ERROR_WSMAN_ENUMERATE_UNSUPPORTED_EXPIRATION_TIME: u32 = 2150858775u32;
pub const ERROR_WSMAN_ENUMERATE_UNSUPPORTED_EXPIRATION_TYPE: u32 = 2150859036u32;
pub const ERROR_WSMAN_ENUMERATE_WMI_INVALID_KEY: u32 = 2150859016u32;
pub const ERROR_WSMAN_ENUMERATION_CLOSED: u32 = 2150858759u32;
pub const ERROR_WSMAN_ENUMERATION_INITIALIZING: u32 = 2150858872u32;
pub const ERROR_WSMAN_ENUMERATION_INVALID: u32 = 2150858884u32;
pub const ERROR_WSMAN_ENUMERATION_MODE_UNSUPPORTED: u32 = 2150858886u32;
pub const ERROR_WSMAN_ENVELOPE_TOO_LARGE: u32 = 2150858790u32;
pub const ERROR_WSMAN_EPR_NESTING_EXCEEDED: u32 = 2150858879u32;
pub const ERROR_WSMAN_EVENTING_CONCURRENT_CLIENT_RECEIVE: u32 = 2150858891u32;
pub const ERROR_WSMAN_EVENTING_DELIVERYFAILED_FROMSOURCE: u32 = 2150858908u32;
pub const ERROR_WSMAN_EVENTING_DELIVERY_MODE_REQUESTED_INVALID: u32 = 2150858920u32;
pub const ERROR_WSMAN_EVENTING_DELIVERY_MODE_REQUESTED_UNAVAILABLE: u32 = 2150858782u32;
pub const ERROR_WSMAN_EVENTING_FAST_SENDER: u32 = 2150858892u32;
pub const ERROR_WSMAN_EVENTING_FILTERING_NOT_SUPPORTED: u32 = 2150858785u32;
pub const ERROR_WSMAN_EVENTING_FILTERING_REQUESTED_UNAVAILABLE: u32 = 2150858786u32;
pub const ERROR_WSMAN_EVENTING_INCOMPATIBLE_BATCHPARAMS_AND_DELIVERYMODE: u32 = 2150858900u32;
pub const ERROR_WSMAN_EVENTING_INSECURE_PUSHSUBSCRIPTION_CONNECTION: u32 = 2150858893u32;
pub const ERROR_WSMAN_EVENTING_INVALID_ENCODING_IN_DELIVERY: u32 = 2150859255u32;
pub const ERROR_WSMAN_EVENTING_INVALID_ENDTO_ADDRESSS: u32 = 2150858902u32;
pub const ERROR_WSMAN_EVENTING_INVALID_EVENTSOURCE: u32 = 2150858894u32;
pub const ERROR_WSMAN_EVENTING_INVALID_EXPIRATION_TIME: u32 = 2150858783u32;
pub const ERROR_WSMAN_EVENTING_INVALID_HEARTBEAT: u32 = 2150858916u32;
pub const ERROR_WSMAN_EVENTING_INVALID_INCOMING_EVENT_PACKET_HEADER: u32 = 2150858903u32;
pub const ERROR_WSMAN_EVENTING_INVALID_LOCALE_IN_DELIVERY: u32 = 2150858915u32;
pub const ERROR_WSMAN_EVENTING_INVALID_MESSAGE: u32 = 2150858789u32;
pub const ERROR_WSMAN_EVENTING_INVALID_NOTIFYTO_ADDRESSS: u32 = 2150858914u32;
pub const ERROR_WSMAN_EVENTING_LOOPBACK_TESTFAILED: u32 = 2150858901u32;
pub const ERROR_WSMAN_EVENTING_MISSING_LOCALE_IN_DELIVERY: u32 = 2150859028u32;
pub const ERROR_WSMAN_EVENTING_MISSING_NOTIFYTO: u32 = 2150858912u32;
pub const ERROR_WSMAN_EVENTING_MISSING_NOTIFYTO_ADDRESSS: u32 = 2150858913u32;
pub const ERROR_WSMAN_EVENTING_NOMATCHING_LISTENER: u32 = 2150858895u32;
pub const ERROR_WSMAN_EVENTING_NONDOMAINJOINED_COLLECTOR: u32 = 2150859070u32;
pub const ERROR_WSMAN_EVENTING_NONDOMAINJOINED_PUBLISHER: u32 = 2150859069u32;
pub const ERROR_WSMAN_EVENTING_SOURCE_UNABLE_TO_PROCESS: u32 = 2150858787u32;
pub const ERROR_WSMAN_EVENTING_SUBSCRIPTIONCLOSED_BYREMOTESERVICE: u32 = 2150858907u32;
pub const ERROR_WSMAN_EVENTING_SUBSCRIPTION_CANCELLED_BYSOURCE: u32 = 2150858910u32;
pub const ERROR_WSMAN_EVENTING_UNABLE_TO_RENEW: u32 = 2150858788u32;
pub const ERROR_WSMAN_EVENTING_UNSUPPORTED_EXPIRATION_TYPE: u32 = 2150858784u32;
pub const ERROR_WSMAN_EXPIRATION_TIME_NOT_SUPPORTED: u32 = 2150858856u32;
pub const ERROR_WSMAN_EXPLICIT_CREDENTIALS_REQUIRED: u32 = 2150858981u32;
pub const ERROR_WSMAN_FAILED_AUTHENTICATION: u32 = 2150858806u32;
pub const ERROR_WSMAN_FEATURE_DEPRECATED: u32 = 2150859197u32;
pub const ERROR_WSMAN_FILE_NOT_PRESENT: u32 = 2150859154u32;
pub const ERROR_WSMAN_FILTERING_REQUIRED: u32 = 2150858831u32;
pub const ERROR_WSMAN_FILTERING_REQUIRED_NOT_SUPPORTED: u32 = 2150858864u32;
// WinRM (WS-Management) error codes, alphabetized.
// All values are failure codes in the 0x8033xxxx range
// (ERROR_WSMAN_RESOURCE_NOT_FOUND = 2150858752 = 0x80338000).
pub const ERROR_WSMAN_FORMAT_MISMATCH_NOT_SUPPORTED: u32 = 2150858866u32;
pub const ERROR_WSMAN_FORMAT_SECURITY_TOKEN_NOT_SUPPORTED: u32 = 2150858867u32;
pub const ERROR_WSMAN_FRAGMENT_DIALECT_REQUESTED_UNAVAILABLE: u32 = 2150858896u32;
pub const ERROR_WSMAN_FRAGMENT_TRANSFER_NOT_SUPPORTED: u32 = 2150858871u32;
pub const ERROR_WSMAN_GETCLASS: u32 = 2150859245u32;
pub const ERROR_WSMAN_HEARTBEATS_NOT_SUPPORTED: u32 = 2150858858u32;
pub const ERROR_WSMAN_HTML_ERROR: u32 = 2150859123u32;
pub const ERROR_WSMAN_HTTP_CONTENT_TYPE_MISSMATCH_RESPONSE_DATA: u32 = 2150859000u32;
pub const ERROR_WSMAN_HTTP_INVALID_CONTENT_TYPE_IN_RESPONSE_DATA: u32 = 2150858999u32;
pub const ERROR_WSMAN_HTTP_NOT_FOUND_STATUS: u32 = 2150859027u32;
pub const ERROR_WSMAN_HTTP_NO_RESPONSE_DATA: u32 = 2150858997u32;
pub const ERROR_WSMAN_HTTP_REQUEST_TOO_LARGE_STATUS: u32 = 2150859025u32;
pub const ERROR_WSMAN_HTTP_SERVICE_UNAVAILABLE_STATUS: u32 = 2150859026u32;
pub const ERROR_WSMAN_HTTP_STATUS_BAD_REQUEST: u32 = 2150859121u32;
pub const ERROR_WSMAN_HTTP_STATUS_SERVER_ERROR: u32 = 2150859120u32;
pub const ERROR_WSMAN_IISCONFIGURATION_READ_FAILED: u32 = 2150859155u32;
pub const ERROR_WSMAN_INCOMPATIBLE_EPR: u32 = 2150858807u32;
pub const ERROR_WSMAN_INEXISTENT_MAC_ADDRESS: u32 = 2150858875u32;
pub const ERROR_WSMAN_INSECURE_ADDRESS_NOT_SUPPORTED: u32 = 2150858865u32;
pub const ERROR_WSMAN_INSUFFCIENT_SELECTORS: u32 = 2150858842u32;
pub const ERROR_WSMAN_INSUFFICIENT_METADATA_FOR_BASIC: u32 = 2150859251u32;
pub const ERROR_WSMAN_INVALID_ACTIONURI: u32 = 2150858753u32;
pub const ERROR_WSMAN_INVALID_BATCH_PARAMETER: u32 = 2150858799u32;
pub const ERROR_WSMAN_INVALID_BATCH_SETTINGS_PARAMETER: u32 = 2150859021u32;
pub const ERROR_WSMAN_INVALID_BOOKMARK: u32 = 2150858808u32;
pub const ERROR_WSMAN_INVALID_CHARACTERS_IN_RESPONSE: u32 = 2150859018u32;
pub const ERROR_WSMAN_INVALID_CONFIGSDDL_URL: u32 = 2150859199u32;
pub const ERROR_WSMAN_INVALID_CONNECTIONRETRY: u32 = 2150859103u32;
pub const ERROR_WSMAN_INVALID_FILEPATH: u32 = 2150859153u32;
pub const ERROR_WSMAN_INVALID_FILTER_XML: u32 = 2150859015u32;
pub const ERROR_WSMAN_INVALID_FRAGMENT_DIALECT: u32 = 2150858898u32;
pub const ERROR_WSMAN_INVALID_FRAGMENT_PATH: u32 = 2150858899u32;
pub const ERROR_WSMAN_INVALID_FRAGMENT_PATH_BLANK: u32 = 2150859017u32;
pub const ERROR_WSMAN_INVALID_HEADER: u32 = 2150859035u32;
pub const ERROR_WSMAN_INVALID_HOSTNAME_PATTERN: u32 = 2150858911u32;
pub const ERROR_WSMAN_INVALID_IPFILTER: u32 = 2150858988u32;
pub const ERROR_WSMAN_INVALID_KEY: u32 = 2150858820u32;
pub const ERROR_WSMAN_INVALID_LITERAL_URI: u32 = 2150859252u32;
pub const ERROR_WSMAN_INVALID_MESSAGE_INFORMATION_HEADER: u32 = 2150858767u32;
pub const ERROR_WSMAN_INVALID_OPTIONS: u32 = 2150858809u32;
pub const ERROR_WSMAN_INVALID_OPTIONSET: u32 = 2150859140u32;
pub const ERROR_WSMAN_INVALID_OPTION_NO_PROXY_SERVER: u32 = 2150859165u32;
pub const ERROR_WSMAN_INVALID_PARAMETER: u32 = 2150858810u32;
pub const ERROR_WSMAN_INVALID_PARAMETER_NAME: u32 = 2150858837u32;
pub const ERROR_WSMAN_INVALID_PROPOSED_ID: u32 = 2150858798u32;
pub const ERROR_WSMAN_INVALID_PROVIDER_RESPONSE: u32 = 2150859117u32;
pub const ERROR_WSMAN_INVALID_PUBLISHERS_TYPE: u32 = 2150859107u32;
pub const ERROR_WSMAN_INVALID_REDIRECT_ERROR: u32 = 2150859189u32;
pub const ERROR_WSMAN_INVALID_REPRESENTATION: u32 = 2150858773u32;
pub const ERROR_WSMAN_INVALID_RESOURCE_URI: u32 = 2150858811u32;
pub const ERROR_WSMAN_INVALID_RESUMPTION_CONTEXT: u32 = 2150858792u32;
pub const ERROR_WSMAN_INVALID_SECURITY_DESCRIPTOR: u32 = 2150859100u32;
pub const ERROR_WSMAN_INVALID_SELECTORS: u32 = 2150858813u32;
pub const ERROR_WSMAN_INVALID_SELECTOR_NAME: u32 = 2150859032u32;
pub const ERROR_WSMAN_INVALID_SELECTOR_VALUE: u32 = 2150858845u32;
pub const ERROR_WSMAN_INVALID_SOAP_BODY: u32 = 2150858791u32;
pub const ERROR_WSMAN_INVALID_SUBSCRIBE_OBJECT: u32 = 2150859112u32;
pub const ERROR_WSMAN_INVALID_SUBSCRIPTION_MANAGER: u32 = 2150859006u32;
pub const ERROR_WSMAN_INVALID_SYSTEM: u32 = 2150858812u32;
pub const ERROR_WSMAN_INVALID_TARGET_RESOURCEURI: u32 = 2150858849u32;
pub const ERROR_WSMAN_INVALID_TARGET_SELECTORS: u32 = 2150858848u32;
pub const ERROR_WSMAN_INVALID_TARGET_SYSTEM: u32 = 2150858850u32;
pub const ERROR_WSMAN_INVALID_TIMEOUT_HEADER: u32 = 2150858881u32;
pub const ERROR_WSMAN_INVALID_URI: u32 = 2150858754u32;
pub const ERROR_WSMAN_INVALID_URI_WMI_ENUM_WQL: u32 = 2150859003u32;
pub const ERROR_WSMAN_INVALID_URI_WMI_SINGLETON: u32 = 2150859002u32;
pub const ERROR_WSMAN_INVALID_USESSL_PARAM: u32 = 2150859198u32;
pub const ERROR_WSMAN_INVALID_XML: u32 = 2150858819u32;
pub const ERROR_WSMAN_INVALID_XML_FRAGMENT: u32 = 2150858841u32;
pub const ERROR_WSMAN_INVALID_XML_MISSING_VALUES: u32 = 2150858839u32;
pub const ERROR_WSMAN_INVALID_XML_NAMESPACE: u32 = 2150858840u32;
pub const ERROR_WSMAN_INVALID_XML_RUNAS_DISABLED: u32 = 2150859232u32;
pub const ERROR_WSMAN_INVALID_XML_VALUES: u32 = 2150858838u32;
pub const ERROR_WSMAN_KERBEROS_IPADDRESS: u32 = 2150859019u32;
pub const ERROR_WSMAN_LISTENER_ADDRESS_INVALID: u32 = 2150858889u32;
pub const ERROR_WSMAN_LOCALE_NOT_SUPPORTED: u32 = 2150858855u32;
pub const ERROR_WSMAN_MACHINE_OPTION_REQUIRED: u32 = 2150858917u32;
pub const ERROR_WSMAN_MAXENVELOPE_POLICY_NOT_SUPPORTED: u32 = 2150858863u32;
pub const ERROR_WSMAN_MAXENVELOPE_SIZE_NOT_SUPPORTED: u32 = 2150858862u32;
pub const ERROR_WSMAN_MAXITEMS_NOT_SUPPORTED: u32 = 2150858860u32;
pub const ERROR_WSMAN_MAXTIME_NOT_SUPPORTED: u32 = 2150858861u32;
pub const ERROR_WSMAN_MAX_ELEMENTS_NOT_SUPPORTED: u32 = 2150859037u32;
pub const ERROR_WSMAN_MAX_ENVELOPE_SIZE: u32 = 2150858823u32;
pub const ERROR_WSMAN_MAX_ENVELOPE_SIZE_EXCEEDED: u32 = 2150858824u32;
pub const ERROR_WSMAN_MESSAGE_INFORMATION_HEADER_REQUIRED: u32 = 2150858769u32;
pub const ERROR_WSMAN_METADATA_REDIRECT: u32 = 2150858814u32;
pub const ERROR_WSMAN_MIN_ENVELOPE_SIZE: u32 = 2150858878u32;
pub const ERROR_WSMAN_MISSING_CLASSNAME: u32 = 2150859254u32;
pub const ERROR_WSMAN_MISSING_FRAGMENT_PATH: u32 = 2150858897u32;
pub const ERROR_WSMAN_MULTIPLE_CREDENTIALS: u32 = 2150859076u32;
pub const ERROR_WSMAN_MUSTUNDERSTAND_ON_LOCALE_UNSUPPORTED: u32 = 2150858887u32;
pub const ERROR_WSMAN_MUTUAL_AUTH_FAILED: u32 = 2150859248u32;
pub const ERROR_WSMAN_NAME_NOT_RESOLVED: u32 = 2150859193u32;
pub const ERROR_WSMAN_NETWORK_TIMEDOUT: u32 = 2150859046u32;
pub const ERROR_WSMAN_NEW_DESERIALIZER: u32 = 2150859243u32;
pub const ERROR_WSMAN_NEW_SESSION: u32 = 2150859246u32;
pub const ERROR_WSMAN_NON_PULL_SUBSCRIPTION_NOT_SUPPORTED: u32 = 2150859007u32;
pub const ERROR_WSMAN_NO_ACK: u32 = 2150858800u32;
pub const ERROR_WSMAN_NO_CERTMAPPING_OPERATION_FOR_LOCAL_SESSION: u32 = 2150859090u32;
pub const ERROR_WSMAN_NO_COMMANDID: u32 = 2150859141u32;
pub const ERROR_WSMAN_NO_COMMAND_RESPONSE: u32 = 2150859139u32;
pub const ERROR_WSMAN_NO_DHCP_ADDRESSES: u32 = 2150858877u32;
pub const ERROR_WSMAN_NO_IDENTIFY_FOR_LOCAL_SESSION: u32 = 2150859004u32;
pub const ERROR_WSMAN_NO_PUSH_SUBSCRIPTION_FOR_LOCAL_SESSION: u32 = 2150859005u32;
pub const ERROR_WSMAN_NO_RECEIVE_RESPONSE: u32 = 2150859151u32;
pub const ERROR_WSMAN_NO_UNICAST_ADDRESSES: u32 = 2150858876u32;
pub const ERROR_WSMAN_NULL_KEY: u32 = 2150859247u32;
pub const ERROR_WSMAN_OBJECTONLY_INVALID: u32 = 2150859253u32;
pub const ERROR_WSMAN_OPERATION_TIMEDOUT: u32 = 2150858793u32;
pub const ERROR_WSMAN_OPERATION_TIMEOUT_NOT_SUPPORTED: u32 = 2150858854u32;
pub const ERROR_WSMAN_OPTIONS_INVALID_NAME: u32 = 2150858834u32;
pub const ERROR_WSMAN_OPTIONS_INVALID_VALUE: u32 = 2150858835u32;
pub const ERROR_WSMAN_OPTIONS_NOT_SUPPORTED: u32 = 2150858833u32;
pub const ERROR_WSMAN_OPTION_LIMIT: u32 = 2150858827u32;
pub const ERROR_WSMAN_PARAMETER_TYPE_MISMATCH: u32 = 2150858836u32;
pub const ERROR_WSMAN_PLUGIN_CONFIGURATION_CORRUPTED: u32 = 2150859152u32;
pub const ERROR_WSMAN_PLUGIN_FAILED: u32 = 2150858883u32;
pub const ERROR_WSMAN_POLICY_CANNOT_COMPLY: u32 = 2150859102u32;
pub const ERROR_WSMAN_POLICY_CORRUPTED: u32 = 2150858888u32;
pub const ERROR_WSMAN_POLICY_TOO_COMPLEX: u32 = 2150859101u32;
pub const ERROR_WSMAN_POLYMORPHISM_MODE_UNSUPPORTED: u32 = 2150859063u32;
pub const ERROR_WSMAN_PORT_INVALID: u32 = 2150858971u32;
pub const ERROR_WSMAN_PROVIDER_FAILURE: u32 = 2150858755u32;
pub const ERROR_WSMAN_PROVIDER_LOAD_FAILED: u32 = 2150858906u32;
pub const ERROR_WSMAN_PROVSYS_NOT_SUPPORTED: u32 = 2150858921u32;
pub const ERROR_WSMAN_PROXY_ACCESS_TYPE: u32 = 2150859164u32;
pub const ERROR_WSMAN_PROXY_AUTHENTICATION_INVALID_FLAG: u32 = 2150859162u32;
pub const ERROR_WSMAN_PUBLIC_FIREWALL_PROFILE_ACTIVE: u32 = 2150859113u32;
pub const ERROR_WSMAN_PULL_IN_PROGRESS: u32 = 2150858758u32;
pub const ERROR_WSMAN_PULL_PARAMS_NOT_SAME_AS_ENUM: u32 = 2150859181u32;
pub const ERROR_WSMAN_PUSHSUBSCRIPTION_INVALIDUSERACCOUNT: u32 = 2150859068u32;
pub const ERROR_WSMAN_PUSH_SUBSCRIPTION_CONFIG_INVALID: u32 = 2150858922u32;
pub const ERROR_WSMAN_QUICK_CONFIG_FAILED_CERT_REQUIRED: u32 = 2150859029u32;
pub const ERROR_WSMAN_QUICK_CONFIG_FIREWALL_EXCEPTIONS_DISALLOWED: u32 = 2150859030u32;
pub const ERROR_WSMAN_QUICK_CONFIG_LOCAL_POLICY_CHANGE_DISALLOWED: u32 = 2150859031u32;
pub const ERROR_WSMAN_QUOTA_LIMIT: u32 = 2150858815u32;
pub const ERROR_WSMAN_QUOTA_MAX_COMMANDS_PER_SHELL_PPQ: u32 = 2150859241u32;
pub const ERROR_WSMAN_QUOTA_MAX_OPERATIONS: u32 = 2150859174u32;
pub const ERROR_WSMAN_QUOTA_MAX_OPERATIONS_USER_PPQ: u32 = 2150859240u32;
pub const ERROR_WSMAN_QUOTA_MAX_PLUGINOPERATIONS_PPQ: u32 = 2150859239u32;
pub const ERROR_WSMAN_QUOTA_MAX_PLUGINSHELLS_PPQ: u32 = 2150859238u32;
pub const ERROR_WSMAN_QUOTA_MAX_SHELLS: u32 = 2150859173u32;
pub const ERROR_WSMAN_QUOTA_MAX_SHELLS_PPQ: u32 = 2150859236u32;
pub const ERROR_WSMAN_QUOTA_MAX_SHELLUSERS: u32 = 2150859179u32;
pub const ERROR_WSMAN_QUOTA_MAX_USERS_PPQ: u32 = 2150859237u32;
pub const ERROR_WSMAN_QUOTA_MIN_REQUIREMENT_NOT_AVAILABLE_PPQ: u32 = 2150859242u32;
pub const ERROR_WSMAN_QUOTA_SYSTEM: u32 = 2150859176u32;
pub const ERROR_WSMAN_QUOTA_USER: u32 = 2150859175u32;
pub const ERROR_WSMAN_REDIRECT_LOCATION_NOT_AVAILABLE: u32 = 2150859178u32;
pub const ERROR_WSMAN_REDIRECT_REQUESTED: u32 = 2150859161u32;
pub const ERROR_WSMAN_REMOTESHELLS_NOT_ALLOWED: u32 = 2150859180u32;
pub const ERROR_WSMAN_REMOTE_CIMPATH_NOT_SUPPORTED: u32 = 2150859009u32;
pub const ERROR_WSMAN_REMOTE_CONNECTION_NOT_ALLOWED: u32 = 2150859235u32;
pub const ERROR_WSMAN_RENAME_FAILURE: u32 = 2150858816u32;
pub const ERROR_WSMAN_REQUEST_INIT_ERROR: u32 = 2150858880u32;
pub const ERROR_WSMAN_REQUEST_NOT_SUPPORTED_AT_SERVICE: u32 = 2150859064u32;
pub const ERROR_WSMAN_RESOURCE_NOT_FOUND: u32 = 2150858752u32;
pub const ERROR_WSMAN_RESPONSE_INVALID_ENUMERATION_CONTEXT: u32 = 2150858993u32;
pub const ERROR_WSMAN_RESPONSE_INVALID_MESSAGE_INFORMATION_HEADER: u32 = 2150858995u32;
pub const ERROR_WSMAN_RESPONSE_INVALID_SOAP_FAULT: u32 = 2150858998u32;
pub const ERROR_WSMAN_RESPONSE_NO_RESULTS: u32 = 2150858991u32;
pub const ERROR_WSMAN_RESPONSE_NO_SOAP_HEADER_BODY: u32 = 2150858996u32;
pub const ERROR_WSMAN_RESPONSE_NO_XML_FRAGMENT_WRAPPER: u32 = 2150858994u32;
pub const ERROR_WSMAN_RESUMPTION_NOT_SUPPORTED: u32 = 2150858794u32;
pub const ERROR_WSMAN_RESUMPTION_TYPE_NOT_SUPPORTED: u32 = 2150858795u32;
pub const ERROR_WSMAN_RUNASUSER_MANAGEDACCOUNT_LOGON_FAILED: u32 = 2150859261u32;
pub const ERROR_WSMAN_RUNAS_INVALIDUSERCREDENTIALS: u32 = 2150859203u32;
pub const ERROR_WSMAN_RUNSHELLCOMMAND_NULL_ARGUMENT: u32 = 2150859086u32;
pub const ERROR_WSMAN_SCHEMA_VALIDATION_ERROR: u32 = 2150858817u32;
pub const ERROR_WSMAN_SECURITY_UNMAPPED: u32 = 2150858909u32;
pub const ERROR_WSMAN_SELECTOR_LIMIT: u32 = 2150858826u32;
pub const ERROR_WSMAN_SELECTOR_TYPEMISMATCH: u32 = 2150858844u32;
pub const ERROR_WSMAN_SEMANTICCALLBACK_TIMEDOUT: u32 = 2150859228u32;
pub const ERROR_WSMAN_SENDHEARBEAT_EMPTY_ENUMERATOR: u32 = 2150858973u32;
pub const ERROR_WSMAN_SENDSHELLINPUT_INVALID_STREAMID_INDEX: u32 = 2150859088u32;
pub const ERROR_WSMAN_SERVER_DESTINATION_LOCALHOST: u32 = 2150859022u32;
pub const ERROR_WSMAN_SERVER_ENVELOPE_LIMIT: u32 = 2150858825u32;
pub const ERROR_WSMAN_SERVER_NONPULLSUBSCRIBE_NULL_PARAM: u32 = 2150858966u32;
pub const ERROR_WSMAN_SERVER_NOT_TRUSTED: u32 = 2150858980u32;
pub const ERROR_WSMAN_SERVICE_REMOTE_ACCESS_DISABLED: u32 = 2150859229u32;
pub const ERROR_WSMAN_SERVICE_STREAM_DISCONNECTED: u32 = 2150859230u32;
pub const ERROR_WSMAN_SESSION_ALREADY_CLOSED: u32 = 2150858904u32;
pub const ERROR_WSMAN_SHELL_ALREADY_CLOSED: u32 = 2150859082u32;
pub const ERROR_WSMAN_SHELL_INVALID_COMMAND_HANDLE: u32 = 2150859085u32;
pub const ERROR_WSMAN_SHELL_INVALID_DESIRED_STREAMS: u32 = 2150859149u32;
pub const ERROR_WSMAN_SHELL_INVALID_INPUT_STREAM: u32 = 2150859147u32;
pub const ERROR_WSMAN_SHELL_INVALID_SHELL_HANDLE: u32 = 2150859084u32;
pub const ERROR_WSMAN_SHELL_NOT_INITIALIZED: u32 = 2150859118u32;
pub const ERROR_WSMAN_SHELL_SYNCHRONOUS_NOT_SUPPORTED: u32 = 2150859089u32;
pub const ERROR_WSMAN_SOAP_DATA_ENCODING_UNKNOWN: u32 = 2150858766u32;
pub const ERROR_WSMAN_SOAP_FAULT_MUST_UNDERSTAND: u32 = 2150858768u32;
pub const ERROR_WSMAN_SOAP_VERSION_MISMATCH: u32 = 2150858765u32;
pub const ERROR_WSMAN_SSL_CONNECTION_ABORTED: u32 = 2150859194u32;
pub const ERROR_WSMAN_SUBSCRIBE_WMI_INVALID_KEY: u32 = 2150859225u32;
pub const ERROR_WSMAN_SUBSCRIPTION_CLIENT_DID_NOT_CALL_WITHIN_HEARTBEAT: u32 = 2150858762u32;
pub const ERROR_WSMAN_SUBSCRIPTION_CLOSED: u32 = 2150858760u32;
pub const ERROR_WSMAN_SUBSCRIPTION_CLOSE_IN_PROGRESS: u32 = 2150858761u32;
pub const ERROR_WSMAN_SUBSCRIPTION_LISTENER_NOLONGERVALID: u32 = 2150858905u32;
pub const ERROR_WSMAN_SUBSCRIPTION_NO_HEARTBEAT: u32 = 2150858763u32;
pub const ERROR_WSMAN_SYSTEM_NOT_FOUND: u32 = 2150858822u32;
pub const ERROR_WSMAN_TARGET_ALREADY_EXISTS: u32 = 2150858851u32;
pub const ERROR_WSMAN_TRANSPORT_NOT_SUPPORTED: u32 = 2150858970u32;
pub const ERROR_WSMAN_UNEXPECTED_SELECTORS: u32 = 2150858843u32;
pub const ERROR_WSMAN_UNKNOWN_HTTP_STATUS_RETURNED: u32 = 2150859023u32;
pub const ERROR_WSMAN_UNREPORTABLE_SUCCESS: u32 = 2150858829u32;
pub const ERROR_WSMAN_UNSUPPORTED_ADDRESSING_MODE: u32 = 2150858870u32;
pub const ERROR_WSMAN_UNSUPPORTED_ENCODING: u32 = 2150858796u32;
pub const ERROR_WSMAN_UNSUPPORTED_FEATURE: u32 = 2150858818u32;
pub const ERROR_WSMAN_UNSUPPORTED_FEATURE_IDENTIFY: u32 = 2150859257u32;
pub const ERROR_WSMAN_UNSUPPORTED_FEATURE_OPTIONS: u32 = 2150858918u32;
pub const ERROR_WSMAN_UNSUPPORTED_HTTP_STATUS_REDIRECT: u32 = 2150859024u32;
pub const ERROR_WSMAN_UNSUPPORTED_MEDIA: u32 = 2150858869u32;
pub const ERROR_WSMAN_UNSUPPORTED_OCTETTYPE: u32 = 2150859249u32;
pub const ERROR_WSMAN_UNSUPPORTED_TIMEOUT: u32 = 2150858764u32;
pub const ERROR_WSMAN_UNSUPPORTED_TYPE: u32 = 2150859234u32;
pub const ERROR_WSMAN_URISECURITY_INVALIDURIKEY: u32 = 2150859104u32;
pub const ERROR_WSMAN_URI_LIMIT: u32 = 2150858797u32;
pub const ERROR_WSMAN_URI_NON_DMTF_CLASS: u32 = 2150859065u32;
pub const ERROR_WSMAN_URI_QUERY_STRING_SYNTAX_ERROR: u32 = 2150858874u32;
pub const ERROR_WSMAN_URI_SECURITY_URI: u32 = 2150859183u32;
pub const ERROR_WSMAN_URI_WRONG_DMTF_VERSION: u32 = 2150859066u32;
pub const ERROR_WSMAN_VIRTUALACCOUNT_NOTSUPPORTED: u32 = 2150859259u32;
pub const ERROR_WSMAN_VIRTUALACCOUNT_NOTSUPPORTED_DOWNLEVEL: u32 = 2150859260u32;
pub const ERROR_WSMAN_WHITESPACE: u32 = 2150858830u32;
pub const ERROR_WSMAN_WMI_CANNOT_CONNECT_ACCESS_DENIED: u32 = 2150859014u32;
pub const ERROR_WSMAN_WMI_INVALID_VALUE: u32 = 2150859011u32;
pub const ERROR_WSMAN_WMI_MAX_NESTED: u32 = 2150859008u32;
pub const ERROR_WSMAN_WMI_PROVIDER_ACCESS_DENIED: u32 = 2150859013u32;
pub const ERROR_WSMAN_WMI_PROVIDER_INVALID_PARAMETER: u32 = 2150859038u32;
pub const ERROR_WSMAN_WMI_PROVIDER_NOT_CAPABLE: u32 = 2150859010u32;
pub const ERROR_WSMAN_WMI_SVC_ACCESS_DENIED: u32 = 2150859012u32;
pub const ERROR_WSMAN_WRONG_METADATA: u32 = 2150859233u32;
// WSMan scripting-API COM interfaces (IWSMan* family), modeled by this
// binding generator as opaque raw pointers.
pub type IWSMan = *mut ::core::ffi::c_void;
pub type IWSManConnectionOptions = *mut ::core::ffi::c_void;
pub type IWSManConnectionOptionsEx = *mut ::core::ffi::c_void;
pub type IWSManConnectionOptionsEx2 = *mut ::core::ffi::c_void;
pub type IWSManEnumerator = *mut ::core::ffi::c_void;
pub type IWSManEx = *mut ::core::ffi::c_void;
pub type IWSManEx2 = *mut ::core::ffi::c_void;
pub type IWSManEx3 = *mut ::core::ffi::c_void;
pub type IWSManInternal = *mut ::core::ffi::c_void;
pub type IWSManResourceLocator = *mut ::core::ffi::c_void;
pub type IWSManResourceLocatorInternal = *mut ::core::ffi::c_void;
pub type IWSManSession = *mut ::core::ffi::c_void;
// Opaque handle for the WSMan client API; the single u8 appears to be a
// layout placeholder (instances are handled via pointers, not by value).
#[repr(C)]
pub struct WSMAN_API(pub u8);
/// Authentication mechanism code paired with its credential payload
/// (username/password pair or certificate thumbprint).
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_AUTHENTICATION_CREDENTIALS {
    /// Authentication mechanism code; presumably selects which member of
    /// `Anonymous` is valid — confirm against wsman.h.
    pub authenticationMechanism: u32,
    pub Anonymous: WSMAN_AUTHENTICATION_CREDENTIALS_0,
}
/// Credential payload for [`WSMAN_AUTHENTICATION_CREDENTIALS`]: either an
/// explicit username/password pair or a certificate thumbprint string.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub union WSMAN_AUTHENTICATION_CREDENTIALS_0 {
    pub userAccount: WSMAN_USERNAME_PASSWORD_CREDS,
    pub certificateThumbprint: super::super::Foundation::PWSTR,
}
/// Quota values (shell count, operation count, operations per timeslot)
/// used by WSMan authorization.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct WSMAN_AUTHZ_QUOTA {
    pub maxAllowedConcurrentShells: u32,
    pub maxAllowedConcurrentOperations: u32,
    pub timeslotSize: u32,
    pub maxAllowedOperationsPerTimeslot: u32,
}
/// Certificate description strings (subject, issuer name/thumbprint,
/// subject name) referenced from [`WSMAN_SENDER_DETAILS`].
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_CERTIFICATE_DETAILS {
    pub subject: super::super::Foundation::PWSTR,
    pub issuerName: super::super::Foundation::PWSTR,
    pub issuerThumbprint: super::super::Foundation::PWSTR,
    pub subjectName: super::super::Foundation::PWSTR,
}
// Opaque command handle; passed as *const WSMAN_COMMAND to
// WSMAN_SHELL_COMPLETION_FUNCTION. The u8 is a layout placeholder.
#[repr(C)]
pub struct WSMAN_COMMAND(pub u8);
/// Counted array of command-line argument strings
/// (`args` points at `argsCount` PWSTR entries).
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_COMMAND_ARG_SET {
    pub argsCount: u32,
    pub args: *mut super::super::Foundation::PWSTR,
}
/// Connect-operation payload: a single [`WSMAN_DATA`] wrapper.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_CONNECT_DATA {
    pub data: WSMAN_DATA,
}
/// Create-shell-operation payload: a single [`WSMAN_DATA`] wrapper.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_CREATE_SHELL_DATA {
    pub data: WSMAN_DATA,
}
/// Tagged data payload: `r#type` indicates which member of the `Anonymous`
/// union (text, binary, or number) carries the value.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_DATA {
    pub r#type: WSManDataType,
    pub Anonymous: WSMAN_DATA_0,
}
/// Payload union for [`WSMAN_DATA`]: UTF-16 text, binary buffer, or a plain
/// 32-bit number.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub union WSMAN_DATA_0 {
    pub text: WSMAN_DATA_TEXT,
    pub binaryData: WSMAN_DATA_BINARY,
    pub number: u32,
}
/// Counted binary buffer: `data` points at `dataLength` bytes.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct WSMAN_DATA_BINARY {
    pub dataLength: u32,
    pub data: *mut u8,
}
/// Counted wide-string buffer (`bufferLength` paired with a PWSTR).
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_DATA_TEXT {
    pub bufferLength: u32,
    pub buffer: super::super::Foundation::PWSTR,
}
// Default WSMan operation timeout: 60 000 ms (one minute).
pub const WSMAN_DEFAULT_TIMEOUT_MS: u32 = 60000u32;
/// Environment-variable name/value string pair.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_ENVIRONMENT_VARIABLE {
    pub name: super::super::Foundation::PWSTR,
    pub value: super::super::Foundation::PWSTR,
}
/// Counted array of [`WSMAN_ENVIRONMENT_VARIABLE`] entries.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_ENVIRONMENT_VARIABLE_SET {
    pub varsCount: u32,
    pub vars: *mut WSMAN_ENVIRONMENT_VARIABLE,
}
/// Error record delivered to [`WSMAN_SHELL_COMPLETION_FUNCTION`]: numeric
/// code plus descriptive strings (detail, language, machine, plugin).
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_ERROR {
    pub code: u32,
    pub errorDetail: super::super::Foundation::PWSTR,
    pub language: super::super::Foundation::PWSTR,
    pub machineName: super::super::Foundation::PWSTR,
    pub pluginName: super::super::Foundation::PWSTR,
}
/// Filter expression plus the dialect string describing its syntax.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_FILTER {
    pub filter: super::super::Foundation::PWSTR,
    pub dialect: super::super::Foundation::PWSTR,
}
// Bit flags for shell receive/send results and requested client API versions.
pub const WSMAN_FLAG_RECEIVE_FLUSH: u32 = 2u32;
pub const WSMAN_FLAG_RECEIVE_RESULT_DATA_BOUNDARY: u32 = 4u32;
pub const WSMAN_FLAG_RECEIVE_RESULT_NO_MORE_DATA: u32 = 1u32;
pub const WSMAN_FLAG_REQUESTED_API_VERSION_1_0: u32 = 0u32;
pub const WSMAN_FLAG_REQUESTED_API_VERSION_1_1: u32 = 1u32;
pub const WSMAN_FLAG_SEND_NO_MORE_DATA: u32 = 1u32;
/// Fragment path plus the dialect string describing its syntax.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_FRAGMENT {
    pub path: super::super::Foundation::PWSTR,
    pub dialect: super::super::Foundation::PWSTR,
}
/// Selector key/value string pair (element of [`WSMAN_SELECTOR_SET`]).
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_KEY {
    pub key: super::super::Foundation::PWSTR,
    pub value: super::super::Foundation::PWSTR,
}
// Opaque operation handle; passed as *const WSMAN_OPERATION to
// WSMAN_SHELL_COMPLETION_FUNCTION. The u8 is a layout placeholder.
#[repr(C)]
pub struct WSMAN_OPERATION(pub u8);
/// Aggregate request metadata for an operation: fragment, filter, selector
/// and option sets, plus a version tag (see `WSMAN_OPERATION_INFOV1`/`V2`).
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_OPERATION_INFO {
    pub fragment: WSMAN_FRAGMENT,
    pub filter: WSMAN_FILTER,
    pub selectorSet: WSMAN_SELECTOR_SET,
    pub optionSet: WSMAN_OPTION_SET,
    pub reserved: *mut ::core::ffi::c_void,
    pub version: u32,
}
/// Extended operation metadata: like [`WSMAN_OPERATION_INFO`] but with an
/// extended option set plus UI/data locale strings.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_OPERATION_INFOEX {
    pub fragment: WSMAN_FRAGMENT,
    pub filter: WSMAN_FILTER,
    pub selectorSet: WSMAN_SELECTOR_SET,
    pub optionSet: WSMAN_OPTION_SETEX,
    pub version: u32,
    pub uiLocale: super::super::Foundation::PWSTR,
    pub dataLocale: super::super::Foundation::PWSTR,
}
// Version tags distinguishing the WSMAN_OPERATION_INFO layout (V1 = 0) from
// the extended WSMAN_OPERATION_INFOEX layout (V2 marker = 0xAABBCCDD).
pub const WSMAN_OPERATION_INFOV1: u32 = 0u32;
pub const WSMAN_OPERATION_INFOV2: u32 = 2864434397u32;
/// Single request option: name/value strings plus a must-comply flag.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_OPTION {
    pub name: super::super::Foundation::PWSTR,
    pub value: super::super::Foundation::PWSTR,
    pub mustComply: super::super::Foundation::BOOL,
}
/// Counted array of [`WSMAN_OPTION`] entries plus a set-wide
/// must-understand flag.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_OPTION_SET {
    pub optionsCount: u32,
    pub options: *mut WSMAN_OPTION,
    pub optionsMustUnderstand: super::super::Foundation::BOOL,
}
/// Extended option set: like [`WSMAN_OPTION_SET`] with an additional
/// parallel array of per-option type strings.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_OPTION_SETEX {
    pub optionsCount: u32,
    pub options: *mut WSMAN_OPTION,
    pub optionsMustUnderstand: super::super::Foundation::BOOL,
    pub optionTypes: *mut super::super::Foundation::PWSTR,
}
// Callback signatures implemented by WSMan authorization and operation
// plugins (authorize/command/connect/receive plus context-release hooks).
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_AUTHORIZE_OPERATION = ::core::option::Option<unsafe extern "system" fn(plugincontext: *const ::core::ffi::c_void, senderdetails: *const WSMAN_SENDER_DETAILS, flags: u32, operation: u32, action: super::super::Foundation::PWSTR, resourceuri: super::super::Foundation::PWSTR)>;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_AUTHORIZE_QUERY_QUOTA = ::core::option::Option<unsafe extern "system" fn(plugincontext: *const ::core::ffi::c_void, senderdetails: *const WSMAN_SENDER_DETAILS, flags: u32)>;
pub type WSMAN_PLUGIN_AUTHORIZE_RELEASE_CONTEXT = ::core::option::Option<unsafe extern "system" fn(userauthorizationcontext: *const ::core::ffi::c_void)>;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_AUTHORIZE_USER = ::core::option::Option<unsafe extern "system" fn(plugincontext: *const ::core::ffi::c_void, senderdetails: *const WSMAN_SENDER_DETAILS, flags: u32)>;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_COMMAND = ::core::option::Option<unsafe extern "system" fn(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, shellcontext: *const ::core::ffi::c_void, commandline: super::super::Foundation::PWSTR, arguments: *const WSMAN_COMMAND_ARG_SET)>;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_CONNECT = ::core::option::Option<unsafe extern "system" fn(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, shellcontext: *const ::core::ffi::c_void, commandcontext: *const ::core::ffi::c_void, inboundconnectinformation: *const WSMAN_DATA)>;
// Parameter codes for plugin configuration/query calls.
// NOTE(review): several codes share numeric values (e.g. 4 and 5 appear
// twice); presumably they are scoped per request kind — confirm in wsman.h.
pub const WSMAN_PLUGIN_PARAMS_AUTORESTART: u32 = 3u32;
pub const WSMAN_PLUGIN_PARAMS_GET_REQUESTED_DATA_LOCALE: u32 = 6u32;
pub const WSMAN_PLUGIN_PARAMS_GET_REQUESTED_LOCALE: u32 = 5u32;
pub const WSMAN_PLUGIN_PARAMS_HOSTIDLETIMEOUTSECONDS: u32 = 4u32;
pub const WSMAN_PLUGIN_PARAMS_LARGEST_RESULT_SIZE: u32 = 4u32;
pub const WSMAN_PLUGIN_PARAMS_MAX_ENVELOPE_SIZE: u32 = 1u32;
pub const WSMAN_PLUGIN_PARAMS_NAME: u32 = 5u32;
pub const WSMAN_PLUGIN_PARAMS_REMAINING_RESULT_SIZE: u32 = 3u32;
pub const WSMAN_PLUGIN_PARAMS_RUNAS_USER: u32 = 2u32;
pub const WSMAN_PLUGIN_PARAMS_SHAREDHOST: u32 = 1u32;
pub const WSMAN_PLUGIN_PARAMS_TIMEOUT: u32 = 2u32;
// Receive entry point and shell/command context release hooks.
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_RECEIVE = ::core::option::Option<unsafe extern "system" fn(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, shellcontext: *const ::core::ffi::c_void, commandcontext: *const ::core::ffi::c_void, streamset: *const WSMAN_STREAM_ID_SET)>;
pub type WSMAN_PLUGIN_RELEASE_COMMAND_CONTEXT = ::core::option::Option<unsafe extern "system" fn(shellcontext: *const ::core::ffi::c_void, commandcontext: *const ::core::ffi::c_void)>;
pub type WSMAN_PLUGIN_RELEASE_SHELL_CONTEXT = ::core::option::Option<unsafe extern "system" fn(shellcontext: *const ::core::ffi::c_void)>;
/// Per-request context handed to plugin callbacks: sender details, locales,
/// resource URI, operation info, and shutdown-notification state.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_PLUGIN_REQUEST {
    pub senderDetails: *mut WSMAN_SENDER_DETAILS,
    pub locale: super::super::Foundation::PWSTR,
    pub resourceUri: super::super::Foundation::PWSTR,
    pub operationInfo: *mut WSMAN_OPERATION_INFO,
    pub shutdownNotification: i32,
    pub shutdownNotificationHandle: super::super::Foundation::HANDLE,
    pub dataLocale: super::super::Foundation::PWSTR,
}
// Remaining plugin entry points (send/shell/shutdown/signal/startup) and the
// reason/flag codes passed to WSMAN_PLUGIN_SHUTDOWN and WSMAN_PLUGIN_STARTUP.
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_SEND = ::core::option::Option<unsafe extern "system" fn(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, shellcontext: *const ::core::ffi::c_void, commandcontext: *const ::core::ffi::c_void, stream: super::super::Foundation::PWSTR, inbounddata: *const WSMAN_DATA)>;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_SHELL = ::core::option::Option<unsafe extern "system" fn(plugincontext: *const ::core::ffi::c_void, requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, startupinfo: *const WSMAN_SHELL_STARTUP_INFO_V11, inboundshellinformation: *const WSMAN_DATA)>;
pub type WSMAN_PLUGIN_SHUTDOWN = ::core::option::Option<unsafe extern "system" fn(plugincontext: *const ::core::ffi::c_void, flags: u32, reason: u32) -> u32>;
// Shutdown reason codes (the `reason` argument of WSMAN_PLUGIN_SHUTDOWN).
pub const WSMAN_PLUGIN_SHUTDOWN_IDLETIMEOUT_ELAPSED: u32 = 4u32;
pub const WSMAN_PLUGIN_SHUTDOWN_IISHOST: u32 = 3u32;
pub const WSMAN_PLUGIN_SHUTDOWN_SERVICE: u32 = 2u32;
pub const WSMAN_PLUGIN_SHUTDOWN_SYSTEM: u32 = 1u32;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_SIGNAL = ::core::option::Option<unsafe extern "system" fn(requestdetails: *const WSMAN_PLUGIN_REQUEST, flags: u32, shellcontext: *const ::core::ffi::c_void, commandcontext: *const ::core::ffi::c_void, code: super::super::Foundation::PWSTR)>;
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_PLUGIN_STARTUP = ::core::option::Option<unsafe extern "system" fn(flags: u32, applicationidentification: super::super::Foundation::PWSTR, extrainfo: super::super::Foundation::PWSTR, plugincontext: *mut *mut ::core::ffi::c_void) -> u32>;
// Startup flag codes (the `flags` argument of WSMAN_PLUGIN_STARTUP).
pub const WSMAN_PLUGIN_STARTUP_AUTORESTARTED_CRASH: u32 = 2u32;
pub const WSMAN_PLUGIN_STARTUP_AUTORESTARTED_REBOOT: u32 = 1u32;
pub const WSMAN_PLUGIN_STARTUP_REQUEST_RECEIVED: u32 = 0u32;
/// Proxy settings: access-type code plus the credentials used for the proxy.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_PROXY_INFO {
    pub accessType: u32,
    pub authenticationCredentials: WSMAN_AUTHENTICATION_CREDENTIALS,
}
/// Result of a receive operation: stream id, the stream payload, the
/// command-state string, and the exit code.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_RECEIVE_DATA_RESULT {
    pub streamId: super::super::Foundation::PWSTR,
    pub streamData: WSMAN_DATA,
    pub commandState: super::super::Foundation::PWSTR,
    pub exitCode: u32,
}
/// Response payload delivered to [`WSMAN_SHELL_COMPLETION_FUNCTION`]:
/// one of the receive / connect / create-shell result structures.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub union WSMAN_RESPONSE_DATA {
    pub receiveData: WSMAN_RECEIVE_DATA_RESULT,
    pub connectData: WSMAN_CONNECT_DATA,
    pub createData: WSMAN_CREATE_SHELL_DATA,
}
/// Counted array of [`WSMAN_KEY`] selector entries.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_SELECTOR_SET {
    pub numberKeys: u32,
    pub keys: *mut WSMAN_KEY,
}
/// Identity of the request sender: name, auth mechanism, optional client
/// certificate details, client token handle, and the HTTP URL used.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_SENDER_DETAILS {
    pub senderName: super::super::Foundation::PWSTR,
    pub authenticationMechanism: super::super::Foundation::PWSTR,
    pub certificateDetails: *mut WSMAN_CERTIFICATE_DETAILS,
    pub clientToken: super::super::Foundation::HANDLE,
    pub httpURL: super::super::Foundation::PWSTR,
}
// Opaque session handle; the u8 is a layout placeholder.
#[repr(C)]
pub struct WSMAN_SESSION(pub u8);
// Opaque shell handle; passed as *const WSMAN_SHELL to
// WSMAN_SHELL_COMPLETION_FUNCTION. The u8 is a layout placeholder.
#[repr(C)]
pub struct WSMAN_SHELL(pub u8);
/// Async request descriptor: caller-supplied context pointer plus the
/// completion callback to invoke when the operation finishes.
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
#[derive(Clone, Copy)]
pub struct WSMAN_SHELL_ASYNC {
    pub operationContext: *mut ::core::ffi::c_void,
    pub completionFunction: WSMAN_SHELL_COMPLETION_FUNCTION,
}
// Completion callback for asynchronous shell operations; receives the error
// record, the shell/command/operation handles, and the response payload.
#[cfg(feature = "Win32_Foundation")]
pub type WSMAN_SHELL_COMPLETION_FUNCTION = ::core::option::Option<unsafe extern "system" fn(operationcontext: *const ::core::ffi::c_void, flags: u32, error: *const WSMAN_ERROR, shell: *const WSMAN_SHELL, command: *const WSMAN_COMMAND, operationhandle: *const WSMAN_OPERATION, data: *const WSMAN_RESPONSE_DATA)>;
/// Disconnect options for a shell: idle timeout in milliseconds.
#[repr(C)]
#[derive(Clone, Copy)]
pub struct WSMAN_SHELL_DISCONNECT_INFO {
    pub idleTimeoutMs: u32,
}
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct WSMAN_SHELL_STARTUP_INFO_V10 {
pub inputStreamSet: *mut WSMAN_STREAM_ID_SET,
pub outputStreamSet: *mut WSMAN_STREAM_ID_SET,
pub idleTimeoutMs: u32,
pub workingDirectory: super::super::Foundation::PWSTR,
pub variableSet: *mut WSMAN_ENVIRONMENT_VARIABLE_SET,
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::marker::Copy for WSMAN_SHELL_STARTUP_INFO_V10 {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::clone::Clone for WSMAN_SHELL_STARTUP_INFO_V10 {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct WSMAN_SHELL_STARTUP_INFO_V11 {
pub __AnonymousBase_wsman_L665_C48: WSMAN_SHELL_STARTUP_INFO_V10,
pub name: super::super::Foundation::PWSTR,
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::marker::Copy for WSMAN_SHELL_STARTUP_INFO_V11 {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::clone::Clone for WSMAN_SHELL_STARTUP_INFO_V11 {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct WSMAN_STREAM_ID_SET {
pub streamIDsCount: u32,
pub streamIDs: *mut super::super::Foundation::PWSTR,
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::marker::Copy for WSMAN_STREAM_ID_SET {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::clone::Clone for WSMAN_STREAM_ID_SET {
fn clone(&self) -> Self {
*self
}
}
#[repr(C)]
#[cfg(feature = "Win32_Foundation")]
pub struct WSMAN_USERNAME_PASSWORD_CREDS {
pub username: super::super::Foundation::PWSTR,
pub password: super::super::Foundation::PWSTR,
}
#[cfg(feature = "Win32_Foundation")]
impl ::core::marker::Copy for WSMAN_USERNAME_PASSWORD_CREDS {}
#[cfg(feature = "Win32_Foundation")]
impl ::core::clone::Clone for WSMAN_USERNAME_PASSWORD_CREDS {
fn clone(&self) -> Self {
*self
}
}
/// CLSID of the WSMan automation object.
pub const WSMan: ::windows_sys::core::GUID = ::windows_sys::core::GUID {
    data1: 3169673595,
    data2: 60419,
    data3: 16907,
    data4: [133, 8, 151, 125, 199, 166, 134, 189],
};
// Authentication scheme selection (bit flags).
pub type WSManAuthenticationFlags = i32;
pub const WSMAN_FLAG_DEFAULT_AUTHENTICATION: WSManAuthenticationFlags = 0i32;
pub const WSMAN_FLAG_NO_AUTHENTICATION: WSManAuthenticationFlags = 1i32;
pub const WSMAN_FLAG_AUTH_DIGEST: WSManAuthenticationFlags = 2i32;
pub const WSMAN_FLAG_AUTH_NEGOTIATE: WSManAuthenticationFlags = 4i32;
pub const WSMAN_FLAG_AUTH_BASIC: WSManAuthenticationFlags = 8i32;
pub const WSMAN_FLAG_AUTH_KERBEROS: WSManAuthenticationFlags = 16i32;
pub const WSMAN_FLAG_AUTH_CREDSSP: WSManAuthenticationFlags = 128i32;
pub const WSMAN_FLAG_AUTH_CLIENT_CERTIFICATE: WSManAuthenticationFlags = 32i32;
// Flags delivered to completion callbacks (bit flags).
pub type WSManCallbackFlags = i32;
pub const WSMAN_FLAG_CALLBACK_END_OF_OPERATION: WSManCallbackFlags = 1i32;
pub const WSMAN_FLAG_CALLBACK_END_OF_STREAM: WSManCallbackFlags = 8i32;
pub const WSMAN_FLAG_CALLBACK_SHELL_SUPPORTS_DISCONNECT: WSManCallbackFlags = 32i32;
pub const WSMAN_FLAG_CALLBACK_SHELL_AUTODISCONNECTED: WSManCallbackFlags = 64i32;
pub const WSMAN_FLAG_CALLBACK_NETWORK_FAILURE_DETECTED: WSManCallbackFlags = 256i32;
pub const WSMAN_FLAG_CALLBACK_RETRYING_AFTER_NETWORK_FAILURE: WSManCallbackFlags = 512i32;
pub const WSMAN_FLAG_CALLBACK_RECONNECTED_AFTER_NETWORK_FAILURE: WSManCallbackFlags = 1024i32;
pub const WSMAN_FLAG_CALLBACK_SHELL_AUTODISCONNECTING: WSManCallbackFlags = 2048i32;
pub const WSMAN_FLAG_CALLBACK_RETRY_ABORTED_DUE_TO_INTERNAL_ERROR: WSManCallbackFlags = 4096i32;
pub const WSMAN_FLAG_CALLBACK_RECEIVE_DELAY_STREAM_REQUEST_PROCESSED: WSManCallbackFlags = 8192i32;
// Discriminant for the WSMAN_DATA payload union.
pub type WSManDataType = i32;
pub const WSMAN_DATA_NONE: WSManDataType = 0i32;
pub const WSMAN_DATA_TYPE_TEXT: WSManDataType = 1i32;
pub const WSMAN_DATA_TYPE_BINARY: WSManDataType = 2i32;
pub const WSMAN_DATA_TYPE_DWORD: WSManDataType = 4i32;
// Enumeration behaviour flags (note several share the value 0: they are
// the documented defaults of their respective option groups).
pub type WSManEnumFlags = i32;
pub const WSManFlagNonXmlText: WSManEnumFlags = 1i32;
pub const WSManFlagReturnObject: WSManEnumFlags = 0i32;
pub const WSManFlagReturnEPR: WSManEnumFlags = 2i32;
pub const WSManFlagReturnObjectAndEPR: WSManEnumFlags = 4i32;
pub const WSManFlagHierarchyDeep: WSManEnumFlags = 0i32;
pub const WSManFlagHierarchyShallow: WSManEnumFlags = 32i32;
pub const WSManFlagHierarchyDeepBasePropsOnly: WSManEnumFlags = 64i32;
pub const WSManFlagAssociatedInstance: WSManEnumFlags = 0i32;
pub const WSManFlagAssociationInstance: WSManEnumFlags = 128i32;
pub const WSManInternal: ::windows_sys::core::GUID = ::windows_sys::core::GUID { data1: 2111866789, data2: 24011, data3: 19959, data4: [187, 18, 9, 36, 173, 143, 189, 154] };
// Proxy configuration for the native (C) API...
pub type WSManProxyAccessType = i32;
pub const WSMAN_OPTION_PROXY_IE_PROXY_CONFIG: WSManProxyAccessType = 1i32;
pub const WSMAN_OPTION_PROXY_WINHTTP_PROXY_CONFIG: WSManProxyAccessType = 2i32;
pub const WSMAN_OPTION_PROXY_AUTO_DETECT: WSManProxyAccessType = 4i32;
pub const WSMAN_OPTION_PROXY_NO_PROXY_SERVER: WSManProxyAccessType = 8i32;
// ...and the equivalent values for the automation (COM) API.
pub type WSManProxyAccessTypeFlags = i32;
pub const WSManProxyIEConfig: WSManProxyAccessTypeFlags = 1i32;
pub const WSManProxyWinHttpConfig: WSManProxyAccessTypeFlags = 2i32;
pub const WSManProxyAutoDetect: WSManProxyAccessTypeFlags = 4i32;
pub const WSManProxyNoProxyServer: WSManProxyAccessTypeFlags = 8i32;
pub type WSManProxyAuthenticationFlags = i32;
pub const WSManFlagProxyAuthenticationUseNegotiate: WSManProxyAuthenticationFlags = 1i32;
pub const WSManFlagProxyAuthenticationUseBasic: WSManProxyAuthenticationFlags = 2i32;
pub const WSManFlagProxyAuthenticationUseDigest: WSManProxyAuthenticationFlags = 4i32;
// Session creation flags for the automation API (bit flags).
pub type WSManSessionFlags = i32;
pub const WSManFlagUTF8: WSManSessionFlags = 1i32;
pub const WSManFlagCredUsernamePassword: WSManSessionFlags = 4096i32;
pub const WSManFlagSkipCACheck: WSManSessionFlags = 8192i32;
pub const WSManFlagSkipCNCheck: WSManSessionFlags = 16384i32;
pub const WSManFlagUseNoAuthentication: WSManSessionFlags = 32768i32;
pub const WSManFlagUseDigest: WSManSessionFlags = 65536i32;
pub const WSManFlagUseNegotiate: WSManSessionFlags = 131072i32;
pub const WSManFlagUseBasic: WSManSessionFlags = 262144i32;
pub const WSManFlagUseKerberos: WSManSessionFlags = 524288i32;
pub const WSManFlagNoEncryption: WSManSessionFlags = 1048576i32;
pub const WSManFlagUseClientCertificate: WSManSessionFlags = 2097152i32;
pub const WSManFlagEnableSPNServerPort: WSManSessionFlags = 4194304i32;
pub const WSManFlagUTF16: WSManSessionFlags = 8388608i32;
pub const WSManFlagUseCredSsp: WSManSessionFlags = 16777216i32;
pub const WSManFlagSkipRevocationCheck: WSManSessionFlags = 33554432i32;
pub const WSManFlagAllowNegotiateImplicitCredentials: WSManSessionFlags = 67108864i32;
pub const WSManFlagUseSsl: WSManSessionFlags = 134217728i32;
// Per-session option identifiers (enumeration, not a bitmask).
pub type WSManSessionOption = i32;
pub const WSMAN_OPTION_DEFAULT_OPERATION_TIMEOUTMS: WSManSessionOption = 1i32;
pub const WSMAN_OPTION_MAX_RETRY_TIME: WSManSessionOption = 11i32;
pub const WSMAN_OPTION_TIMEOUTMS_CREATE_SHELL: WSManSessionOption = 12i32;
pub const WSMAN_OPTION_TIMEOUTMS_RUN_SHELL_COMMAND: WSManSessionOption = 13i32;
pub const WSMAN_OPTION_TIMEOUTMS_RECEIVE_SHELL_OUTPUT: WSManSessionOption = 14i32;
pub const WSMAN_OPTION_TIMEOUTMS_SEND_SHELL_INPUT: WSManSessionOption = 15i32;
pub const WSMAN_OPTION_TIMEOUTMS_SIGNAL_SHELL: WSManSessionOption = 16i32;
pub const WSMAN_OPTION_TIMEOUTMS_CLOSE_SHELL: WSManSessionOption = 17i32;
pub const WSMAN_OPTION_SKIP_CA_CHECK: WSManSessionOption = 18i32;
pub const WSMAN_OPTION_SKIP_CN_CHECK: WSManSessionOption = 19i32;
pub const WSMAN_OPTION_UNENCRYPTED_MESSAGES: WSManSessionOption = 20i32;
pub const WSMAN_OPTION_UTF16: WSManSessionOption = 21i32;
pub const WSMAN_OPTION_ENABLE_SPN_SERVER_PORT: WSManSessionOption = 22i32;
pub const WSMAN_OPTION_MACHINE_ID: WSManSessionOption = 23i32;
pub const WSMAN_OPTION_LOCALE: WSManSessionOption = 25i32;
pub const WSMAN_OPTION_UI_LANGUAGE: WSManSessionOption = 26i32;
pub const WSMAN_OPTION_MAX_ENVELOPE_SIZE_KB: WSManSessionOption = 28i32;
pub const WSMAN_OPTION_SHELL_MAX_DATA_SIZE_PER_MESSAGE_KB: WSManSessionOption = 29i32;
pub const WSMAN_OPTION_REDIRECT_LOCATION: WSManSessionOption = 30i32;
pub const WSMAN_OPTION_SKIP_REVOCATION_CHECK: WSManSessionOption = 31i32;
pub const WSMAN_OPTION_ALLOW_NEGOTIATE_IMPLICIT_CREDENTIALS: WSManSessionOption = 32i32;
pub const WSMAN_OPTION_USE_SSL: WSManSessionOption = 33i32;
// NOTE(review): "INTEARACTIVE" looks like a typo, but the name is emitted
// from the Windows API metadata — verify upstream before renaming, since a
// rename breaks every caller of this binding.
pub const WSMAN_OPTION_USE_INTEARACTIVE_TOKEN: WSManSessionOption = 34i32;
// Shell operation flags (bit flags).
pub type WSManShellFlag = i32;
pub const WSMAN_FLAG_NO_COMPRESSION: WSManShellFlag = 1i32;
pub const WSMAN_FLAG_DELETE_SERVER_SESSION: WSManShellFlag = 2i32;
pub const WSMAN_FLAG_SERVER_BUFFERING_MODE_DROP: WSManShellFlag = 4i32;
pub const WSMAN_FLAG_SERVER_BUFFERING_MODE_BLOCK: WSManShellFlag = 8i32;
pub const WSMAN_FLAG_RECEIVE_DELAY_OUTPUT_STREAM: WSManShellFlag = 16i32;
|
/// Contains all states of the UI that we will run through in game which
/// alters the main UI.
///
/// NOTE(review): no state-machine logic is visible here — transitions are
/// presumably handled by the systems below; confirm with their callers.
#[derive(Debug, Clone)]
pub enum UIState {
    EscapeMenu,
    SaveMenu,
    LoadMenu,
    OptionsMenu,
    ShopMenu,
    UpgradeMenu,
    InventoryMenu,
    DialogueMenu,
    /// Presumably the no-overlay gameplay state — confirm with the UI systems.
    NormalState,
}
// Systems for managing the UI. Not important for now.
/// If game is paused, all other systems must pause as well; if unpaused,
/// gamestate continues from current position.
///
/// NOTE(review): currently a stub — no pause/resume logic is implemented.
pub fn handle_game_state() {
}
#[cfg(test)]
mod tests {
    /// Sanity check that the test harness is wired up.
    #[test]
    fn it_works() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
|
#![feature(plugin)]
#![plugin(rocket_codegen)]
#[macro_use]
extern crate lazy_static;
extern crate reqwest;
#[macro_use]
extern crate serde_derive;
extern crate serde_json;
extern crate serde_yaml;
extern crate log;
use log::*;
use std::fs::File;
use std::path::{Path, PathBuf};
use serde_json::Value;
extern crate rocket;
extern crate rocket_contrib;
use rocket::response;
use rocket_contrib::Json;
/// One monitored bus route/stop pair, deserialized from `data.yaml`.
#[derive(Serialize, Deserialize, Debug)]
struct Route {
    // rid/sec are forwarded verbatim as upstream query parameters
    // (see proc_route's URL construction).
    rid: i64,
    sec: i64,
    name: String,
    to: String,
    // Index into the upstream response's "Etas" array (see proc_route).
    stop_id: usize,
    stop_name: String,
}
/// A route paired with its freshly fetched ETA.
/// NOTE(review): the ETA's unit is whatever the upstream API returns —
/// not visible here; confirm before displaying.
#[derive(Serialize, Debug)]
struct RouteData {
    route: &'static Route,
    eta: i64,
}
/// Result of fetching one route's ETA; errors are human-readable messages.
type FetchResult = Result<RouteData, String>;
/// Fetches the realtime ETA for a single configured `route`.
///
/// Queries the 5284 route-info endpoint and extracts
/// `json["Etas"][route.stop_id]["eta"]`. Every failure mode (request,
/// HTTP status, JSON parse, missing field) is mapped to a human-readable
/// `String` error.
fn proc_route(route: &'static Route) -> FetchResult {
    // `&'static` on a const is redundant — consts already have no lifetime.
    const BASE_URL: &str = "http://routes.5284.com.tw/ntpcebus/Js/RouteInfo";
    let url = format!("{}?rid={}&sec={}", BASE_URL, route.rid, route.sec);
    // `as_str()` replaces the less readable `&*url` reborrow.
    let resp = reqwest::get(url.as_str()).map_err(|_| "Get result build failed")?;
    if !resp.status().is_success() {
        return Err(format!("Return status {}", resp.status()));
    }
    let json_data: Value = serde_json::from_reader(resp).map_err(|_| "Parse json failed")?;
    let eta = json_data
        .get("Etas")
        .and_then(|x| x.get(route.stop_id))
        .and_then(|x| x.get("eta"))
        .and_then(|x| x.as_i64())
        .ok_or("Extract value from json failed")?;
    println!("{}: eta = {}", route.name, eta);
    // Field-init shorthand instead of `route: route, eta: eta`.
    Ok(RouteData { route, eta })
}
lazy_static! {
    /// Route definitions loaded once, lazily, from `data.yaml`.
    /// Panics on first access if the file is missing or malformed.
    static ref ROUTE_YAML: Vec<Route> = {
        // `expect` keeps the underlying error in the panic message; the old
        // `map_err(|_| "...").unwrap()` discarded the real I/O/parse error.
        let file = File::open("data.yaml").expect("Open data.yaml failed");
        let yaml_data: Vec<Route> = serde_yaml::from_reader(file).expect("yaml parsed failed");
        yaml_data
    };
}
#[get("/")]
fn index() -> Option<response::NamedFile> {
files("index.html".into())
}
/// Serves any file under `static/`; yields `None` (HTTP 404) when absent.
#[get("/<path..>", rank=5)]
fn files(path: PathBuf) -> Option<response::NamedFile> {
    let full_path = Path::new("static/").join(path);
    response::NamedFile::open(full_path).ok()
}
#[get("/routes")]
fn get_routes() -> Result<Json<Vec<RouteData>>, response::status::Custom<&'static str>> {
let result: Result<Vec<RouteData>, String> = ROUTE_YAML.iter().map(proc_route).collect();
match result {
Ok(res) => Ok(Json(res)),
Err(_) => Err(response::status::Custom(rocket::http::Status::InternalServerError, "Can't fetch data"))
}
}
/// Mounts the static-file routes at `/` and the JSON API under `/ajax/`,
/// then launches the Rocket server (blocks forever).
fn main() {
    rocket::ignite()
        .mount("/", routes![files, index])
        .mount("/ajax/", routes![get_routes])
        .launch();
}
|
use core::cell::RefCell;
use cortex_m::interrupt::{free, Mutex};
use embedded_graphics::{egtext, image::Image, pixelcolor::BinaryColor, prelude::*, text_style};
use heapless::{consts::*, HistoryBuffer, String};
use profont::{ProFont12Point, ProFont9Point};
use ssd1306::{mode::GraphicsMode, prelude::*};
use tinybmp::Bmp;
use ufmt::uwrite;
use crate::SystemData;
/// Number of log lines shown on screen (log_to_screen draws at 8 px/line
/// on the 32-px-tall panel).
const NUM_LOG_LINES: usize = 4;
lazy_static! {
    /// RF-activity icon bitmap, embedded into the binary at compile time.
    static ref RF_BMP: Bmp<'static> = Bmp::from_slice(include_bytes!("./radio.bmp")).unwrap();
    /// Rolling buffer of the most recent log lines; wrapped in a
    /// critical-section Mutex so it is safe to touch from interrupt context.
    static ref LOG_BUFFER: Mutex<RefCell<HistoryBuffer<String<U32>, U4>>> =
        Mutex::new(RefCell::new(HistoryBuffer::new()));
}
/// Driver for the 128x32 SSD1306 status display.
pub struct Ui<'t, I>
where
    I: WriteOnlyDataCommand,
{
    /// Pre-positioned radio-activity icon (see `Ui::new`).
    rf_icon: Image<'t, Bmp<'static>, BinaryColor>,
    display: GraphicsMode<I, DisplaySize128x32>,
}
impl<'t, I> Ui<'t, I>
where
    I: WriteOnlyDataCommand,
{
    /// Wraps an initialised display; the RF icon is fixed at (105, 20),
    /// the bottom-right corner of the 128x32 panel.
    pub fn new(display: GraphicsMode<I, DisplaySize128x32>) -> Self {
        Self {
            display,
            rf_icon: Image::new(&RF_BMP, Point::new(105, 20)),
        }
    }
    /// Clears the in-memory framebuffer (nothing reaches the panel until
    /// `flush`).
    pub fn clear(&mut self) {
        self.display.clear();
    }
    /// Pushes the framebuffer to the panel; panics if the bus write fails.
    pub fn flush(&mut self) {
        self.display.flush().unwrap();
    }
    /// Renders the main status screen: temperature (top-left), humidity
    /// (top-right), CO2 (bottom-left), and the RF icon when a radio
    /// transmission happened recently. Sensor errors render as "ERR".
    pub fn draw(&mut self, system_data: &SystemData) {
        let mut text: String<U16> = String::new();
        if system_data.errors.temperature {
            uwrite!(&mut text, "ERR").unwrap();
        } else {
            // Temperature is in hundredths of a degree; split manually
            // because ufmt has no float or zero-pad support. The explicit
            // "0{}" branch fixes e.g. 2005 rendering as "20.5C" instead of
            // "20.05C". (Negative values would still render oddly —
            // NOTE(review): confirm whether sub-zero temps can occur here.)
            let t = system_data.sensors.avgs.temperature;
            let t_int = t / 100;
            let t_dec = t % 100;
            if t_dec < 10 {
                uwrite!(&mut text, "{}.0{}C", t_int, t_dec).unwrap();
            } else {
                uwrite!(&mut text, "{}.{}C", t_int, t_dec).unwrap();
            }
        }
        egtext!(
            text = &text,
            top_left = (0, 0),
            style = text_style!(font = ProFont12Point, text_color = BinaryColor::On)
        )
        .draw(&mut self.display)
        .unwrap();
        text.clear();
        if system_data.errors.humidity {
            uwrite!(&mut text, "ERR").unwrap();
        } else {
            let h = system_data.sensors.avgs.humidity;
            uwrite!(&mut text, "{}%", h).unwrap();
        }
        egtext!(
            text = &text,
            top_left = (95, 0),
            style = text_style!(font = ProFont12Point, text_color = BinaryColor::On)
        )
        .draw(&mut self.display)
        .unwrap();
        text.clear();
        if system_data.errors.co2 {
            uwrite!(&mut text, "ERR").unwrap();
        } else {
            let c = system_data.sensors.avgs.co2;
            uwrite!(&mut text, "{}ppm", c).unwrap();
        }
        egtext!(
            text = &text,
            top_left = (00, 18),
            style = text_style!(font = ProFont12Point, text_color = BinaryColor::On)
        )
        .draw(&mut self.display)
        .unwrap();
        if system_data.ticks_since_last_tx < 10 {
            self.rf_icon.draw(&mut self.display).unwrap();
        }
    }
    /// Appends `text` to the rolling log buffer and redraws up to
    /// `NUM_LOG_LINES` entries. Runs inside a critical section because the
    /// buffer is shared with interrupt context.
    pub fn log_to_screen(&mut self, text: &str) {
        self.display.clear();
        free(|cs| {
            let mut buffer = LOG_BUFFER.borrow(cs).borrow_mut();
            buffer.write(String::from(text));
            // Iterate only over the entries written so far: the previous
            // unconditional `[i]` indexing over 0..NUM_LOG_LINES panicked
            // until the history buffer had filled up with 4 entries.
            for (i, line) in buffer.as_slice().iter().take(NUM_LOG_LINES).enumerate() {
                egtext!(
                    text = line,
                    top_left = (0, (i * 8) as i32),
                    style = text_style!(font = ProFont9Point, text_color = BinaryColor::On)
                )
                .draw(&mut self.display)
                .unwrap();
            }
        });
        self.display.flush().unwrap();
    }
}
|
use k8s_openapi::api::core::v1::Event;
use reqwest::Client;
use serde::{Deserialize, Serialize};
/// Converts a Kubernetes `Event` into a Teams "MessageCard" payload.
///
/// Fixes over the previous draft, which did not compile: string fields
/// need owned `String`s (not `&'static str` literals), `summary` must be a
/// `String` (not the whole `Event`), and the struct's `sections` field was
/// missing from the initializer.
fn format_message(event: Event) -> TeamsMessage {
    // NOTE(review): assumes `Event::message` carries the human-readable
    // text — confirm against the targeted k8s-openapi version.
    let summary = event
        .message
        .unwrap_or_else(|| String::from("(kubernetes event with no message)"));
    TeamsMessage {
        message_type: "MessageCard".to_string(),
        context: "http://schema.org/extensions".to_string(),
        theme_color: "0076D7".to_string(),
        summary,
        sections: Vec::new(),
    }
}
pub fn send_message(url: &str, event: Event) {
let msg = format_message(event);
let resp = Client::new().json(&msg).send().await()?.json().await()?;
todo!()
}
/// Top-level Teams "MessageCard" webhook payload. The `@`-prefixed JSON
/// keys are part of the MessageCard schema, hence the serde renames.
#[derive(Serialize, Deserialize)]
struct TeamsMessage {
    // Always "MessageCard" for this card format (see format_message).
    #[serde(rename = "@type")]
    message_type: String,
    #[serde(rename = "@context")]
    context: String,
    #[serde(rename = "themeColor")]
    theme_color: String,
    summary: String,
    sections: Vec<TeamsMessageSection>,
}
struct TeamsMessageSection {
#[serde(rename = "activityTitle")]
activity_title: String,
#[serde(rename = "activitySubtitle")]
activity_subtitle: String,
#[serde(rename = "activityImage")]
activity_image: String,
facts: Vec<TeamsMessageSectionFact>,
markdown: bool,
}
struct TeamsMessageSectionFact {
name: String,
value: String,
}
|
use crate::universe::{RsUniverse, Cell};
use super::RsRenderer;
use super::consts::*;
/// Renderer extension whose framebuffer is pre-filled with the grid colour
/// once, so `render` only has to repaint cell interiors.
pub trait RsNoGridRenderer {
    /// Creates a renderer with every pixel pre-set to `GRID_COLOR`.
    fn new_filled(width: usize, height: usize) -> Self;
    /// Draws the universe's current cell states into the framebuffer.
    fn render(&mut self, universe: &RsUniverse);
}
impl RsNoGridRenderer for RsRenderer {
    /// Builds a renderer and fills every RGBA pixel with `GRID_COLOR`, so
    /// the grid lines never need repainting afterwards.
    fn new_filled(width: usize, height: usize) -> RsRenderer {
        let mut r = RsRenderer::new(width, height);
        let mut idx = 0;
        // One pixel (BPP bytes) per iteration.
        for _i in 0..r.get_framebuffer_len() / BPP {
            r.framebuffer[idx + 0] = GRID_COLOR[0];
            r.framebuffer[idx + 1] = GRID_COLOR[1];
            r.framebuffer[idx + 2] = GRID_COLOR[2];
            r.framebuffer[idx + 3] = GRID_COLOR[3];
            idx += BPP;
        }
        r
    }
    /// Paints each universe cell as a CELL_SIZE x CELL_SIZE block, leaving
    /// GRID_SIZE-pixel gaps (which keep the pre-filled grid colour).
    fn render(&mut self, universe: &RsUniverse) {
        let cells = universe.get_cells();
        // Bytes per framebuffer scanline.
        let pitch = self.get_framebuffer_width() * BPP;
        // Start below the topmost horizontal grid line.
        let mut cell_offset = GRID_SIZE * pitch;
        for row in 0..self.height {
            // Skip the leftmost vertical grid line of this row.
            cell_offset += GRID_SIZE * BPP;
            for col in 0..self.width {
                let color = match cells[universe.get_index(row, col)] {
                    Cell::Dead => DEAD_COLOR,
                    Cell::Alive => ALIVE_COLOR,
                };
                // Write the first scanline of the cell pixel by pixel...
                for i in 0..CELL_SIZE {
                    self.framebuffer[cell_offset + i * BPP + 0] = color[0];
                    self.framebuffer[cell_offset + i * BPP + 1] = color[1];
                    self.framebuffer[cell_offset + i * BPP + 2] = color[2];
                    // Skip the alpha channel, it's always 255.
                }
                // ...then replicate that scanline into the remaining
                // CELL_SIZE-1 rows of the cell with a bulk copy.
                for i in 1..CELL_SIZE {
                    self.framebuffer.copy_within(
                        cell_offset..cell_offset + CELL_SIZE * BPP,
                        cell_offset + i * pitch
                    );
                }
                // Advance past this cell and the grid gap to its right.
                cell_offset += (CELL_SIZE + GRID_SIZE) * BPP;
            }
            // Advance past the cell rows already covered by the bulk copies
            // (CELL_SIZE-1 scanlines) plus the horizontal grid line below.
            cell_offset += (CELL_SIZE-1 + GRID_SIZE) * pitch;
        }
    }
}
use std::time::{SystemTime, Duration, UNIX_EPOCH};
use segment::Metric;
#[macro_use]
extern crate criterion;
use criterion::{Criterion, BatchSize};
/// Synthetic metric with two string tags and two string fields.
/// NOTE(review): presumably exercises string handling/escaping in the
/// derive-generated `build` — confirm with the segment crate's docs.
#[derive(Metric)]
#[segment(measurement="stringtest")]
pub struct StringTest {
    // Timestamp as a duration since the Unix epoch.
    #[segment(time)]
    timestamp: Duration,
    #[segment(tag)]
    tag0: String,
    #[segment(tag)]
    tag1: String,
    #[segment(field)]
    field0: String,
    #[segment(field)]
    field1: String,
}
// Define a metric..
/// Per-process statistics metric: 2 tags + 52 numeric fields, used to
/// benchmark serialization of a wide, realistic measurement.
/// NOTE(review): the field set mirrors a process-stats collector's output —
/// confirm the intended schema before extending it.
#[derive(Metric)]
#[segment(measurement="procstat")]
pub struct ProcStats {
    // Timestamp as a duration since the Unix epoch.
    #[segment(time)]
    timestamp: Duration,
    // Tags are renamed to the wire-level key via `rename`.
    #[segment(tag, rename="exe")]
    executable: String,
    #[segment(tag, rename="process_name")]
    process: String,
    #[segment(field)]
    cpu_time: u32,
    #[segment(field)]
    cpu_time_guest: f64,
    #[segment(field)]
    cpu_time_guest_nice: f64,
    #[segment(field)]
    cpu_time_idle: f64,
    #[segment(field)]
    cpu_time_iowait: f64,
    #[segment(field)]
    cpu_time_irq: f64,
    #[segment(field)]
    cpu_time_nice: f64,
    #[segment(field)]
    cpu_time_soft_irq: f64,
    #[segment(field)]
    cpu_time_steal: f64,
    #[segment(field)]
    cpu_time_stolen: f64,
    #[segment(field)]
    cpu_time_system: f64,
    #[segment(field)]
    cpu_time_user: f64,
    #[segment(field)]
    cpu_usage: f64,
    #[segment(field)]
    involuntary_context_switches: u32,
    #[segment(field)]
    memory_data: u64,
    #[segment(field)]
    memory_locked: u64,
    #[segment(field)]
    memory_rss: u64,
    #[segment(field)]
    memory_stack: u64,
    #[segment(field)]
    memory_swap: u64,
    #[segment(field)]
    memory_vms: u64,
    #[segment(field)]
    nice_priority: u32,
    #[segment(field)]
    num_fds: u32,
    #[segment(field)]
    num_threads: u32,
    #[segment(field)]
    pid: u32,
    #[segment(field)]
    read_bytes: u64,
    #[segment(field)]
    read_count: u64,
    #[segment(field)]
    realtime_priority: u32,
    #[segment(field)]
    rlimit_cpu_time_hard: u32,
    #[segment(field)]
    rlimit_cpu_time_soft: u32,
    #[segment(field)]
    rlimit_file_locks_hard: u32,
    #[segment(field)]
    rlimit_file_locks_soft: u32,
    #[segment(field)]
    rlimit_memory_data_hard: u32,
    #[segment(field)]
    rlimit_memory_data_soft: u32,
    #[segment(field)]
    rlimit_memory_locked_hard: u32,
    #[segment(field)]
    rlimit_memory_locked_soft: u32,
    #[segment(field)]
    rlimit_memory_rss_hard: u32,
    #[segment(field)]
    rlimit_memory_rss_soft: u32,
    #[segment(field)]
    rlimit_memory_stack_hard: u32,
    #[segment(field)]
    rlimit_memory_stack_soft: u32,
    #[segment(field)]
    rlimit_memory_vms_hard: u32,
    #[segment(field)]
    rlimit_memory_vms_soft: u32,
    #[segment(field)]
    rlimit_nice_priority_hard: u32,
    #[segment(field)]
    rlimit_nice_priority_soft: u32,
    #[segment(field)]
    rlimit_num_fds_hard: u32,
    #[segment(field)]
    rlimit_num_fds_soft: u32,
    #[segment(field)]
    rlimit_realtime_priority_hard: u32,
    #[segment(field)]
    rlimit_realtime_priority_soft: u32,
    #[segment(field)]
    rlimit_signals_pending_hard: u32,
    #[segment(field)]
    rlimit_signals_pending_soft: u32,
    #[segment(field)]
    signals_pending: u32,
    #[segment(field)]
    voluntary_context_switches: u32,
    #[segment(field)]
    write_bytes: u32,
    #[segment(field)]
    write_count: u32,
}
/// Benchmarks the derive-generated `build` serializer: once against a wide
/// `ProcStats` metric (2 tags, 52 fields) and once against a small
/// string-heavy `StringTest` metric. Buffers are pre-sized and reused via
/// `iter_batched_ref` so allocation is excluded from the measurement.
fn criterion_benchmark(c: &mut Criterion) {
    let t = SystemTime::now().duration_since(UNIX_EPOCH).expect("unable to generate now()");
    // Fixture values resembling a snapshot of a real `bash` process.
    let procstats = ProcStats {
        timestamp: t,
        executable: "bash".to_string(),
        process: "bash".to_string(),
        cpu_time: 0,
        cpu_time_guest: 0_f64,
        cpu_time_guest_nice: 0_f64,
        cpu_time_idle: 0_f64,
        cpu_time_iowait: 0_f64,
        cpu_time_irq: 0_f64,
        cpu_time_nice: 0_f64,
        cpu_time_soft_irq: 0_f64,
        cpu_time_steal: 0_f64,
        cpu_time_stolen: 0_f64,
        cpu_time_system: 0_f64,
        cpu_time_user: 0.02_f64,
        cpu_usage: 0_f64,
        involuntary_context_switches: 2,
        memory_data: 1576960,
        memory_locked: 0,
        memory_rss: 5103616,
        memory_stack: 139264,
        memory_swap: 0,
        memory_vms: 21659648,
        nice_priority: 20,
        num_fds: 4,
        num_threads: 1,
        pid: 29417,
        read_bytes: 0,
        read_count: 259,
        realtime_priority: 0,
        rlimit_cpu_time_hard: 2147483647,
        rlimit_cpu_time_soft: 2147483647,
        rlimit_file_locks_hard: 2147483647,
        rlimit_file_locks_soft: 2147483647,
        rlimit_memory_data_hard: 2147483647,
        rlimit_memory_data_soft: 2147483647,
        rlimit_memory_locked_hard: 65536,
        rlimit_memory_locked_soft: 65536,
        rlimit_memory_rss_hard: 2147483647,
        rlimit_memory_rss_soft: 2147483647,
        rlimit_memory_stack_hard: 2147483647,
        rlimit_memory_stack_soft: 8388608,
        rlimit_memory_vms_hard: 2147483647,
        rlimit_memory_vms_soft: 2147483647,
        rlimit_nice_priority_hard: 0,
        rlimit_nice_priority_soft: 0,
        rlimit_num_fds_hard: 4096,
        rlimit_num_fds_soft: 1024,
        rlimit_realtime_priority_hard: 0,
        rlimit_realtime_priority_soft: 0,
        rlimit_signals_pending_hard: 78994,
        rlimit_signals_pending_soft: 78994,
        signals_pending: 0,
        voluntary_context_switches: 42,
        write_bytes: 106496,
        write_count: 35,
    };
    c.bench_function("procstats-2tags-52fields", move |b| {
        b.iter_batched_ref(
            // Fresh, pre-sized buffer per batch; cleared after each build
            // so every iteration starts from an empty (but allocated) String.
            || String::with_capacity(3048),
            |buffer: &mut String| {
                procstats.build(buffer);
                buffer.clear();
            },
            BatchSize::SmallInput
        )
    });
    // Tags/fields deliberately contain spaces and newlines to exercise
    // whatever escaping `build` performs.
    let strings = StringTest{
        timestamp: t,
        tag0: "Taggy Tag Tag".to_string(),
        tag1: "tag\ntag tag".to_string(),
        field0: "Hello world, hello world".to_string(),
        field1: "hello \n World, hello there".to_string(),
    };
    c.bench_function("stringfields", move |b| {
        b.iter_batched_ref(
            || String::with_capacity(128),
            |buffer: &mut String| {
                strings.build(buffer);
                buffer.clear();
            },
            BatchSize::SmallInput
        )
    });
}
// Register the benchmark and generate the harness's main().
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
|
/*! Python `property` descriptor class.
*/
use super::{PyStrRef, PyType, PyTypeRef};
use crate::common::lock::PyRwLock;
use crate::function::{IntoFuncArgs, PosArgs};
use crate::{
class::PyClassImpl,
function::{FuncArgs, PySetterValue},
types::{Constructor, GetDescriptor, Initializer},
AsObject, Context, Py, PyObject, PyObjectRef, PyPayload, PyRef, PyResult, VirtualMachine,
};
/// The `property` descriptor object: optional fget/fset/fdel callables,
/// plus the doc string and the name assigned via `__set_name__`.
/// All slots are behind `PyRwLock` because properties are mutated after
/// construction (e.g. the `.setter`/`.getter` builder methods).
#[pyclass(module = false, name = "property", traverse)]
#[derive(Debug)]
pub struct PyProperty {
    getter: PyRwLock<Option<PyObjectRef>>,
    setter: PyRwLock<Option<PyObjectRef>>,
    deleter: PyRwLock<Option<PyObjectRef>>,
    doc: PyRwLock<Option<PyObjectRef>>,
    name: PyRwLock<Option<PyObjectRef>>,
}
impl PyPayload for PyProperty {
    /// Every `PyProperty` instance belongs to the interpreter's `property` type.
    fn class(ctx: &Context) -> &'static Py<PyType> {
        ctx.types.property_type
    }
}
/// Arguments accepted by `property(fget=None, fset=None, fdel=None,
/// doc=None, name=None)`; all are optional and may be passed positionally
/// or by keyword (`any`).
#[derive(FromArgs)]
pub struct PropertyArgs {
    #[pyarg(any, default)]
    fget: Option<PyObjectRef>,
    #[pyarg(any, default)]
    fset: Option<PyObjectRef>,
    #[pyarg(any, default)]
    fdel: Option<PyObjectRef>,
    #[pyarg(any, default)]
    doc: Option<PyObjectRef>,
    #[pyarg(any, default)]
    name: Option<PyStrRef>,
}
impl GetDescriptor for PyProperty {
    /// `property.__get__`: returns the property object itself when accessed
    /// on the class (obj is None), otherwise calls fget with the instance.
    fn descr_get(
        zelf_obj: PyObjectRef,
        obj: Option<PyObjectRef>,
        _cls: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult {
        let (zelf, obj) = Self::_unwrap(&zelf_obj, obj, vm)?;
        if vm.is_none(&obj) {
            // Class-level access: hand back the descriptor unchanged.
            Ok(zelf_obj)
        } else if let Some(getter) = zelf.getter.read().as_ref() {
            getter.call((obj,), vm)
        } else {
            // `.to_owned()` for consistency with descr_set's error paths.
            Err(vm.new_attribute_error("property has no getter".to_owned()))
        }
    }
}
#[pyclass(with(Constructor, Initializer, GetDescriptor), flags(BASETYPE))]
impl PyProperty {
    // Descriptor methods
    /// `__set__`/`__delete__` slot: dispatches to fset or fdel depending on
    /// whether a value is being assigned or deleted.
    #[pyslot]
    fn descr_set(
        zelf: &PyObject,
        obj: PyObjectRef,
        value: PySetterValue,
        vm: &VirtualMachine,
    ) -> PyResult<()> {
        let zelf = zelf.try_to_ref::<Self>(vm)?;
        match value {
            // `instance.prop = value`
            PySetterValue::Assign(value) => {
                if let Some(setter) = zelf.setter.read().as_ref() {
                    setter.call((obj, value), vm).map(drop)
                } else {
                    Err(vm.new_attribute_error("property has no setter".to_owned()))
                }
            }
            // `del instance.prop`
            PySetterValue::Delete => {
                if let Some(deleter) = zelf.deleter.read().as_ref() {
                    deleter.call((obj,), vm).map(drop)
                } else {
                    Err(vm.new_attribute_error("property has no deleter".to_owned()))
                }
            }
        }
    }
    /// Python-visible `__set__`, delegating to the slot above.
    #[pymethod]
    fn __set__(
        zelf: PyObjectRef,
        obj: PyObjectRef,
        value: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<()> {
        Self::descr_set(&zelf, obj, PySetterValue::Assign(value), vm)
    }
    /// Python-visible `__delete__`, delegating to the slot above.
    #[pymethod]
    fn __delete__(zelf: PyObjectRef, obj: PyObjectRef, vm: &VirtualMachine) -> PyResult<()> {
        Self::descr_set(&zelf, obj, PySetterValue::Delete, vm)
    }
    // Access functions
    /// The stored getter callable (`property.fget`).
    #[pygetset]
    fn fget(&self) -> Option<PyObjectRef> {
        self.getter.read().clone()
    }
    /// The stored setter callable (`property.fset`).
    #[pygetset]
    fn fset(&self) -> Option<PyObjectRef> {
        self.setter.read().clone()
    }
    /// The stored deleter callable (`property.fdel`).
    #[pygetset]
    fn fdel(&self) -> Option<PyObjectRef> {
        self.deleter.read().clone()
    }
    // doc_getter/doc_setter back the `__doc__` getset registered in init().
    fn doc_getter(&self) -> Option<PyObjectRef> {
        self.doc.read().clone()
    }
    fn doc_setter(&self, value: Option<PyObjectRef>) {
        *self.doc.write() = value;
    }
    /// `__set_name__(owner, name)`: records the attribute name this property
    /// was bound to; raises TypeError on the wrong argument count.
    #[pymethod(magic)]
    fn set_name(&self, args: PosArgs, vm: &VirtualMachine) -> PyResult<()> {
        let func_args = args.into_args(vm);
        let func_args_len = func_args.args.len();
        let (_owner, name): (PyObjectRef, PyObjectRef) = func_args.bind(vm).map_err(|_e| {
            vm.new_type_error(format!(
                "__set_name__() takes 2 positional arguments but {} were given",
                func_args_len
            ))
        })?;
        *self.name.write() = Some(name);
        Ok(())
    }
    // Python builder functions
    /// `prop.getter(f)`: returns a copy of the property with fget replaced
    /// (keeping the existing fset/fdel), as used by `@prop.getter`.
    #[pymethod]
    fn getter(
        zelf: PyRef<Self>,
        getter: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        PyProperty {
            getter: PyRwLock::new(getter.or_else(|| zelf.fget())),
            setter: PyRwLock::new(zelf.fset()),
            deleter: PyRwLock::new(zelf.fdel()),
            doc: PyRwLock::new(None),
            name: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, zelf.class().to_owned())
    }
    /// `prop.setter(f)`: copy with fset replaced (`@prop.setter`).
    #[pymethod]
    fn setter(
        zelf: PyRef<Self>,
        setter: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        PyProperty {
            getter: PyRwLock::new(zelf.fget()),
            setter: PyRwLock::new(setter.or_else(|| zelf.fset())),
            deleter: PyRwLock::new(zelf.fdel()),
            doc: PyRwLock::new(None),
            name: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, zelf.class().to_owned())
    }
    /// `prop.deleter(f)`: copy with fdel replaced (`@prop.deleter`).
    #[pymethod]
    fn deleter(
        zelf: PyRef<Self>,
        deleter: Option<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        PyProperty {
            getter: PyRwLock::new(zelf.fget()),
            setter: PyRwLock::new(zelf.fset()),
            deleter: PyRwLock::new(deleter.or_else(|| zelf.fdel())),
            doc: PyRwLock::new(None),
            name: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, zelf.class().to_owned())
    }
    /// `__isabstractmethod__`: true when either fget or fset is abstract;
    /// attribute-lookup failures are treated as "not abstract".
    #[pygetset(magic)]
    fn isabstractmethod(&self, vm: &VirtualMachine) -> PyObjectRef {
        let getter_abstract = match self.getter.read().to_owned() {
            Some(getter) => getter
                .get_attr("__isabstractmethod__", vm)
                .unwrap_or_else(|_| vm.ctx.new_bool(false).into()),
            _ => vm.ctx.new_bool(false).into(),
        };
        let setter_abstract = match self.setter.read().to_owned() {
            Some(setter) => setter
                .get_attr("__isabstractmethod__", vm)
                .unwrap_or_else(|_| vm.ctx.new_bool(false).into()),
            _ => vm.ctx.new_bool(false).into(),
        };
        vm._or(&setter_abstract, &getter_abstract)
            .unwrap_or_else(|_| vm.ctx.new_bool(false).into())
    }
    /// Setter for `__isabstractmethod__`: forwards the flag to fget only
    /// (no-op when there is no getter).
    #[pygetset(magic, setter)]
    fn set_isabstractmethod(&self, value: PyObjectRef, vm: &VirtualMachine) -> PyResult<()> {
        if let Some(getter) = self.getter.read().to_owned() {
            getter.set_attr("__isabstractmethod__", value, vm)?;
        }
        Ok(())
    }
}
impl Constructor for PyProperty {
    type Args = FuncArgs;
    /// `property.__new__`: allocates an empty property; the arguments are
    /// ignored here and consumed by `Initializer::init` instead.
    fn py_new(cls: PyTypeRef, _args: FuncArgs, vm: &VirtualMachine) -> PyResult {
        PyProperty {
            getter: PyRwLock::new(None),
            setter: PyRwLock::new(None),
            deleter: PyRwLock::new(None),
            doc: PyRwLock::new(None),
            name: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, cls)
        .map(Into::into)
    }
}
impl Initializer for PyProperty {
    type Args = PropertyArgs;
    /// `property.__init__`: stores fget/fset/fdel/doc/name, overwriting any
    /// previous values (re-initialisation resets all slots).
    fn init(zelf: PyRef<Self>, args: Self::Args, _vm: &VirtualMachine) -> PyResult<()> {
        *zelf.getter.write() = args.fget;
        *zelf.setter.write() = args.fset;
        *zelf.deleter.write() = args.fdel;
        *zelf.doc.write() = args.doc;
        *zelf.name.write() = args.name.map(|a| a.as_object().to_owned());
        Ok(())
    }
}
/// Registers the `property` type's methods and its `__doc__` getset with
/// the interpreter context.
pub(crate) fn init(context: &Context) {
    PyProperty::extend_class(context, context.types.property_type);
    // This is a bit unfortunate, but this instance attribute overlaps with the
    // class __doc__ string..
    extend_class!(context, context.types.property_type, {
        "__doc__" => context.new_getset(
            "__doc__",
            context.types.property_type,
            PyProperty::doc_getter,
            PyProperty::doc_setter,
        ),
    });
}
|
use super::{
boolean::{False, True},
private::Sealed,
};
/// Type-level equality over the type-level booleans `True`/`False`.
///
/// The `Sealed` supertrait prevents downstream impls, keeping the truth
/// table below exhaustive.
pub trait Equality<L>: Sealed {
    /// Whether `Self` equals `L` (a type-level boolean).
    type Output;
}
/// Syntactic sugar for Equality: `Eql<L, R>` compares `L` against `R`.
pub type Eql<L, R> = <L as Equality<R>>::Output;
// Truth table: matching operands yield True, mixed operands yield False.
impl Equality<False> for False {
    type Output = True;
}
impl Equality<True> for False {
    type Output = False;
}
impl Equality<False> for True {
    type Output = False;
}
impl Equality<True> for True {
    type Output = True;
}
|
use crate::system::System;
/// A value paired with the `System` it belongs to, when one is available.
/// NOTE(review): the semantics of `system == None` are not visible here —
/// confirm with the callers in the `system` module.
#[derive(Clone, Debug)]
pub struct Context<'a, T> {
    pub value: T,
    pub system: Option<&'a System>,
}
|
use std::collections::HashMap;
use std::ops::Range;
/// 2-D histogram: counts points falling into square grid cells of side
/// `scale` (in input coordinates).
#[derive(Debug, Clone)]
pub struct Bucket {
    scale: f32,
    /// Cell index -> number of points counted into that cell.
    grids: HashMap<(i32, i32), usize>,
    // Bounding box of all cell indices seen so far (both start at (0, 0),
    // so the origin cell is always inside the reported bounds).
    min: (i32, i32),
    max: (i32, i32),
}
impl Bucket {
    /// Creates an empty bucket grid; `scale` is the side length of one
    /// grid cell in input coordinates.
    pub fn new(scale: f32) -> Bucket {
        Bucket {
            scale,
            grids: HashMap::new(),
            min: (0, 0),
            max: (0, 0),
        }
    }
    /// Inserts every coordinate pair yielded by `iter`, short-circuiting
    /// on (and returning) the first error.
    pub fn try_insert<E>(
        &mut self,
        iter: impl Iterator<Item = Result<(f32, f32), E>>,
    ) -> Result<(), E> {
        for coords in iter {
            self.insert(coords?);
        }
        Ok(())
    }
    /// Counts `coords` into its grid cell and grows the tracked bounds.
    pub fn insert(&mut self, coords: (f32, f32)) {
        let c = (
            (coords.0 / self.scale).floor() as i32,
            (coords.1 / self.scale).floor() as i32,
        );
        self.min = (self.min.0.min(c.0), self.min.1.min(c.1));
        self.max = (self.max.0.max(c.0), self.max.1.max(c.1));
        // Entry API: one lookup instead of and_modify + or_insert.
        *self.grids.entry(c).or_insert(0) += 1;
    }
    /// Merges two buckets of identical scale: counts are summed and the
    /// bounding boxes are unioned. Panics if the scales differ.
    pub fn merge(self, other: Bucket) -> Bucket {
        assert_eq!(self.scale, other.scale);
        let scale = self.scale;
        let min = (self.min.0.min(other.min.0), self.min.1.min(other.min.1));
        let max = (self.max.0.max(other.max.0), self.max.1.max(other.max.1));
        let mut grids = self.grids;
        for (pos, cnt) in other.grids.into_iter() {
            *grids.entry(pos).or_insert(0) += cnt;
        }
        Bucket {
            scale,
            grids,
            min,
            max,
        }
    }
    /// Overrides the tracked bounds with an explicit rectangle, given as
    /// the (x_min, y_max) and (x_max, y_min) corners in input coordinates.
    pub fn set_boundary(&mut self, (x_min, y_max): (f32, f32), (x_max, y_min): (f32, f32)) {
        let x_min = (x_min / self.scale).floor() as i32;
        let x_max = (x_max / self.scale).floor() as i32;
        let y_min = (y_min / self.scale).floor() as i32;
        let y_max = (y_max / self.scale).floor() as i32;
        self.min = (x_min, y_min);
        self.max = (x_max, y_max);
    }
    /// Width/height of the bounding box in grid cells (inclusive bounds).
    pub fn dim(&self) -> (u32, u32) {
        (
            (self.max.0 - self.min.0) as u32 + 1,
            (self.max.1 - self.min.1) as u32 + 1,
        )
    }
    /// The grid-cell side length this bucket was built with.
    pub fn scale(&self) -> f32 {
        self.scale
    }
    /// Largest per-cell count, or 0 when the bucket is empty.
    pub fn max_count(&self) -> usize {
        self.grids.values().copied().max().unwrap_or(0)
    }
    /// Count in the cell at image-space position (x, y), where (0, 0) is
    /// the minimum corner of the bounding box.
    pub fn get(&self, x: u32, y: u32) -> usize {
        self.grids.get(&self.bucket_idx(x, y)).copied().unwrap_or(0)
    }
    /// The input-coordinate rectangle covered by image-space cell (x, y).
    pub fn real_pixcel(&self, x: u32, y: u32) -> Pixcel {
        let (x, y) = self.bucket_idx(x, y);
        Pixcel::new(x, y, self.scale)
    }
    /// Translates image-space (x, y) to the absolute grid index.
    fn bucket_idx(&self, x: u32, y: u32) -> (i32, i32) {
        // NOTE(review): assumes x/y fit in i32 — the u32 -> i32 `as` casts
        // wrap for values above i32::MAX.
        (x as i32 + self.min.0, y as i32 + self.min.1)
    }
}
/// Real-coordinate extent of a single grid cell.
/// (Spelling "Pixcel" kept: it is the public API name.)
pub struct Pixcel {
    // Horizontal extent [start, end) in input coordinates.
    x_range: Range<f32>,
    // Vertical extent [start, end) in input coordinates.
    y_range: Range<f32>,
}
impl Pixcel {
fn new(x: i32, y: i32, scale: f32) -> Pixcel {
let x_min = (x as f32) * scale;
let y_min = (y as f32) * scale;
let x_max = x_min + scale;
let y_max = y_min + scale;
Pixcel {
x_range: Range {
start: x_min,
end: x_max,
},
y_range: Range {
start: y_min,
end: y_max,
},
}
}
pub fn x_contains_mult_of(&self, base: f32) -> bool {
contains_mult_of(&self.x_range, base)
}
pub fn y_contains_mult_of(&self, base: f32) -> bool {
contains_mult_of(&self.y_range, base)
}
}
/// True when `range` contains a multiple of `base`; the start is treated
/// inclusively (with an EPSILON tolerance), the end exclusively.
///
/// Panics when `base` is not strictly positive.
fn contains_mult_of(range: &Range<f32>, base: f32) -> bool {
    assert!(base > 0.0, "base must be positive.");
    // Largest multiple of `base` that does not exceed range.start.
    let below = (range.start / base).floor() * base;
    // Either range.start is itself (approximately) that multiple, or the
    // next multiple up must land strictly before range.end.
    below >= range.start - std::f32::EPSILON || below + base < range.end
}
#[cfg(test)]
mod test {
    // Boundary tests for `contains_mult_of`: exact hits at range.start,
    // interior hits, just-outside misses, and negative ranges.
    use super::*;
    use std::ops::Range;
    #[test]
    fn contains_eq_start_1() {
        let range = Range {
            start: 1.3,
            end: 1.4,
        };
        assert!(contains_mult_of(&range, 1.3));
    }
    #[test]
    fn contains_eq_start_2() {
        // 2.6 is exactly 2 * 1.3, so the start itself is a multiple.
        let range = Range {
            start: 2.6,
            end: 2.7,
        };
        assert!(contains_mult_of(&range, 1.3));
    }
    #[test]
    fn contains_in_range() {
        let range = Range {
            start: 1.3,
            end: 1.5,
        };
        assert!(contains_mult_of(&range, 1.4));
    }
    #[test]
    fn not_contains_lt_start() {
        // 2 * 1.29999 = 2.59998 < 2.6; no multiple inside [1.3, 1.4).
        let range = Range {
            start: 1.3,
            end: 1.4,
        };
        assert!(!contains_mult_of(&range, 1.29999));
    }
    #[test]
    fn not_contains_eq_end() {
        // The end is exclusive: a multiple at exactly range.end does not count.
        let range = Range {
            start: 1.3,
            end: 1.4,
        };
        assert!(!contains_mult_of(&range, 1.4));
    }
    #[test]
    fn not_contains_gt_end() {
        let range = Range {
            start: 1.3,
            end: 1.4,
        };
        assert!(!contains_mult_of(&range, 1.40001));
    }
    #[test]
    fn contains_multiple() {
        let range = Range {
            start: 1.3,
            end: 100.0,
        };
        assert!(contains_mult_of(&range, 1.5));
    }
    #[test]
    fn contains_eq_ne_start() {
        // Negative range: -1.3 is exactly -1 * 1.3.
        let range = Range {
            start: -1.3,
            end: -1.2,
        };
        assert!(contains_mult_of(&range, 1.3));
    }
    #[test]
    fn not_contains_eq_ne_end() {
        // -1.2 would only be hit at the exclusive end.
        let range = Range {
            start: -1.3,
            end: -1.2,
        };
        assert!(!contains_mult_of(&range, 1.2));
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use]
extern crate rocket;
#[cfg(test)]
mod e2e;
pub mod jobs;
pub mod routes;
use routes::root;
/// Builds the Rocket instance from `routes::root` and blocks on launch.
fn main() {
    root().launch();
}
|
use std::ops::Deref;
use board::grid::Grid;
use board::stats::stone_score::StoneScore;
use board::stats::stone_stats::StoneStats;
use board::stones::group::GoGroup;
use board::stones::grouprc::GoGroupRc;
use board::stones::stone::Stone;
use graph_lib::topology::Topology;
use crate::board::go_state::GoState;
/// Read/update aggregate board statistics per stone colour.
pub trait FullStats {
    /// Score for `stone`: its territory plus the opponent's captured count.
    fn score(&self, stone: Stone) -> StoneScore;
    /// Raw counter snapshot (groups, stones, captured) for `stone`.
    fn stats(&self, stone: Stone) -> StoneStats;
    /// Books `group` as captured and returns its points to the empty pool.
    fn capture(&mut self, group: &GoGroupRc);
    /// Sets the territory counter for `stone` to `n`.
    fn set_territory(&mut self, stone: Stone, n: usize);
}
/// Per-colour stone statistics plus the current round number.
#[derive(Debug, Copy, Clone)]
pub struct BoardStats {
    black: StoneStats,
    white: StoneStats,
    // Stats for empty points (the `Stone::None` colour).
    none: StoneStats,
    pub round: usize,
}
impl FullStats for BoardStats {
    /// Score for `stone`: own territory plus the stones the opponent lost.
    fn score(&self, stone: Stone) -> StoneScore {
        let own = self.for_stone(stone);
        let opponent = self.for_stone(stone.switch());
        StoneScore {
            stone,
            territory: own.territory,
            captures: opponent.captured,
        }
    }
    /// Counter snapshot for `stone`; territory is always reported as 0 here.
    fn stats(&self, stone: Stone) -> StoneStats {
        let counters = self.for_stone(stone);
        StoneStats {
            stone,
            groups: counters.groups,
            stones: counters.stones,
            captured: counters.captured,
            territory: 0,
        }
    }
    /// Books `group` as captured: bump the owner's captured count (unless
    /// it is an empty-point group), remove the group from the totals, and
    /// count one more empty-point group.
    fn capture(&mut self, group: &GoGroupRc) {
        let (stone, size) = {
            let g = group.borrow();
            (g.stone, g.stones())
        };
        if stone != Stone::None {
            self.for_stone_mut(stone).captured += size;
        }
        self.rem_group(group.borrow().deref());
        self.for_stone_mut(Stone::None).groups += 1;
    }
    /// Overwrites the territory counter for `stone`.
    fn set_territory(&mut self, stone: Stone, n: usize) {
        self.for_stone_mut(stone).territory = n;
    }
}
impl BoardStats {
    /// Delegates to the per-colour asserts; panics on any mismatch.
    pub fn assert_eq(&self, other: &BoardStats) {
        self.black.assert_eq(&other.black);
        self.white.assert_eq(&other.white);
        self.none.assert_eq(&other.none);
    }
    /// Stats for an untouched goban: every vertex is empty and all empty
    /// points form a single group.
    pub fn new(goban: &Grid) -> BoardStats {
        let mut stats = BoardStats {
            black: StoneStats::init(Stone::Black),
            white: StoneStats::init(Stone::White),
            none: StoneStats::init(Stone::None),
            round: 0,
        };
        let empty = stats.for_stone_mut(Stone::None);
        empty.stones = goban.vertex_number();
        empty.groups = 1;
        stats
    }
    /// Recomputes all three colours' stats from the current board state.
    pub fn from_board(board: &GoState) -> BoardStats {
        BoardStats {
            black: StoneStats::new(Stone::Black, board),
            white: StoneStats::new(Stone::White, board),
            none: StoneStats::new(Stone::None, board),
            round: board.stats.round,
        }
    }
    /// Shared-ref accessor for the counters of one colour.
    #[inline]
    pub fn for_stone(&self, stone: Stone) -> &StoneStats {
        match stone {
            Stone::Black => &self.black,
            Stone::White => &self.white,
            Stone::None => &self.none,
        }
    }
    /// Mutable accessor for the counters of one colour.
    #[inline]
    pub fn for_stone_mut(&mut self, stone: Stone) -> &mut StoneStats {
        match stone {
            Stone::Black => &mut self.black,
            Stone::White => &mut self.white,
            Stone::None => &mut self.none,
        }
    }
    /// Accounts for a newly formed group; coloured stones are taken out of
    /// the empty-point pool.
    pub fn add_group(&mut self, group: &GoGroup) {
        let n = group.stones();
        let stone = group.stone;
        let target = self.for_stone_mut(stone);
        target.groups += 1;
        target.stones += n;
        match stone {
            Stone::None => {}
            _ => self.none.stones -= n,
        }
    }
    /// Reverses `add_group` for a removed group.
    pub fn rem_group(&mut self, group: &GoGroup) {
        let n = group.stones();
        let stone = group.stone;
        let target = self.for_stone_mut(stone);
        target.groups -= 1;
        target.stones -= n;
        match stone {
            Stone::None => {}
            _ => self.none.stones += n,
        }
    }
}
|
//! ### PE-Specific Compilation Checks:
//!
//! * Binary Type
//! * Compiler Runtime
//! * Debug Info Stripped
//!
//! ### Exploit Mitigations:
//!
//! * Data Execution Prevention (DEP / NX)
//! * Dynamic Base
//! * Structured Exception Handling (SEH)
//! * Code Integrity
//! * Control Flow Guard
use goblin::pe::characteristic::*;
use goblin::pe::PE;
use serde_json::json;
use crate::check::{Analyze, GenericMap};
use crate::errors::BinResult;
use crate::rules;
impl Analyze for PE<'_> {
    /// Collects PE compilation facts: binary type (DLL/EXE), debug-strip
    /// state, and the detected compiler runtime.
    fn run_compilation_checks(&self, bytes: &[u8]) -> BinResult<GenericMap> {
        let mut comp_map = GenericMap::new();
        // supported: DLL or EXE
        let bintype: &str = match self.is_lib {
            true => "DLL",
            false => "EXE",
        };
        comp_map.insert("Binary Type".to_string(), json!(bintype));
        // debug info stripped
        // NOTE(review): `matches!(x & FLAG, 0)` is true when FLAG is NOT set,
        // so this reports true when the DEBUG_STRIPPED bit is absent — the
        // polarity looks inverted relative to the key name; confirm intent.
        let debug_stripped: bool = matches!(
            self.header.coff_header.characteristics & IMAGE_FILE_DEBUG_STRIPPED,
            0
        );
        comp_map.insert("Debug Stripped".to_string(), json!(debug_stripped));
        // pattern match for compilers
        let runtime = self.detect_compiler_runtime(rules::PE_COMPILER_RULES, bytes)?;
        comp_map.insert("Compiler Runtime".to_string(), json!(runtime));
        Ok(comp_map)
    }
    /// Reads DllCharacteristics/COFF flags and reports exploit mitigations.
    /// Returns an empty map when the optional header is missing.
    ///
    /// NOTE(review): throughout this method `matches!(flags & MASK, 0)` is
    /// true when the bit is CLEAR (e.g. 0x0100 corresponds to NX_COMPAT per
    /// the Microsoft PE spec), so several results look inverted relative to
    /// their key names; verify each polarity against upstream intent.
    fn run_mitigation_checks(&self) -> GenericMap {
        let mut mitigation_checks: GenericMap = GenericMap::new();
        if let Some(optional_header) = self.header.optional_header {
            let dll_chars: u16 = optional_header.windows_fields.dll_characteristics;
            let image_chars: u16 = self.header.coff_header.characteristics;
            // context independent mitigations
            let dep: bool = matches!(dll_chars & 0x0100, 0);
            mitigation_checks.insert("Data Execution Protection (DEP)".to_string(), json!(dep));
            let dynamic_base: bool = matches!(dll_chars & 0x0040, 0);
            mitigation_checks.insert("Dynamic Base".to_string(), json!(dynamic_base));
            // 0x0400: NO_SEH — note the negation when stored.
            let seh: bool = matches!(dll_chars & 0x0400, 0);
            mitigation_checks.insert(
                "Structured Exception Handling (SEH)".to_string(),
                json!(!seh),
            );
            let isolation_aware: bool = matches!(dll_chars & 0x0200, 0);
            mitigation_checks.insert(
                "Isolation-Aware Execution".to_string(),
                json!(!isolation_aware),
            );
            // context dependent mitigations: some don't work without existence of other checks
            let aslr: bool = dynamic_base && matches!(image_chars & IMAGE_FILE_RELOCS_STRIPPED, 0);
            mitigation_checks.insert(
                "Address Space Layout Randomization (ASLR)".to_string(),
                json!(aslr),
            );
            let high_entropy: bool = aslr && matches!(dll_chars & 0x0020, 0);
            mitigation_checks.insert("High Entropy".to_string(), json!(high_entropy));
            let cfg: bool = aslr && matches!(dll_chars & 0x4000, 0);
            mitigation_checks.insert("Control Flow Guard (CFG)".to_string(), json!(cfg));
            let code_integrity: bool = aslr && matches!(dll_chars & 0x0080, 0);
            mitigation_checks.insert("Code Integrity".to_string(), json!(code_integrity));
        }
        mitigation_checks
    }
}
|
/// Derives key material (generators, key pairs, address encoding) from a
/// master seed. Comments translated from the original Chinese.
pub trait GeneratorI {
    /*
    Needs: seed
    1. seed (16 bytes) + seq (4 bytes), with seq = 0
    2. SHA512Half
    3. Compare with the curve's maximum scalar; if larger, seq += 1 and
       repeat from step 1
    4. Result: the 32-byte private generator
    */
    fn private_generator(&self, masterphrase: &Vec<u8>) -> Vec<u8>;
    /*
    Needs: private generator, curve base point G
    1. Multiply the private generator by G
    2. Obtain the curve point (x, y)
    3. Compress to (02 + X when Y is even) or (03 + X when Y is odd)
    4. Result: the 33-byte public generator
    */
    fn public_generator(&self, private_generator: &Vec<u8>) -> Vec<u8>;
    /*
    Needs: private generator, public generator
    1. public generator + 4-byte seq + 4-byte subseq, with seq = subseq = 0
    2. SHA512Half
    3. Compare with the curve's maximum scalar; if larger, subseq += 1 and
       repeat from step 1
    4. Obtain a 32-byte hash
    5. Add it to the private generator; the 32-byte sum is the result
    */
    fn generate_private_key(&self, private_generator: &Vec<u8>, public_generator: &Vec<u8>) -> Vec<u8>;
    /*
    Needs: public generator
    1. public generator + 4-byte seq + 4-byte subseq, with seq = subseq = 0
    2. SHA512Half
    3. Compare with the curve's maximum scalar; if larger, subseq += 1 and
       repeat from step 1
    4. Obtain a 32-byte hash
    5. Multiply it by the curve base point G
    6. Obtain curve point A(x, y)
    7. Decompress the public generator into curve point B(x, y)
    8. A + B = C
    9. Compress C into the 33-byte public key
    */
    fn generate_public_key(&self, public_generator: &Vec<u8>) -> Vec<u8>;
    /*
    Needs: public key
    1. SHA256 the public key
    2. Then RIPEMD160, giving 20 bytes
    3. Prepend a '0' byte to the 20 bytes
    4. SHA256 to obtain a hash
    5. Take the first 4 bytes as the checksum
    6. Concatenate: '0' byte + 20 bytes + 4 checksum bytes
    7. Prepend 0 to keep the number positive
    8. Map to base58, 34 characters
    */
    fn human_readable_public_key(&self, public_key: &Vec<u8>) -> String;
    /*
    Needs: public generator
    1. public generator + 4-byte seq + 4-byte subseq, with seq = subseq = 0
    2. SHA512Half
    3. Compare with the curve's maximum scalar; if larger, subseq += 1 and
       repeat from step 1
    4. Result: the 32-byte hash
    */
    fn public_key_root_generator(&self, public_generator: &Vec<u8>) -> Vec<u8>;
} |
// Write-side accessor types for the C10IFCR register (generated-style code).
#[doc = "Writer for register C10IFCR"]
pub type W = crate::W<u32, super::C10IFCR>;
#[doc = "Register C10IFCR `reset()`'s with value 0"]
impl crate::ResetValue for super::C10IFCR {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // All clear-interrupt flags idle at 0.
        0
    }
}
#[doc = "Write proxy for field `CTEIF10`"]
pub struct CTEIF10_W<'a> {
    w: &'a mut W,
}
impl<'a> CTEIF10_W<'a> {
    // Bit position of this field within the register.
    const OFFSET: u32 = 0;
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field, then OR the requested value back in.
        let mask = 0x01 << Self::OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) << Self::OFFSET) & mask);
        self.w
    }
}
#[doc = "Write proxy for field `CCTCIF10`"]
pub struct CCTCIF10_W<'a> {
    w: &'a mut W,
}
impl<'a> CCTCIF10_W<'a> {
    // Bit position of this field within the register.
    const OFFSET: u32 = 1;
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field, then OR the requested value back in.
        let mask = 0x01 << Self::OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) << Self::OFFSET) & mask);
        self.w
    }
}
#[doc = "Write proxy for field `CBRTIF10`"]
pub struct CBRTIF10_W<'a> {
    w: &'a mut W,
}
impl<'a> CBRTIF10_W<'a> {
    // Bit position of this field within the register.
    const OFFSET: u32 = 2;
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field, then OR the requested value back in.
        let mask = 0x01 << Self::OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) << Self::OFFSET) & mask);
        self.w
    }
}
#[doc = "Write proxy for field `CBTIF10`"]
pub struct CBTIF10_W<'a> {
    w: &'a mut W,
}
impl<'a> CBTIF10_W<'a> {
    // Bit position of this field within the register.
    const OFFSET: u32 = 3;
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field, then OR the requested value back in.
        let mask = 0x01 << Self::OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) << Self::OFFSET) & mask);
        self.w
    }
}
#[doc = "Write proxy for field `CLTCIF10`"]
pub struct CLTCIF10_W<'a> {
    w: &'a mut W,
}
impl<'a> CLTCIF10_W<'a> {
    // Bit position of this field within the register.
    const OFFSET: u32 = 4;
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear the field, then OR the requested value back in.
        let mask = 0x01 << Self::OFFSET;
        self.w.bits = (self.w.bits & !mask) | ((u32::from(value) << Self::OFFSET) & mask);
        self.w
    }
}
// One write-proxy constructor per field; each proxy mutates `self.bits`
// and hands `&mut W` back so writes can be chained.
impl W {
    #[doc = "Bit 0 - Channel x clear transfer error interrupt flag Writing a 1 into this bit clears TEIFx in the MDMA_ISRy register"]
    #[inline(always)]
    pub fn cteif10(&mut self) -> CTEIF10_W {
        CTEIF10_W { w: self }
    }
    #[doc = "Bit 1 - Clear Channel transfer complete interrupt flag for channel x Writing a 1 into this bit clears CTCIFx in the MDMA_ISRy register"]
    #[inline(always)]
    pub fn cctcif10(&mut self) -> CCTCIF10_W {
        CCTCIF10_W { w: self }
    }
    #[doc = "Bit 2 - Channel x clear block repeat transfer complete interrupt flag Writing a 1 into this bit clears BRTIFx in the MDMA_ISRy register"]
    #[inline(always)]
    pub fn cbrtif10(&mut self) -> CBRTIF10_W {
        CBRTIF10_W { w: self }
    }
    #[doc = "Bit 3 - Channel x Clear block transfer complete interrupt flag Writing a 1 into this bit clears BTIFx in the MDMA_ISRy register"]
    #[inline(always)]
    pub fn cbtif10(&mut self) -> CBTIF10_W {
        CBTIF10_W { w: self }
    }
    #[doc = "Bit 4 - CLear buffer Transfer Complete Interrupt Flag for channel x Writing a 1 into this bit clears TCIFx in the MDMA_ISRy register"]
    #[inline(always)]
    pub fn cltcif10(&mut self) -> CLTCIF10_W {
        CLTCIF10_W { w: self }
    }
}
|
#![cfg_attr(not(feature = "std"), no_std)]
use frame_support::{decl_module, decl_storage, ensure, decl_event, decl_error, dispatch, traits::Get,};
//use frame_system::{self as system, ensure_root, ensure_signed};
use frame_system::{self as system, ensure_signed};
use primitives::{Vpp, ApprovalStatus, Parliament};
use sp_std::prelude::*;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
/// The pallet's configuration trait.
pub trait Trait: system::Trait {
    /// The overarching event type.
    type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
    /// Handle used to update the approval status of a VPP.
    type Vpp: Vpp<Self::AccountId>;
    /// Upper bound on how many parliament members may be stored.
    type MaxMemberCount: Get<usize>;
}
// This pallet's storage items.
decl_storage! {
    trait Store for Module<T: Trait> as TemplateModule {
        // Current parliament members, in insertion order.
        Members get(fn get_members): Vec<T::AccountId>;
        // NOTE(review): only written by the commented-out `init_superowner`
        // dispatchable below, so it is currently never set on-chain.
        SuperOwner get(fn get_superowner): T::AccountId;
    }
}
// The pallet's events
decl_event!(
    pub enum Event<T> where AccountId = <T as system::Trait>::AccountId {
        // A member was added via `force_add_member`. [member]
        ForceToAddMember(AccountId),
        // A member was removed via `force_remove_member`. [member]
        ForceToRemoveMember(AccountId),
    }
);
// The pallet's errors
decl_error! {
    pub enum Error for Module<T: Trait> {
        // The account is already in `Members`.
        AlreadyMember,
        // The caller or target account is not in `Members`.
        NotMember,
        // Adding would exceed `MaxMemberCount`.
        TooManyMembers,
    }
}
// The pallet's dispatchable functions.
decl_module! {
    /// The module declaration.
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        // Initializing errors
        type Error = Error<T>;
        // Initializing events
        fn deposit_event() = default;
        /*
        #[weight = 0]
        pub fn init_superowner(
            origin
        ) -> dispatch:: DispatchResult {
            let sender = ensure_signed(origin)?;
            SuperOwner::<T>::put(sender);
            Ok(())
        }
        */
        // Approves the VPP at `idx` owned by `who`.
        // Only current parliament members may call this.
        #[weight = 0]
        pub fn accept_vpp(
            origin,
            who: T::AccountId,
            idx: u64
        ) -> dispatch::DispatchResult {
            let sender = ensure_signed(origin)?;
            let members = Self::get_members();
            ensure!(members.contains(&sender), Error::<T>::NotMember);
            T::Vpp::update_status(&who, idx, ApprovalStatus::Passed)?;
            Ok(())
        }
        // Denies the VPP at `idx` owned by `who`.
        // Only current parliament members may call this.
        #[weight = 0]
        pub fn deny_vpp(
            origin,
            who: T::AccountId,
            idx: u64
        ) -> dispatch::DispatchResult {
            let sender = ensure_signed(origin)?;
            let members = Self::get_members();
            ensure!(members.contains(&sender), Error::<T>::NotMember);
            T::Vpp::update_status(&who, idx, ApprovalStatus::Denied)?;
            Ok(())
        }
        // Adds `new_member`, enforcing uniqueness and `MaxMemberCount`.
        // NOTE(review): only `ensure_signed` is checked — ANY signed account
        // can add members; there is no root/superowner check. Confirm intent.
        #[weight = 0]
        pub fn force_add_member(
            origin,
            new_member: T::AccountId
        ) -> dispatch::DispatchResult {
            ensure_signed(origin)?;
            let members = Self::get_members();
            ensure!(members.len() < T::MaxMemberCount::get(), Error::<T>::TooManyMembers);
            ensure!(!members.contains(&new_member), Error::<T>::AlreadyMember);
            Members::<T>::append(&new_member);
            Self::deposit_event(RawEvent::ForceToAddMember(new_member));
            Ok(())
        }
        // Removes `old_member` if present.
        // NOTE(review): same missing-authorization concern as above.
        #[weight = 0]
        pub fn force_remove_member(
            origin,
            old_member: T::AccountId
        ) -> dispatch::DispatchResult {
            ensure_signed(origin)?;
            ensure!(Self::get_members().contains(&old_member), Error::<T>::NotMember);
            <Members<T>>::mutate(|mem| mem.retain(|m| m != &old_member));
            Self::deposit_event(RawEvent::ForceToRemoveMember(old_member));
            Ok(())
        }
    }
}
/// Lets other pallets query parliament membership.
impl<T: Trait> Parliament<T::AccountId> for Module<T> {
    fn is_member(who: &T::AccountId) -> bool {
        // `contains` expects `&T::AccountId`, which `who` already is —
        // no extra borrow needed.
        Self::get_members().contains(who)
    }
}
|
use failure::{Error, Fail};
use log::*;
use rand::seq::SliceRandom;
use rand::thread_rng;
use regex::Regex;
use std::borrow::BorrowMut;
use std::ffi::OsStr;
use std::io::{BufRead, BufReader};
use std::net;
use std::process::{Child, Command, Stdio};
use which::which;
#[cfg(windows)]
use winreg::{enums::HKEY_LOCAL_MACHINE, RegKey};
use super::waiting_helpers::{wait_for_mut, WaitOptions};
/// A running Chrome instance plus the WebSocket URL of its DevTools
/// endpoint. The child process is killed when this struct is dropped
/// (via `TemporaryProcess`'s Drop impl).
pub struct Process {
    // Held only so Drop kills Chrome when the Process goes away.
    _child_process: TemporaryProcess,
    pub debug_ws_url: String,
}
// Failure modes when spawning Chrome and discovering its debug endpoint.
#[derive(Debug, Fail)]
enum ChromeLaunchError {
    #[fail(display = "Chrome launched, but didn't give us a WebSocket URL before we timed out")]
    PortOpenTimeout,
    #[fail(display = "There are no available ports between 8000 and 9000 for debugging")]
    NoAvailablePorts,
    #[fail(display = "The chosen debugging port is already in use")]
    DebugPortInUse,
}
/// Looks up the Chrome install path from the Windows registry's App Paths
/// key; None when the key, value, or conversion fails.
#[cfg(windows)]
fn get_chrome_path_from_registry() -> Option<std::path::PathBuf> {
    let hklm = RegKey::predef(HKEY_LOCAL_MACHINE);
    let key = hklm
        .open_subkey("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\chrome.exe")
        .ok()?;
    let path: String = key.get_value("").ok()?;
    Some(std::path::PathBuf::from(path))
}
// A child process that is killed (and then reaped) on drop; errors are
// ignored because the process may already have exited.
struct TemporaryProcess(Child);
impl Drop for TemporaryProcess {
    fn drop(&mut self) {
        info!("Killing Chrome. PID: {}", self.0.id());
        self.0.kill().and_then(|_| self.0.wait()).ok();
    }
}
/// Represents the way in which Chrome is run. By default it will search for a Chrome
/// binary on the system, use an available port for debugging, and start in headless mode.
#[derive(Builder)]
pub struct LaunchOptions<'a> {
    /// Determines whether to run a headless version of the browser. Defaults to true.
    #[builder(default = "true")]
    headless: bool,
    /// Launch the browser with a specific debugging port.
    #[builder(default = "None")]
    port: Option<u16>,
    /// Path for Chrome or Chromium.
    ///
    /// If unspecified, the crate will try to automatically detect a suitable binary.
    #[builder(default = "self.default_executable()?")]
    path: std::path::PathBuf,
    /// A list of Chrome extensions to load.
    ///
    /// An extension should be a path to a folder containing the extension code.
    /// CRX files cannot be used directly and must be first extracted.
    ///
    /// Note that Chrome does not support loading extensions in headless-mode.
    /// See https://bugs.chromium.org/p/chromium/issues/detail?id=706008#c5
    #[builder(default)]
    extensions: Vec<&'a OsStr>,
}
impl<'a> LaunchOptionsBuilder<'a> {
    /// Finds a Chrome/Chromium binary: first on `$PATH`, then in per-OS
    /// well-known locations (macOS app bundle, Windows registry).
    fn default_executable(&self) -> Result<std::path::PathBuf, String> {
        // TODO Look at $BROWSER and if it points to a chrome binary
        // $BROWSER may also provide default arguments, which we may
        // or may not override later on.
        for app in &["google-chrome-stable", "chromium"] {
            if let Ok(path) = which(app) {
                return Ok(path);
            }
        }
        #[cfg(target_os = "macos")]
        {
            let default_paths =
                &["/Applications/Google Chrome.app/Contents/MacOS/Google Chrome"][..];
            for path in default_paths {
                if std::path::Path::new(path).exists() {
                    return Ok(path.into());
                }
            }
        }
        #[cfg(windows)]
        {
            if let Some(path) = get_chrome_path_from_registry() {
                if path.exists() {
                    return Ok(path);
                }
            }
        }
        Err("Could not auto detect a chrome executable".to_string())
    }
}
impl Process {
    /// Spawns Chrome and retries (up to 10 times) until it reports a
    /// DevTools WebSocket URL, relaunching on a fresh random port whenever
    /// no explicit `port` was requested.
    pub fn new(launch_options: LaunchOptions) -> Result<Self, Error> {
        info!("Trying to start Chrome");
        let mut process = Self::start_process(&launch_options)?;
        info!("Started Chrome. PID: {}", process.0.id());
        let url;
        let mut attempts = 0;
        loop {
            // NOTE(review): after 10 retries this reports NoAvailablePorts
            // even when the underlying failure was something else — confirm.
            if attempts > 10 {
                return Err(ChromeLaunchError::NoAvailablePorts {}.into());
            }
            match Self::ws_url_from_output(process.0.borrow_mut()) {
                Ok(debug_ws_url) => {
                    url = debug_ws_url;
                    break;
                }
                Err(error) => {
                    trace!("Problem getting WebSocket URL from Chrome: {}", error);
                    // With an explicit port there is nothing new to try;
                    // otherwise relaunch (a new random port gets picked).
                    if launch_options.port.is_none() {
                        process = Self::start_process(&launch_options)?;
                    } else {
                        return Err(error);
                    }
                }
            }
            trace!(
                "Trying again to find available debugging port. Attempts: {}",
                attempts
            );
            attempts += 1;
        }
        Ok(Self {
            _child_process: process,
            debug_ws_url: url,
        })
    }
    /// Builds the Chrome command line (debug port, throwaway profile dir,
    /// optional headless flag, extensions) and spawns the process with
    /// stderr piped so `ws_url_from_output` can scrape it.
    fn start_process(launch_options: &LaunchOptions) -> Result<TemporaryProcess, Error> {
        // Use the caller's port or grab a random free one in 8000..9000.
        let debug_port = if let Some(port) = launch_options.port {
            port
        } else {
            get_available_port().ok_or(ChromeLaunchError::NoAvailablePorts {})?
        };
        let port_option = format!("--remote-debugging-port={}", debug_port);
        // NOTE: picking a random data dir so that a new browser instance is
        // launched each time (see man google-chrome).
        // NOTE(review): `user_data_dir` is a TempDir local to this function,
        // so the profile directory is deleted when this function returns,
        // while Chrome is still running — confirm this is intentional.
        let user_data_dir = ::tempfile::Builder::new()
            .prefix("rust-headless-chrome-profile")
            .tempdir()?;
        let data_dir_option = format!("--user-data-dir={}", user_data_dir.path().to_str().unwrap());
        trace!("Chrome will have profile: {}", data_dir_option);
        let mut args = vec![
            port_option.as_str(),
            "--verbose",
            "--no-first-run",
            data_dir_option.as_str(),
            // "--window-size=1920,1080"
        ];
        if launch_options.headless {
            args.extend(&["--headless"]);
        }
        let extension_args: Vec<String> = launch_options
            .extensions
            .iter()
            .map(|e| format!("--load-extension={}", e.to_str().unwrap()))
            .collect();
        args.extend(extension_args.iter().map(String::as_str));
        // stderr is piped: Chrome prints the DevTools URL there.
        let process = TemporaryProcess(
            Command::new(&launch_options.path)
                .args(&args)
                .stderr(Stdio::piped())
                .spawn()?,
        );
        Ok(process)
    }
    /// Polls Chrome's stderr (up to 5 s, 10 ms intervals) for the DevTools
    /// "listening on" line and extracts the WebSocket URL from it.
    fn ws_url_from_output(child_process: &mut Child) -> Result<String, Error> {
        let port_taken = "Address already in use";
        let re = Regex::new(r"listening on (.*/devtools/browser/.*)$").unwrap();
        let extract = |text: &str| -> Option<String> {
            let caps = re.captures(text);
            let cap = &caps?[1];
            Some(cap.into())
        };
        let chrome_output_result = wait_for_mut(
            || {
                let my_stderr = BufReader::new(child_process.stderr.as_mut().unwrap());
                for line in my_stderr.lines() {
                    let chrome_output = line.ok()?;
                    trace!("Chrome output: {}", chrome_output);
                    // Give up immediately when Chrome reports the port taken.
                    if chrome_output.contains(port_taken) {
                        return None;
                    }
                    let answer = extract(&chrome_output);
                    if answer.is_some() {
                        return answer;
                    }
                }
                None
            },
            WaitOptions {
                timeout_ms: 5000,
                sleep_ms: 10,
            },
        );
        // NOTE(review): on success `output` is the extracted URL, which can
        // never contain `port_taken` (the closure returns None in that
        // case), so the DebugPortInUse branch looks unreachable — confirm.
        if let Ok(output) = chrome_output_result {
            if output.contains(port_taken) {
                trace!(
                    "Chrome is complaining about the debugging port already being in use: {}",
                    output
                );
                Err(ChromeLaunchError::DebugPortInUse {}.into())
            } else {
                Ok(output)
            }
        } else {
            Err(ChromeLaunchError::PortOpenTimeout {}.into())
        }
    }
}
/// Picks a random free TCP port on localhost in [8000, 9000), or None
/// when every candidate is taken.
fn get_available_port() -> Option<u16> {
    let mut candidates: Vec<u16> = (8000..9000).collect();
    candidates.shuffle(&mut thread_rng());
    candidates.into_iter().find(|&port| port_is_available(port))
}
/// True when `port` can currently be bound on the loopback interface.
fn port_is_available(port: u16) -> bool {
    match net::TcpListener::bind(("127.0.0.1", port)) {
        Ok(_listener) => true,
        Err(_) => false,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::thread;
    // Reads this process's direct children from /proc (Linux only).
    #[cfg(target_os = "linux")]
    fn current_child_pids() -> Vec<i32> {
        use std::fs::File;
        use std::io::prelude::*;
        let current_pid = std::process::id();
        let mut current_process_children_file = File::open(format!(
            "/proc/{}/task/{}/children",
            current_pid, current_pid
        ))
        .unwrap();
        let mut child_pids = String::new();
        current_process_children_file
            .read_to_string(&mut child_pids)
            .unwrap();
        child_pids
            .split_whitespace()
            .map(|pid_str| pid_str.parse::<i32>().unwrap())
            .collect()
    }
    // Dropping the Process must kill and reap the Chrome child.
    #[test]
    #[cfg(target_os = "linux")]
    fn kills_process_on_drop() {
        env_logger::try_init().unwrap_or(());
        {
            let _chrome =
                &mut super::Process::new(LaunchOptionsBuilder::default().build().unwrap()).unwrap();
        }
        let child_pids = current_child_pids();
        assert!(child_pids.is_empty());
    }
    // Overlapping launches on random ports must all come up successfully.
    #[test]
    fn launch_multiple_non_headless_instances() {
        env_logger::try_init().unwrap_or(());
        let mut handles = Vec::new();
        for _ in 0..10 {
            let handle = thread::spawn(|| {
                // these sleeps are to make it more likely the chrome startups will overlap
                std::thread::sleep(std::time::Duration::from_millis(10));
                let chrome =
                    super::Process::new(LaunchOptionsBuilder::default().build().unwrap()).unwrap();
                std::thread::sleep(std::time::Duration::from_millis(100));
                chrome.debug_ws_url.clone()
            });
            handles.push(handle);
        }
        for handle in handles {
            handle.join().unwrap();
        }
    }
    // Sequential launches must each get their own live instance.
    #[test]
    fn no_instance_sharing() {
        env_logger::try_init().unwrap_or(());
        let mut handles = Vec::new();
        for _ in 0..10 {
            let chrome = super::Process::new(
                super::LaunchOptionsBuilder::default()
                    .headless(true)
                    .build()
                    .unwrap(),
            )
            .unwrap();
            handles.push(chrome);
        }
    }
}
|
extern crate pcre;
use std::fs::File;
use std::io::BufReader;
use std::io::BufRead;
use std::collections::HashMap;
use std::collections::VecDeque;
use pcre::Pcre;
// The kinds of gates that can drive a wire.
#[derive(Clone, Debug)]
enum Operation {
    // Pass the single input straight through.
    Nop,
    Not,
    And,
    Or,
    LShift,
    RShift,
}
// One line of the circuit description: `inputs` combined via `op` drive
// the wire named `output`.
#[derive(Clone, Debug)]
struct Gate {
    // Wire names or numeric literals feeding this gate.
    inputs: Vec<String>,
    op: Operation,
    // Name of the wire this gate drives.
    output: String,
}
/// Parses one circuit line into a `Gate`.
///
/// Patterns are tried in order: numeric literal, plain wire alias, NOT,
/// then a binary operator; panics (unwrap) on a line matching none of them.
fn parse(line: &str) -> Gate {
    // BUG FIX: the literal pattern previously used `(\w)+`, whose capture
    // group holds only the LAST repetition — a multi-character output wire
    // like "aa" was truncated to "a". `(\w+)` captures the whole name.
    let mut sig = Pcre::compile(r"^(\d+) -> (\w+)").unwrap();
    let mut nop = Pcre::compile(r"^(\w+) -> (\w+)").unwrap();
    let mut no = Pcre::compile(r"^NOT (\w+) -> (\w+)").unwrap();
    let mut op = Pcre::compile(r"^(\w+) (\w+) (\w+) -> (\w+)").unwrap();
    let msig = sig.exec(line);
    let mnop = nop.exec(line);
    let mno = no.exec(line);
    let mop = op.exec(line);
    if msig.is_some() {
        // "123 -> x": a literal signal is modelled as a Nop gate whose
        // input is the literal (resolved by `simulate`'s pre-seeded map).
        let msig = msig.unwrap();
        Gate {
            inputs: vec![msig.group(1).to_string()],
            op: Operation::Nop,
            output: msig.group(2).to_string(),
        }
    } else if mnop.is_some() {
        // "a -> b": plain alias.
        let mnop = mnop.unwrap();
        Gate {
            inputs: vec![mnop.group(1).to_string()],
            op: Operation::Nop,
            output: mnop.group(2).to_string(),
        }
    } else if mno.is_some() {
        // "NOT a -> b".
        let mno = mno.unwrap();
        Gate {
            inputs: vec![mno.group(1).to_string()],
            op: Operation::Not,
            output: mno.group(2).to_string(),
        }
    } else {
        // "a OP b -> c".
        let mop = mop.unwrap();
        Gate {
            inputs: vec![mop.group(1).to_string(), mop.group(3).to_string()],
            op: match mop.group(2) {
                "AND" => Operation::And,
                "OR" => Operation::Or,
                "LSHIFT" => Operation::LShift,
                "RSHIFT" => Operation::RShift,
                _ => unreachable!(),
            },
            output: mop.group(4).to_string(),
        }
    }
}
/// Evaluates the gate network until no gate is left pending, returning the
/// final wire-name -> value map. `overrides` pins wires to fixed values.
fn simulate(gates: &[Gate], overrides: Vec<(&str, u16)>) -> HashMap<String, u16> {
    let mut signals: HashMap<String, u16> = HashMap::new();
    // Pre-seed every 16-bit literal as its own "wire" so literal gate
    // inputs resolve through the same map as named wires.
    for value in 0u32..(1 << 16) {
        signals.insert(value.to_string(), value as u16);
    }
    for &(name, value) in &overrides {
        signals.insert(name.to_string(), value);
    }
    // Round-robin: a gate whose inputs aren't ready yet goes to the back.
    let mut pending: VecDeque<Gate> = gates.iter().cloned().collect();
    while let Some(gate) = pending.pop_front() {
        // An already-driven wire (e.g. an override) is never recomputed.
        if signals.contains_key(&gate.output) {
            continue;
        }
        if !gate.inputs.iter().all(|input| signals.contains_key(input)) {
            pending.push_back(gate);
            continue;
        }
        let value = match gate.op {
            Operation::Nop => signals[&gate.inputs[0]],
            Operation::Not => !signals[&gate.inputs[0]],
            Operation::And => signals[&gate.inputs[0]] & signals[&gate.inputs[1]],
            Operation::Or => signals[&gate.inputs[0]] | signals[&gate.inputs[1]],
            // Shift amounts are literal text, parsed directly.
            Operation::LShift => {
                signals[&gate.inputs[0]] << gate.inputs[1].parse::<usize>().unwrap()
            }
            Operation::RShift => {
                signals[&gate.inputs[0]] >> gate.inputs[1].parse::<usize>().unwrap()
            }
        };
        signals.insert(gate.output, value);
    }
    signals
}
/// Day 7 driver: evaluates the circuit from inputs/day07.in, prints wire
/// "a", then re-runs with wire "b" overridden to that value and prints
/// the new "a".
fn main() {
    let f = File::open("inputs/day07.in").unwrap();
    let file = BufReader::new(&f);
    let mut gates: Vec<_> = Vec::new();
    for line in file.lines() {
        let line = line.unwrap();
        gates.push(parse(&line));
    }
    // Part 1: plain simulation.
    let signals = simulate(&gates, Vec::new());
    println!("{}", signals["a"]);
    // Part 2: feed part 1's answer back into wire "b".
    let overrides: Vec<_> = vec![("b", signals["a"])];
    let signals = simulate(&gates, overrides);
    println!("{}", signals["a"]);
}
|
// http://devernay.free.fr/hacks/chip8/C8TECH10.HTM
extern crate sdl2;
extern crate rand;
extern crate byteorder;
use sdl2::rect::Rect;
use sdl2::pixels::Color;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
use std::time::Instant;
use std::fs::File;
use std::env;
use rand::Rng;
use byteorder::{BigEndian, ReadBytesExt};
// Call-stack depth (generous; exceeds what typical ROMs need).
const STACK_SIZE: usize = 256;
// Total addressable RAM.
const MEM_SIZE: usize = 4096;
// Monochrome display resolution in CHIP-8 pixels.
const DISPLAY_MAX_X: usize = 64;
const DISPLAY_MAX_Y: usize = 32;
// Screen pixels rendered per CHIP-8 pixel.
const DISPLAY_PIXEL: usize = 10;
// 16-key hex keypad.
const KEYBOARD_SIZE: usize = 16;
// Timer tick rates in Hz.
const DELAY_TIMER_HZ: u8 = 60;
const SOUND_TIMER_HZ: u8 = 60;
const COLOR_WHITE: Color = Color::RGB(255, 255, 255);
const COLOR_BLACK: Color = Color::RGB(0, 0, 0);
// Complete CHIP-8 machine state.
struct CHIP8 {
    // NOTE(review): CHIP-8 V registers are 8-bit in the reference spec;
    // u16 here is wider than that — confirm it is intentional.
    vx: [u16; 16], // V registers
    i: u16, pc: u16, // I & program counter
    stack: [u16; STACK_SIZE], sp: u8,
    // Delay timer.
    dt: u8,
    // Sound timer.
    st: u8,
    ram: [u8; MEM_SIZE],
    // Framebuffer, indexed [x][y]; one cell per CHIP-8 pixel.
    display: [[u8; DISPLAY_MAX_Y]; DISPLAY_MAX_X],
    // Pressed state of each keypad key.
    keyboard: [bool; KEYBOARD_SIZE],
    // Presumably -1 when not blocked waiting for a key press — confirm
    // against the (not fully visible) `step` implementation.
    input_lock: i8,
}
// Opcode
// The highest nibble of the instruction selects the instruction family.
fn get_op(op: u16) -> u16 {
    op >> 12
}
// A 12-bit value, the lowest 12 bits of the instruction
fn get_addr(op: u16) -> u16 {
    op & 0b0000_1111_1111_1111
}
// A 4-bit value, the lowest 4 bits of the instruction
fn get_nibble(op: u16) -> u16 {
    op & 0xF
}
// A 4-bit value, the lower 4 bits of the high byte of the instruction
fn get_4bit_h(op: u16) -> u16 {
    (op >> 8) & 0xF
}
// A 4-bit value, the upper 4 bits of the low byte of the instruction
fn get_4bit_l(op: u16) -> u16 {
    (op >> 4) & 0xF
}
// An 8-bit value, the lowest 8 bits of the instruction
fn get_byte(op: u16) -> u16 {
    op & 0xFF
}
// Reasons instruction evaluation can fail.
#[derive(Debug)]
enum EvalError {
    // The opcode did not match any implemented instruction.
    UnknownOpcode,
}
impl CHIP8 {
/// All-zero machine state; call `init` afterwards to load the font sprites.
pub fn new() -> CHIP8 {
    CHIP8 {
        vx: [0; 16],
        i: 0, pc: 0,
        stack: [0; STACK_SIZE], sp: 0,
        dt: 0,
        st: 0,
        ram: [0; MEM_SIZE],
        display: [[0; DISPLAY_MAX_Y]; DISPLAY_MAX_X],
        keyboard: [false; KEYBOARD_SIZE],
        // Presumably -1 means "not waiting for input" — confirm in `step`.
        input_lock: -1
    }
}
/// Loads the built-in hex font (sprites 0-F, 5 bytes each) into RAM
/// starting at address 0x0.
fn init(&mut self) {
    const FONT: [[u8; 5]; 16] = [
        [0xf0, 0x90, 0x90, 0x90, 0xf0], // 0
        [0x20, 0x60, 0x20, 0x20, 0x70], // 1
        [0xf0, 0x10, 0xf0, 0x80, 0xf0], // 2
        [0xf0, 0x10, 0xf0, 0x10, 0xf0], // 3
        [0x90, 0x90, 0xf0, 0x10, 0x10], // 4
        [0xf0, 0x80, 0xf0, 0x10, 0xf0], // 5
        [0xf0, 0x80, 0xf0, 0x90, 0xf0], // 6
        [0xf0, 0x10, 0x20, 0x40, 0x40], // 7
        [0xf0, 0x90, 0xf0, 0x90, 0xf0], // 8
        [0xf0, 0x90, 0xf0, 0x10, 0xf0], // 9
        [0xf0, 0x90, 0xf0, 0x90, 0x90], // a
        [0xe0, 0x90, 0xe0, 0x90, 0xe0], // b
        [0xf0, 0x80, 0x80, 0x80, 0xf0], // c
        [0xe0, 0x90, 0x90, 0x90, 0xe0], // d
        [0xf0, 0x80, 0xf0, 0x80, 0xf0], // e
        [0xf0, 0x80, 0xf0, 0x80, 0x80], // f
    ];
    // Flatten the 16 x 5 table into consecutive RAM bytes from address 0.
    for (addr, byte) in FONT.iter().flatten().enumerate() {
        self.ram[addr] = *byte;
    }
}
/// Copies the ROM file at `rom_path` into RAM starting at `offset`,
/// reading it as big-endian 16-bit words.
///
/// NOTE(review): a ROM with an odd byte count silently loses its final
/// byte (read_u16 fails at EOF), and an oversized ROM panics on the RAM
/// index — confirm callers guarantee neither happens.
fn lrom(&mut self, rom_path: &String, offset: usize) {
    let mut rom_file = File::open(rom_path).unwrap();
    let mut i: usize = 0;
    // Read words until EOF (or any I/O error) breaks the loop.
    'rom_read: loop {
        let inst: u16 = match rom_file.read_u16::<BigEndian>() {
            Ok(res) => res,
            Err(_e) => break 'rom_read,
        };
        let idx = i + offset;
        // Store the word back as two bytes, high byte first.
        self.ram[idx] = (inst >> 8) as u8;
        self.ram[idx + 1] = (inst & 0xFF) as u8;
        i += 2
    }
}
/// Fetch, decode and execute one instruction at `pc`.
///
/// Returns `Err(EvalError::UnknownOpcode)` for opcodes this interpreter
/// does not implement yet (notably the whole 8xy_ ALU group and most Fx__
/// timer ops).
///
/// Fixes relative to the previous revision:
/// - RET no longer re-executes the CALL instruction (infinite-loop bug).
/// - DRW draws at the *values* of Vx/Vy, MSB-first, with screen wrap.
/// - ADD Vx, byte wraps instead of panicking on overflow.
/// - Fx29 indexes the font by the value in Vx, not the register number.
/// - Fx65 loads V0..=Vx inclusive, per the CHIP-8 spec.
fn step(&mut self) -> Result<(), EvalError> {
    print!("pc [{:x?}]: ", self.pc);
    // Instructions are stored big-endian: high byte first.
    let inst =
        ((self.ram[self.pc as usize] as u16) << 8) |
        self.ram[(self.pc as usize) + 1] as u16;
    let op = get_op(inst);
    let mut increment_after = true;
    match op {
        0x0 => {
            if inst == 0x00e0 {
                // CLS: clear the display.
                self.display = [[0; DISPLAY_MAX_Y]; DISPLAY_MAX_X];
                println!("cls");
            } else if inst == 0x00ee {
                // RET: pop the return address. CALL pushed the address of
                // the CALL instruction itself, so we must still advance pc
                // afterwards; keeping `increment_after` set avoids
                // re-executing the CALL forever (previous bug).
                self.pc = self.stack[self.sp as usize];
                self.sp -= 1;
                println!("ret");
            } else {
                // SYS addr: legacy machine-code jump; treated as JP here.
                let addr = get_addr(inst);
                self.pc = addr;
                increment_after = false;
                println!("sys {:x?}", addr);
            }
        }
        0x1 => {
            // JP addr
            let addr = get_addr(inst);
            self.pc = addr;
            increment_after = false;
            println!("jp {:x?}", addr);
        }
        0x2 => {
            // CALL addr: push the current pc (the CALL site), then jump.
            let addr = get_addr(inst);
            self.sp += 1;
            self.stack[self.sp as usize] = self.pc;
            self.pc = addr;
            increment_after = false;
            println!("call {:x?}", addr);
        }
        0x3 => {
            // SE Vx, byte: skip next instruction if equal.
            let reg = get_4bit_h(inst);
            let lit = get_byte(inst);
            if self.vx[reg as usize] == lit {
                self.pc += 2;
            }
            println!("se v{:x?} {:x?}", reg, lit);
        }
        0x4 => {
            // SNE Vx, byte: skip next instruction if not equal.
            let reg = get_4bit_h(inst);
            let lit = get_byte(inst);
            if self.vx[reg as usize] != lit {
                self.pc += 2;
            }
            println!("sne v{:x?} {:x?}", reg, lit);
        }
        0x5 => {
            // SE Vx, Vy
            let regx = get_4bit_h(inst);
            let regy = get_4bit_l(inst);
            if self.vx[regx as usize] == self.vx[regy as usize] {
                self.pc += 2;
            }
            println!("se v{:x?} v{:x?}", regx, regy);
        }
        0x6 => {
            // LD Vx, byte
            let reg = get_4bit_h(inst) as usize;
            let lit = get_byte(inst);
            self.vx[reg] = lit;
            println!("ld v{:x?} {:x?}", reg, lit);
        }
        0x7 => {
            // ADD Vx, byte: CHIP-8 addition wraps; the previous `+=`
            // panicked on overflow in debug builds.
            let reg = get_4bit_h(inst);
            let lit = get_byte(inst);
            self.vx[reg as usize] = self.vx[reg as usize].wrapping_add(lit);
            println!("add v{:x?} {:x?}", reg, lit);
        }
        0x9 => {
            // SNE Vx, Vy
            let regx = get_4bit_h(inst);
            let regy = get_4bit_l(inst);
            if self.vx[regx as usize] != self.vx[regy as usize] {
                self.pc += 2;
            }
            println!("sne v{:x?} v{:x?}", regx, regy);
        }
        0xa => {
            // LD I, addr
            let addr = get_addr(inst);
            self.i = addr;
            println!("ld i {:x?}", addr);
        }
        0xc => {
            // RND Vx, byte: random byte AND mask. gen_range's upper bound
            // is exclusive, so 256 covers the full 0..=255 byte range
            // (the previous 255 could never produce 0xff).
            let mut rng = rand::thread_rng();
            let n = rng.gen_range(0, 256);
            let reg = get_4bit_h(inst) as usize;
            let lit = get_byte(inst);
            self.vx[reg] = lit & n;
            println!("rnd v{:x?} {:x?}", reg, lit);
        }
        0xd => {
            // DRW Vx, Vy, n: XOR an n-byte sprite from RAM[i..] onto the
            // display at the *values* held in Vx/Vy (the old code used the
            // register numbers themselves). VF is set on any pixel that is
            // erased (collision). Coordinates wrap around screen edges.
            let regx = get_4bit_h(inst);
            let regy = get_4bit_l(inst);
            let nibble = get_nibble(inst);
            let x0 = self.vx[regx as usize] as usize;
            let y0 = self.vx[regy as usize] as usize;
            self.vx[0xf] = 0;
            let sprite = &self.ram[
                self.i as usize .. (self.i + nibble) as usize
            ];
            for (row, byte) in sprite.iter().enumerate() {
                for bit in 0 .. 8 {
                    let x = (x0 + bit) % DISPLAY_MAX_X;
                    let y = (y0 + row) % DISPLAY_MAX_Y;
                    // Bit 7 is the leftmost pixel (the old LSB-first scan
                    // drew every sprite mirrored).
                    let pixel = (byte >> (7 - bit)) & 0x1 as u8;
                    let prev = self.display[x][y];
                    let cur = prev ^ pixel;
                    self.display[x][y] = cur;
                    if prev == 1 && cur == 0 {
                        self.vx[0xf] = 1;
                    }
                }
            }
            println!("drw v{:x?} v{:x?} {:x?}", regx, regy, nibble);
        }
        0xe => {
            let reg = get_4bit_h(inst);
            let lit = get_byte(inst);
            if lit == 0x9e {
                // SKP Vx: skip if the key in Vx is pressed.
                if self.keyboard[self.vx[reg as usize] as usize] {
                    self.pc += 2;
                }
            } else if lit == 0xa1 {
                // SKNP Vx: skip if the key in Vx is NOT pressed.
                if !self.keyboard[self.vx[reg as usize] as usize] {
                    self.pc += 2;
                }
            } else {
                println!(
                    "pc: {:x?}, opcode: {:x?}, {:x?}",
                    self.pc,
                    op,
                    inst
                );
                return Err(EvalError::UnknownOpcode)
            }
        }
        0xf => {
            let lit = get_byte(inst);
            let reg = get_4bit_h(inst);
            if lit == 0x33 {
                // LD B, Vx: store BCD of Vx at ram[i], ram[i+1], ram[i+2].
                let val = self.vx[reg as usize];
                let hundreds = ((val / 100) % 10) as u8;
                let tens = ((val / 10) % 10) as u8;
                let ones = (val % 10) as u8;
                self.ram[self.i as usize] = hundreds;
                self.ram[(self.i+1) as usize] = tens;
                self.ram[(self.i+2) as usize] = ones;
                println!("ld b v{:x?}", reg);
            } else if lit == 0x0a {
                // LD Vx, K: block until a key press; resolved by
                // `check_lock` when the SDL loop sees a key-down.
                self.input_lock = reg as i8;
                println!("ld v{:x?} k", reg);
            } else if lit == 0x29 {
                // LD F, Vx: point I at the 5-byte font glyph for the
                // *value* in Vx (the old code used the register number).
                self.i = self.vx[reg as usize] * 5;
                println!("ld f v{:x?}", reg);
            } else if lit == 0x65 {
                // LD Vx, [I]: read V0..=Vx inclusive (the spec includes
                // Vx; the old exclusive range dropped the last register),
                // advancing I as we go.
                for i in 0 ..= reg {
                    self.vx[i as usize] = self.ram[self.i as usize] as u16;
                    self.i += 1;
                }
                println!("ld v{:x?} [i]", reg);
            } else {
                println!(
                    "pc: {:x?}, opcode: {:x?}, {:x?}",
                    self.pc,
                    op,
                    inst
                );
                return Err(EvalError::UnknownOpcode)
            }
        }
        _ => {
            // 0x8 (ALU group) and 0xb land here: not implemented yet.
            println!(
                "pc: {:x?}, opcode: {:x?}, {:x?}",
                self.pc,
                op,
                inst
            );
            return Err(EvalError::UnknownOpcode)
        }
    }
    if increment_after {
        self.pc += 2;
    }
    Ok(())
}
/// If an `Fx0A` (wait-for-key) instruction is pending, store the pressed
/// key in the waiting register and release the lock; otherwise do nothing.
fn check_lock(&mut self, key: u16) {
    println!("check lock: {:x?}, L: {:x?}", key, self.input_lock);
    // A negative lock value means no LD Vx, K is waiting.
    if self.input_lock < 0 {
        return;
    }
    self.vx[self.input_lock as usize] = key;
    self.input_lock = -1
}
}
fn main() {
let args: Vec<String> = env::args().collect();
let mut vm: CHIP8 = CHIP8::new();
vm.init();
vm.lrom(&args[1], 0x200);
vm.pc = 0x200;
// sdl init
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let window = video_subsystem.window(
"CHIP-8 Emulator",
(DISPLAY_MAX_X * DISPLAY_PIXEL) as u32,
(DISPLAY_MAX_Y * DISPLAY_PIXEL) as u32
)
.position_centered()
.build()
.unwrap();
let mut canvas = window.into_canvas().build().unwrap();
let mut event_pump = sdl_context.event_pump().unwrap();
let mut dt_timer = Instant::now();
let mut st_timer = Instant::now();
let time_dt_ms = 1000 / DELAY_TIMER_HZ as u128;
let time_st_ms = 1000 / SOUND_TIMER_HZ as u128;
'mainloop: loop {
canvas.clear();
for event in event_pump.poll_iter() {
match event {
Event::Quit {..} |
Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
break 'mainloop
},
Event::KeyDown { keycode: Some(Keycode::Num1), .. } => {
vm.keyboard[0x1] = true;
vm.check_lock(0x1);
},
Event::KeyDown { keycode: Some(Keycode::Num2), .. } => {
vm.keyboard[0x2] = true;
vm.check_lock(0x2);
},
Event::KeyDown { keycode: Some(Keycode::Num3), .. } => {
vm.keyboard[0x3] = true;
vm.check_lock(0x3);
},
Event::KeyDown { keycode: Some(Keycode::Num4), .. } => {
vm.keyboard[0xc] = true;
vm.check_lock(0xc);
},
Event::KeyDown { keycode: Some(Keycode::Q), .. } => {
vm.keyboard[0x4] = true;
vm.check_lock(0x4);
},
Event::KeyDown { keycode: Some(Keycode::W), .. } => {
vm.keyboard[0x5] = true;
vm.check_lock(0x5);
},
Event::KeyDown { keycode: Some(Keycode::E), .. } => {
vm.keyboard[0x6] = true;
vm.check_lock(0x6);
},
Event::KeyDown { keycode: Some(Keycode::R), .. } => {
vm.keyboard[0xd] = true;
vm.check_lock(0xd);
},
Event::KeyDown { keycode: Some(Keycode::A), .. } => {
vm.keyboard[0x7] = true;
vm.check_lock(0x7);
},
Event::KeyDown { keycode: Some(Keycode::S), .. } => {
vm.keyboard[0x8] = true;
vm.check_lock(0x8);
},
Event::KeyDown { keycode: Some(Keycode::D), .. } => {
vm.keyboard[0x9] = true;
vm.check_lock(0x9);
},
Event::KeyDown { keycode: Some(Keycode::F), .. } => {
vm.keyboard[0xe] = true;
vm.check_lock(0xe);
},
Event::KeyDown { keycode: Some(Keycode::Z), .. } => {
vm.keyboard[0xa] = true;
vm.check_lock(0xa);
},
Event::KeyDown { keycode: Some(Keycode::X), .. } => {
vm.keyboard[0x0] = true;
vm.check_lock(0x0);
},
Event::KeyDown { keycode: Some(Keycode::C), .. } => {
vm.keyboard[0xb] = true;
vm.check_lock(0xb);
},
Event::KeyDown { keycode: Some(Keycode::V), .. } => {
vm.keyboard[0xf] = true;
vm.check_lock(0xf);
},
Event::KeyUp { keycode: Some(Keycode::Num1), .. } => {
vm.keyboard[0x1] = false
},
Event::KeyUp { keycode: Some(Keycode::Num2), .. } => {
vm.keyboard[0x2] = false
},
Event::KeyUp { keycode: Some(Keycode::Num3), .. } => {
vm.keyboard[0x3] = false
},
Event::KeyUp { keycode: Some(Keycode::Num4), .. } => {
vm.keyboard[0xc] = false
},
Event::KeyUp { keycode: Some(Keycode::Q), .. } => {
vm.keyboard[0x4] = false
},
Event::KeyUp { keycode: Some(Keycode::W), .. } => {
vm.keyboard[0x5] = false
},
Event::KeyUp { keycode: Some(Keycode::E), .. } => {
vm.keyboard[0x6] = false
},
Event::KeyUp { keycode: Some(Keycode::R), .. } => {
vm.keyboard[0xd] = false
},
Event::KeyUp { keycode: Some(Keycode::A), .. } => {
vm.keyboard[0x7] = false
},
Event::KeyUp { keycode: Some(Keycode::S), .. } => {
vm.keyboard[0x8] = false
},
Event::KeyUp { keycode: Some(Keycode::D), .. } => {
vm.keyboard[0x9] = false
},
Event::KeyUp { keycode: Some(Keycode::F), .. } => {
vm.keyboard[0xe] = false
},
Event::KeyUp { keycode: Some(Keycode::Z), .. } => {
vm.keyboard[0xa] = false
},
Event::KeyUp { keycode: Some(Keycode::X), .. } => {
vm.keyboard[0x0] = false
},
Event::KeyUp { keycode: Some(Keycode::C), .. } => {
vm.keyboard[0xb] = false
},
Event::KeyUp { keycode: Some(Keycode::V), .. } => {
vm.keyboard[0xf] = false
},
_ => {}
}
}
if dt_timer.elapsed().as_millis() > time_dt_ms {
dt_timer = Instant::now();
if vm.dt > 0 {
vm.dt -= 1;
}
}
if st_timer.elapsed().as_millis() > time_st_ms {
st_timer = Instant::now();
if vm.st > 0 {
vm.st -= 1;
}
}
if vm.input_lock < 0 {
match vm.step() {
Ok(_) => {},
Err(_) => break 'mainloop ,
}
}
for y in 0 .. DISPLAY_MAX_Y {
for x in 0 .. DISPLAY_MAX_X {
if vm.display[x][y] == 0 {
canvas.set_draw_color(COLOR_BLACK);
} else {
canvas.set_draw_color(COLOR_WHITE);
}
canvas.fill_rect(
Rect::new(
(x * DISPLAY_PIXEL) as i32, (y * DISPLAY_PIXEL) as i32,
DISPLAY_PIXEL as u32, DISPLAY_PIXEL as u32
)
).unwrap();
}
}
canvas.present();
std::thread::sleep(std::time::Duration::from_millis(100));
}
}
|
#![allow(warnings)]
use downcast_rs::__std::collections::HashMap;
use spacegame::assets::prefab::Prefab;
use spacegame::core::animation::{Animation, AnimationController};
use spacegame::core::timer::Timer;
use spacegame::core::transform::Transform;
use spacegame::gameplay::collision::{BoundingBox, CollisionLayer};
use spacegame::gameplay::enemy::{
Boss1, Enemy, EnemyType, LastBoss, MovementBehavior, Satellite, Spammer,
};
use spacegame::gameplay::health::Health;
use spacegame::gameplay::physics::DynamicBody;
use spacegame::gameplay::player::{Player, Stats};
use spacegame::paths::get_assets_path;
use spacegame::prefab::enemies::EnemyPrefab;
use spacegame::prefab::player::PlayerPrefab;
use spacegame::render::particle::ParticleEmitter;
use spacegame::render::sprite::Sprite;
/// Serialize the player prefab and write it to `assets/prefab/player.json`.
fn gen_player() {
    let player = {
        let base_path = get_assets_path();
        // The engine trail emitter is loaded from an existing asset file.
        let emitter: ParticleEmitter = serde_json::from_str(
            &std::fs::read_to_string(base_path.join("particle/trail.json")).unwrap(),
        )
        .unwrap();
        let scale = 24.0;
        let player_prefab = PlayerPrefab {
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 600.0,
                mass: 1.0,
                max_force: 1500.0,
            },
            transform: Transform {
                translation: glam::Vec2::new(100.0, 100.0),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: true,
            },
            sprite: Sprite {
                id: "blue_05.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: 20.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::PLAYER,
                collision_mask: None,
            },
            health: Health::new(10.0, Timer::of_seconds(0.5)),
            shield: None,
            trail: emitter,
            stats: Stats {
                dmg: 1.0,
                crit_percent: 50,
                crit_multiplier: 1.5,
                missile_percent: 0,
                boost_timer: Timer::of_seconds(1.0),
                boost_magnitude: 500.0,
            },
        };
        let prefab = &player_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/player.json", player)
        .expect("failed to write assets/prefab/player.json");
}
/// Serialize the mine prefab (proximity bomb with an explosion animation)
/// and write it to `assets/prefab/mine.json`.
fn gen_mine() {
    let mine = {
        let scale = 20.0;
        let enemy_prefab = EnemyPrefab {
            dynamic_body: DynamicBody {
                forces: vec![],
                impulses: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 0.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "explosion-05.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::MINE,
                collision_mask: None,
            },
            health: Some(Health::new(2.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Mine {
                    trigger_distance: 200.0,
                    explosion_timer: {
                        // Stopped until the player comes within trigger range.
                        let mut timer = Timer::of_seconds(2.0);
                        timer.stop();
                        timer
                    },
                },
                scrap_drop: (0, 0),
                pickup_drop_percent: 0,
                movement: MovementBehavior::Nothing,
            },
            trail: None,
            animation: Some({
                let mut animations = HashMap::new();
                animations.insert(
                    String::from("boum"),
                    Animation::new(vec![
                        (String::from("windshield_wiper/00.png"), 0),
                        (String::from("windshield_wiper/01.png"), 1),
                        (String::from("windshield_wiper/02.png"), 2),
                        (String::from("windshield_wiper/03.png"), 3),
                        (String::from("windshield_wiper/04.png"), 4),
                        (String::from("windshield_wiper/05.png"), 5),
                        (String::from("windshield_wiper/06.png"), 6),
                        (String::from("windshield_wiper/07.png"), 7),
                        (String::from("windshield_wiper/08.png"), 8),
                        (String::from("windshield_wiper/09.png"), 9),
                        (String::from("windshield_wiper/10.png"), 10),
                    ]),
                );
                // Build the controller directly; the previous `let mut`
                // binding was never mutated and added nothing.
                AnimationController {
                    animations,
                    current_animation: None,
                    delete_on_finished: false,
                }
            }),
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/mine.json", mine)
        .expect("failed to write assets/prefab/mine.json");
}
/// Serialize the mine-lander prefab (drops mines on a timer) and write it
/// to `assets/prefab/mine_lander.json`.
fn gen_mine_lander() {
    let mine_lander = {
        let scale = 24.0;
        let enemy_prefab = EnemyPrefab {
            dynamic_body: DynamicBody {
                forces: vec![],
                impulses: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 200.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "red_03.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::MineLander(Timer::of_seconds(4.0)),
                scrap_drop: (10, 70),
                pickup_drop_percent: 2,
                movement: MovementBehavior::RandomPath(glam::Vec2::zero(), false),
            },
            trail: None,
            animation: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/mine_lander.json", mine_lander)
        .expect("failed to write assets/prefab/mine_lander.json");
}
/// Serialize the wanderer prefab (random-path roamer) and write it to
/// `assets/prefab/wanderer.json`.
fn gen_wanderer() {
    let wanderer = {
        let scale = 24.0;
        let enemy_prefab = EnemyPrefab {
            dynamic_body: DynamicBody {
                forces: vec![],
                impulses: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 200.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "red_04.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Wanderer(Timer::of_seconds(4.0)),
                scrap_drop: (10, 70),
                pickup_drop_percent: 2,
                movement: MovementBehavior::RandomPath(glam::Vec2::zero(), false),
            },
            trail: None,
            animation: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/wanderer.json", wanderer)
        .expect("failed to write assets/prefab/wanderer.json");
}
/// Serialize the basic follow-and-shoot enemy prefab and write it to
/// `assets/prefab/base_enemy.json`.
fn gen_base_enemy() {
    let base_enemy = {
        let scale = 24.0;
        let base_path = get_assets_path();
        // `mut` dropped: the emitter is only moved into the prefab.
        let emitter: ParticleEmitter = serde_json::from_str(
            &std::fs::read_to_string(base_path.join("particle/enemy_trail.json")).unwrap(),
        )
        .unwrap();
        let enemy_prefab = EnemyPrefab {
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 100.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "darkgrey_02.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::FollowPlayer(Timer::of_seconds(3.0)),
                scrap_drop: (10, 40),
                pickup_drop_percent: 2,
                movement: MovementBehavior::Follow,
            },
            trail: Some(emitter),
            animation: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/base_enemy.json", base_enemy)
        .expect("failed to write assets/prefab/base_enemy.json");
}
/// Serialize the carrier prefab (large, slow, deploys spaceships) and
/// write it to `assets/prefab/carrier.json`.
fn gen_carrier() {
    let carrier = {
        let scale = 128.0;
        let base_path = get_assets_path();
        // `mut` dropped: the emitter is only moved into the prefab.
        let emitter: ParticleEmitter = serde_json::from_str(
            &std::fs::read_to_string(base_path.join("particle/enemy_trail.json")).unwrap(),
        )
        .unwrap();
        let enemy_prefab = EnemyPrefab {
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 50.0,
                mass: 10.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "large_red_01.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(15.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Carrier {
                    nb_of_spaceships: 4,
                    time_between_deploy: Timer::of_seconds(12.0),
                },
                scrap_drop: (10, 40),
                pickup_drop_percent: 70,
                movement: MovementBehavior::Follow,
            },
            trail: Some(emitter),
            animation: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/carrier.json", carrier)
        .expect("failed to write assets/prefab/carrier.json");
}
/// Serialize the kamikaze prefab (fast, flies straight at the player) and
/// write it to `assets/prefab/kamikaze.json`.
fn gen_kamikaze() {
    let kamikaze = {
        let scale = 20.0;
        let base_path = get_assets_path();
        // `mut` dropped: the emitter is only moved into the prefab.
        let emitter: ParticleEmitter = serde_json::from_str(
            &std::fs::read_to_string(base_path.join("particle/enemy_trail.json")).unwrap(),
        )
        .unwrap();
        let enemy_prefab = EnemyPrefab {
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 300.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "metalic_06.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(2.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Kamikaze,
                scrap_drop: (10, 40),
                pickup_drop_percent: 2,
                movement: MovementBehavior::GoToPlayer,
            },
            trail: Some(emitter),
            animation: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/kamikaze.json", kamikaze)
        .expect("failed to write assets/prefab/kamikaze.json");
}
/// Serialize the tier-2 follower enemy prefab (faster, better drops) and
/// write it to `assets/prefab/base_enemy_2.json`.
fn gen_base_enemy_2() {
    let base_enemy = {
        let scale = 24.0;
        let base_path = get_assets_path();
        // `mut` dropped: the emitter is only moved into the prefab.
        let emitter: ParticleEmitter = serde_json::from_str(
            &std::fs::read_to_string(base_path.join("particle/enemy_trail.json")).unwrap(),
        )
        .unwrap();
        let enemy_prefab = EnemyPrefab {
            animation: None,
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 300.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "metalic_06.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::FollowPlayer(Timer::of_seconds(1.0)),
                scrap_drop: (30, 60),
                pickup_drop_percent: 5,
                movement: MovementBehavior::Follow,
            },
            trail: Some(emitter),
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/base_enemy_2.json", base_enemy)
        .expect("failed to write assets/prefab/base_enemy_2.json");
}
/// Serialize the tier-3 follower enemy prefab (fastest tier, best drops)
/// and write it to `assets/prefab/base_enemy_3.json`.
fn gen_base_enemy_3() {
    let base_enemy = {
        let scale = 24.0;
        let base_path = get_assets_path();
        // `mut` dropped: the emitter is only moved into the prefab.
        let emitter: ParticleEmitter = serde_json::from_str(
            &std::fs::read_to_string(base_path.join("particle/enemy_trail.json")).unwrap(),
        )
        .unwrap();
        let enemy_prefab = EnemyPrefab {
            animation: None,
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 400.0,
                mass: 1.0,
                max_force: 1000.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "darkgrey_04.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::FollowPlayer(Timer::of_seconds(1.0)),
                scrap_drop: (50, 90),
                pickup_drop_percent: 10,
                movement: MovementBehavior::Follow,
            },
            trail: Some(emitter),
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/base_enemy_3.json", base_enemy)
        .expect("failed to write assets/prefab/base_enemy_3.json");
}
/// Serialize the spammer prefab (heavy enemy firing 3-shot salvos) and
/// write it to `assets/prefab/spammer.json`.
fn gen_spammer() {
    let spammer = {
        let scale = 32.0;
        let enemy_prefab = EnemyPrefab {
            animation: None,
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 100.0,
                mass: 5.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "green_04.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(1.0))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Spammer(Spammer {
                    shoot_timer: Timer::of_seconds(1.0),
                    nb_shot: 3,
                    current_shot: 0,
                    salve_timer: Timer::of_seconds(6.0),
                }),
                scrap_drop: (20, 70),
                pickup_drop_percent: 10,
                movement: MovementBehavior::Follow,
            },
            trail: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/spammer.json", spammer)
        .expect("failed to write assets/prefab/spammer.json");
}
/// Serialize the final-boss prefab (10-shot salvos, random pathing) and
/// write it to `assets/prefab/last_boss.json`.
fn gen_last_boss() {
    let boss = {
        let scale = 64.0;
        let enemy_prefab = EnemyPrefab {
            animation: None,
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 1500.0,
                mass: 10.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "large_purple_01.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(20.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::LastBoss(LastBoss {
                    shoot_timer: Timer::of_seconds(0.3),
                    nb_shot: 10,
                    current_shot: 0,
                    salve_timer: Timer::of_seconds(5.0),
                }),
                scrap_drop: (20, 100),
                pickup_drop_percent: 100,
                movement: MovementBehavior::RandomPath(glam::Vec2::zero(), false),
            },
            trail: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/last_boss.json", boss)
        .expect("failed to write assets/prefab/last_boss.json");
}
/// Serialize the satellite prefab (stationary turret) and write it to
/// `assets/prefab/satellite.json`. Extracted from `main` so every prefab
/// is generated by its own `gen_*` function, like the rest of the file.
fn gen_satellite() {
    let satellite = {
        let scale = 40.0;
        let enemy_prefab = EnemyPrefab {
            animation: None,
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 0.0,
                mass: 1.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "sat.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(3.0, Timer::of_seconds(1.0))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Satellite(Satellite {
                    shoot_timer: Timer::of_seconds(3.0),
                    shoot_distance: 800.0,
                }),
                scrap_drop: (10, 40),
                pickup_drop_percent: 2,
                movement: MovementBehavior::Follow,
            },
            trail: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/satellite.json", satellite)
        .expect("failed to write assets/prefab/satellite.json");
}
/// Serialize the first-boss prefab (20-shot salvos, follows the player)
/// and write it to `assets/prefab/boss1.json`. Extracted from `main`.
fn gen_boss1() {
    let boss = {
        let scale = 64.0;
        let enemy_prefab = EnemyPrefab {
            animation: None,
            dynamic_body: DynamicBody {
                impulses: vec![],
                forces: vec![],
                velocity: glam::Vec2::zero(),
                max_velocity: 500.0,
                mass: 10.0,
                max_force: 500.0,
            },
            transform: Transform {
                translation: Default::default(),
                scale: scale * glam::Vec2::one(),
                rotation: 0.0,
                dirty: false,
            },
            sprite: Sprite {
                id: "large_grey_02.png".to_string(),
            },
            bounding_box: BoundingBox {
                half_extend: scale / 2.0 * glam::Vec2::one(),
                collision_layer: CollisionLayer::ENEMY,
                collision_mask: None,
            },
            health: Some(Health::new(10.0, Timer::of_seconds(0.5))),
            shield: None,
            enemy: Enemy {
                enemy_type: EnemyType::Boss1(Boss1 {
                    shoot_timer: Timer::of_seconds(0.3),
                    nb_shot: 20,
                    current_shot: 0,
                    salve_timer: Timer::of_seconds(5.0),
                }),
                scrap_drop: (20, 100),
                pickup_drop_percent: 100,
                movement: MovementBehavior::Follow,
            },
            trail: None,
        };
        let prefab = &enemy_prefab as &dyn Prefab;
        serde_json::to_string_pretty(prefab).unwrap()
    };
    // Surface write failures instead of silently discarding the Result.
    std::fs::write("assets/prefab/boss1.json", boss)
        .expect("failed to write assets/prefab/boss1.json");
}
/// Entry point: regenerate every prefab JSON file under `assets/prefab/`.
fn main() {
    gen_wanderer();
    gen_player();
    gen_mine_lander();
    gen_mine();
    gen_base_enemy();
    gen_base_enemy_2();
    gen_base_enemy_3();
    gen_spammer();
    gen_kamikaze();
    gen_carrier();
    gen_last_boss();
    gen_satellite();
    gen_boss1();
}
|
use std;
use std::convert::TryFrom;
use std::io::{self, BufReader, Write};
use std::marker::PhantomData;
use std::net::{Ipv4Addr, SocketAddr};
use crate::Tagged;
use async_bincode::{AsyncBincodeStream, AsyncBincodeWriter, AsyncDestination};
use bincode;
use bufstream::BufStream;
use byteorder::{NetworkEndian, WriteBytesExt};
use mio::{self, Evented, Poll, PollOpt, Ready, Token};
use net2;
use serde::{Deserialize, Serialize};
use tokio::prelude::*;
use super::{DeserializeReceiver, NonBlockingWriter, ReceiveError};
/// Errors that can occur while sending on a [`TcpSender`].
#[derive(Debug, Fail)]
pub enum SendError {
    /// Serializing the value with bincode failed.
    #[fail(display = "{}", _0)]
    BincodeError(#[cause] bincode::Error),
    /// Writing to the underlying socket failed.
    #[fail(display = "{}", _0)]
    IoError(#[cause] io::Error),
    /// A previous send failed, so the stream state is unknown; all
    /// subsequent sends are refused.
    #[fail(display = "channel has previously encountered an error")]
    Poisoned,
}
// Allow `?` on bincode results inside send paths.
impl From<bincode::Error> for SendError {
    fn from(e: bincode::Error) -> Self {
        SendError::BincodeError(e)
    }
}
// Allow `?` on I/O results inside send paths.
impl From<io::Error> for SendError {
    fn from(e: io::Error) -> Self {
        SendError::IoError(e)
    }
}
// Like `try!`/`?`, but marks the sender as poisoned before propagating the
// error, so later sends fail fast with `SendError::Poisoned` instead of
// writing to a stream whose framing state is unknown.
macro_rules! poisoning_try {
    ($self_:ident, $e:expr) => {
        match $e {
            Ok(v) => v,
            Err(r) => {
                $self_.poisoned = true;
                return Err(r.into());
            }
        }
    };
}
/// Serializing (sending) half of a typed TCP channel.
pub struct TcpSender<T> {
    // Buffered so the length prefix and payload of a message are flushed
    // together in `send_ref`.
    stream: BufStream<std::net::TcpStream>,
    // Set after any send error; later sends return `SendError::Poisoned`.
    poisoned: bool,
    // Ties the sender to its message type without storing a value.
    phantom: PhantomData<T>,
}
impl<T: Serialize> TcpSender<T> {
    /// Wrap an already-connected stream, enabling `TCP_NODELAY` so small
    /// serialized messages are not delayed by Nagle's algorithm.
    pub fn new(stream: std::net::TcpStream) -> Result<Self, io::Error> {
        // This constructor is fallible — propagate the error instead of
        // panicking (the previous `.unwrap()` defeated the Result type).
        stream.set_nodelay(true)?;
        Ok(Self {
            stream: BufStream::new(stream),
            poisoned: false,
            phantom: PhantomData,
        })
    }
    /// Connect to `addr`, optionally binding the given local source port.
    pub(crate) fn connect_from(sport: Option<u16>, addr: &SocketAddr) -> Result<Self, io::Error> {
        let s = net2::TcpBuilder::new_v4()?
            .reuse_address(true)?
            .bind((Ipv4Addr::UNSPECIFIED, sport.unwrap_or(0)))?
            .connect(addr)?;
        // `new` already enables TCP_NODELAY; setting it here too was
        // redundant.
        Self::new(s)
    }
    /// Connect to `addr` from an ephemeral local port.
    pub fn connect(addr: &SocketAddr) -> Result<Self, io::Error> {
        Self::connect_from(None, addr)
    }
    pub fn get_mut(&mut self) -> &mut BufStream<std::net::TcpStream> {
        &mut self.stream
    }
    pub(crate) fn into_inner(self) -> BufStream<std::net::TcpStream> {
        self.stream
    }
    pub fn local_addr(&self) -> io::Result<SocketAddr> {
        self.stream.get_ref().local_addr()
    }
    pub fn peer_addr(&self) -> io::Result<SocketAddr> {
        self.stream.get_ref().peer_addr()
    }
    /// Send a message on this channel. Ownership isn't actually required, but is taken anyway to
    /// conform to the same api as mpsc::Sender.
    pub fn send(&mut self, t: T) -> Result<(), SendError> {
        self.send_ref(&t)
    }
    /// Serialize `t` framed with a network-endian `u32` length prefix,
    /// then flush. Any failure poisons the sender.
    pub fn send_ref(&mut self, t: &T) -> Result<(), SendError> {
        if self.poisoned {
            return Err(SendError::Poisoned);
        }
        // serialized_size fails only for unserializable types; these
        // unwraps are invariant checks, not I/O.
        let size = u32::try_from(bincode::serialized_size(t).unwrap()).unwrap();
        poisoning_try!(self, self.stream.write_u32::<NetworkEndian>(size));
        poisoning_try!(self, bincode::serialize_into(&mut self.stream, t));
        poisoning_try!(self, self.stream.flush());
        Ok(())
    }
    /// Borrow the stream's read side (for simple request/response use).
    pub fn reader<'a>(&'a mut self) -> impl io::Read + 'a {
        &mut self.stream
    }
}
// Trait form of `send`, so generic code can work over any channel sender.
impl<T: Serialize> super::Sender for TcpSender<T> {
    type Item = T;
    fn send(&mut self, t: T) -> Result<(), SendError> {
        self.send_ref(&t)
    }
}
/// Errors returned by [`TcpReceiver::try_recv`].
#[derive(Debug)]
pub enum TryRecvError {
    /// No complete message is available yet (the socket would block).
    Empty,
    /// The peer closed the connection, or an earlier I/O error poisoned
    /// the receiver.
    Disconnected,
    /// A complete frame arrived but could not be decoded.
    DeserializationError(bincode::Error),
}
/// Errors returned by [`TcpReceiver::recv`] (the blocking variant; it
/// never reports `Empty` because it retries until data arrives).
#[derive(Debug)]
pub enum RecvError {
    Disconnected,
    DeserializationError(bincode::Error),
}
/// A bincode-framed stream that either yields `T` directly
/// (`Passthrough`), or receives an alternate wire type `T2` and converts
/// each message to `T` with an upgrade closure (`Upgrade`). The write
/// half always carries `Tagged<()>` acknowledgements.
pub enum DualTcpStream<S, T, T2, D> {
    Passthrough(AsyncBincodeStream<S, T, Tagged<()>, D>),
    Upgrade(
        AsyncBincodeStream<S, T2, Tagged<()>, D>,
        Box<dyn FnMut(T2) -> T + Send + Sync>,
    ),
}
// Plain transports default to the passthrough (no-upgrade) variant.
impl<S, T, T2> From<S> for DualTcpStream<S, T, T2, AsyncDestination> {
    fn from(stream: S) -> Self {
        DualTcpStream::Passthrough(AsyncBincodeStream::from(stream).for_async())
    }
}
impl<S, T, T2> DualTcpStream<S, T, T2, AsyncDestination> {
    /// Wrap `stream` so every received `T2` message is converted to `T`
    /// via `f` before being yielded.
    pub fn upgrade<F: 'static + FnMut(T2) -> T + Send + Sync>(stream: S, f: F) -> Self {
        let s: AsyncBincodeStream<S, T2, Tagged<()>, AsyncDestination> =
            AsyncBincodeStream::from(stream).for_async();
        DualTcpStream::Upgrade(s, Box::new(f))
    }
    /// Borrow the underlying transport regardless of variant.
    pub fn get_ref(&self) -> &S {
        match *self {
            DualTcpStream::Passthrough(ref abs) => abs.get_ref(),
            DualTcpStream::Upgrade(ref abs, _) => abs.get_ref(),
        }
    }
}
// The write half carries `Tagged<()>` acks in both variants, so the Sink
// impl just forwards to whichever inner bincode stream is active.
impl<S, T, T2, D> Sink for DualTcpStream<S, T, T2, D>
where
    S: AsyncWrite,
    AsyncBincodeWriter<S, Tagged<()>, D>: Sink<SinkItem = Tagged<()>, SinkError = bincode::Error>,
{
    type SinkItem = Tagged<()>;
    type SinkError = bincode::Error;
    fn start_send(
        &mut self,
        item: Self::SinkItem,
    ) -> Result<AsyncSink<Self::SinkItem>, Self::SinkError> {
        match *self {
            DualTcpStream::Passthrough(ref mut abs) => abs.start_send(item),
            DualTcpStream::Upgrade(ref mut abs, _) => abs.start_send(item),
        }
    }
    fn poll_complete(&mut self) -> Result<Async<()>, Self::SinkError> {
        match *self {
            DualTcpStream::Passthrough(ref mut abs) => abs.poll_complete(),
            DualTcpStream::Upgrade(ref mut abs, _) => abs.poll_complete(),
        }
    }
}
// Reading yields `T`: passthrough messages are forwarded untouched, while
// upgrade-mode messages are decoded as `T2` and mapped through the stored
// upgrade closure.
impl<S, T, T2, D> Stream for DualTcpStream<S, T, T2, D>
where
    for<'a> T: Deserialize<'a>,
    for<'a> T2: Deserialize<'a>,
    S: AsyncRead,
{
    type Item = T;
    type Error = bincode::Error;
    fn poll(&mut self) -> Result<Async<Option<Self::Item>>, Self::Error> {
        // https://github.com/rust-lang/rust-clippy/issues/3071
        #[allow(clippy::redundant_closure)]
        match *self {
            DualTcpStream::Passthrough(ref mut abr) => abr.poll(),
            DualTcpStream::Upgrade(ref mut abr, ref mut upgrade) => match abr.poll() {
                // `x` is None at end-of-stream, so map only Some payloads.
                Ok(Async::Ready(x)) => Ok(Async::Ready(x.map(|x| upgrade(x)))),
                Ok(Async::NotReady) => Ok(Async::NotReady),
                Err(e) => Err(e),
            },
        }
    }
}
/// Deserializing (receiving) half of a typed TCP channel, backed by a
/// non-blocking mio stream.
pub struct TcpReceiver<T> {
    pub(crate) stream: BufReader<NonBlockingWriter<mio::net::TcpStream>>,
    // Set after an I/O or decode error; later receives report Disconnected.
    poisoned: bool,
    // Incremental decoder that buffers partial frames across reads.
    deserialize_receiver: DeserializeReceiver<T>,
    phantom: PhantomData<T>,
}
impl<T> TcpReceiver<T>
where
    for<'de> T: Deserialize<'de>,
{
    /// Wrap `stream` with the default read-buffer capacity.
    pub fn new(stream: mio::net::TcpStream) -> Self {
        Self::new_inner(None, stream)
    }
    /// Wrap `stream` with a read buffer of `cap` bytes.
    pub fn with_capacity(cap: usize, stream: mio::net::TcpStream) -> Self {
        Self::new_inner(Some(cap), stream)
    }
    // Shared constructor; `cap` overrides the BufReader capacity if given.
    fn new_inner(cap: Option<usize>, stream: mio::net::TcpStream) -> Self {
        stream.set_nodelay(true).unwrap(); // for acks
        let stream = NonBlockingWriter::new(stream);
        let stream = if let Some(cap) = cap {
            BufReader::with_capacity(cap, stream)
        } else {
            BufReader::new(stream)
        };
        Self {
            stream,
            poisoned: false,
            deserialize_receiver: DeserializeReceiver::new(),
            phantom: PhantomData,
        }
    }
    /// Borrow the underlying mio stream (e.g. for event registration).
    pub fn get_ref(&self) -> &mio::net::TcpStream {
        &*self.stream.get_ref().get_ref()
    }
    /// Bind to `addr` and accept a single inbound connection.
    pub fn listen(addr: &SocketAddr) -> Result<Self, io::Error> {
        let listener = mio::net::TcpListener::bind(addr)?;
        Ok(Self::new(listener.accept()?.0))
    }
    pub fn local_addr(&self) -> Result<SocketAddr, io::Error> {
        self.stream.get_ref().get_ref().local_addr()
    }
    /// True when no buffered bytes are waiting to be decoded.
    pub fn is_empty(&self) -> bool {
        self.stream.buffer().is_empty()
    }
    /// Try to receive one message without blocking.
    ///
    /// I/O and decode errors poison the receiver permanently.
    pub fn try_recv(&mut self) -> Result<T, TryRecvError> {
        if self.poisoned {
            return Err(TryRecvError::Disconnected);
        }
        match self.deserialize_receiver.try_recv(&mut self.stream) {
            Ok(msg) => Ok(msg),
            Err(ReceiveError::WouldBlock) => Err(TryRecvError::Empty),
            Err(ReceiveError::IoError(_)) => {
                self.poisoned = true;
                Err(TryRecvError::Disconnected)
            }
            Err(ReceiveError::DeserializationError(e)) => {
                self.poisoned = true;
                Err(TryRecvError::DeserializationError(e))
            }
        }
    }
    /// Receive one message, retrying until data arrives.
    ///
    /// NOTE(review): this busy-spins on `Empty` since the socket is
    /// non-blocking; callers that need to wait efficiently presumably use
    /// the `Evented` registration with mio instead — confirm.
    pub fn recv(&mut self) -> Result<T, RecvError> {
        loop {
            return match self.try_recv() {
                Err(TryRecvError::Empty) => continue,
                Err(TryRecvError::Disconnected) => Err(RecvError::Disconnected),
                Err(TryRecvError::DeserializationError(e)) => {
                    Err(RecvError::DeserializationError(e))
                }
                Ok(t) => Ok(t),
            };
        }
    }
}
// Delegate readiness registration to the inner mio TcpStream so a
// TcpReceiver can be registered with a mio Poll directly.
impl<T> Evented for TcpReceiver<T> {
    fn register(
        &self,
        poll: &Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        self.stream
            .get_ref()
            .get_ref()
            .register(poll, token, interest, opts)
    }
    fn reregister(
        &self,
        poll: &Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        self.stream
            .get_ref()
            .get_ref()
            .reregister(poll, token, interest, opts)
    }
    fn deregister(&self, poll: &Poll) -> io::Result<()> {
        self.stream.get_ref().get_ref().deregister(poll)
    }
}
/// Create a connected socket pair over loopback: bind a listener at
/// `listen_addr`, dial it with a mio stream, and accept the peer.
/// Returns `(sender side, receiver side)`.
fn connect(listen_addr: SocketAddr) -> (std::net::TcpStream, mio::net::TcpStream) {
    let server = std::net::TcpListener::bind(&listen_addr).unwrap();
    let server_addr = server.local_addr().unwrap();
    let receiver_side = mio::net::TcpStream::connect(&server_addr).unwrap();
    let (sender_side, _peer) = server.accept().unwrap();
    (sender_side, receiver_side)
}
/// Creates a connected typed channel over a fresh loopback TCP pair.
pub fn channel<T: Serialize>(listen_addr: SocketAddr) -> (TcpSender<T>, TcpReceiver<T>)
where
    for<'de> T: Deserialize<'de>,
{
    // Establish the raw socket pair, then wrap each end in its typed half.
    let (raw_tx, raw_rx) = connect(listen_addr);
    (TcpSender::new(raw_tx).unwrap(), TcpReceiver::new(raw_rx))
}
#[cfg(test)]
mod tests {
    use super::*;
    use mio::Events;
    use std::thread;
    // Round-trips a few values over a loopback channel on a single thread.
    #[test]
    fn it_works() {
        let (mut sender, mut receiver) = channel::<u32>("127.0.0.1:0".parse().unwrap());
        sender.send(12).unwrap();
        assert_eq!(receiver.recv().unwrap(), 12);
        sender.send(65).unwrap();
        sender.send(13).unwrap();
        assert_eq!(receiver.recv().unwrap(), 65);
        assert_eq!(receiver.recv().unwrap(), 13);
    }
    // Sender and receiver on separate threads; message order is preserved.
    #[test]
    fn multithread() {
        let (mut sender, mut receiver) = channel::<u32>("127.0.0.1:0".parse().unwrap());
        let t1 = thread::spawn(move || {
            sender.send(12).unwrap();
            sender.send(65).unwrap();
            sender.send(13).unwrap();
        });
        let t2 = thread::spawn(move || {
            assert_eq!(receiver.recv().unwrap(), 12);
            assert_eq!(receiver.recv().unwrap(), 65);
            assert_eq!(receiver.recv().unwrap(), 13);
        });
        t1.join().unwrap();
        t2.join().unwrap();
    }
    // Readiness is observable through mio's `Poll` before receiving.
    #[test]
    fn poll() {
        let (mut sender, mut receiver) = channel::<u32>("127.0.0.1:0".parse().unwrap());
        let t1 = thread::spawn(move || {
            sender.send(12).unwrap();
            sender.send(65).unwrap();
            sender.send(13).unwrap();
        });
        let t2 = thread::spawn(move || {
            let poll = Poll::new().unwrap();
            let mut events = Events::with_capacity(128);
            poll.register(&receiver, Token(0), Ready::readable(), PollOpt::level())
                .unwrap();
            poll.poll(&mut events, None).unwrap();
            let mut events = events.into_iter();
            assert_eq!(events.next().unwrap().token(), Token(0));
            assert_eq!(receiver.recv().unwrap(), 12);
            assert_eq!(receiver.recv().unwrap(), 65);
            assert_eq!(receiver.recv().unwrap(), 13);
        });
        t1.join().unwrap();
        t2.join().unwrap();
    }
    // Two channels exchanged in lock-step between two threads, each side
    // polling for readiness before every receive.
    #[test]
    fn ping_pong() {
        let (mut sender, mut receiver) = channel::<u32>("127.0.0.1:0".parse().unwrap());
        let (mut sender2, mut receiver2) = channel::<u32>("127.0.0.1:0".parse().unwrap());
        let t1 = thread::spawn(move || {
            let poll = Poll::new().unwrap();
            let mut events = Events::with_capacity(128);
            poll.register(&receiver, Token(0), Ready::readable(), PollOpt::level())
                .unwrap();
            poll.poll(&mut events, None).unwrap();
            assert_eq!(events.iter().next().unwrap().token(), Token(0));
            assert_eq!(receiver.recv().unwrap(), 15);
            sender2.send(12).unwrap();
            poll.poll(&mut events, None).unwrap();
            assert_eq!(events.iter().next().unwrap().token(), Token(0));
            assert_eq!(receiver.recv().unwrap(), 54);
            sender2.send(65).unwrap();
        });
        let t2 = thread::spawn(move || {
            let poll = Poll::new().unwrap();
            let mut events = Events::with_capacity(128);
            poll.register(&receiver2, Token(0), Ready::readable(), PollOpt::level())
                .unwrap();
            sender.send(15).unwrap();
            poll.poll(&mut events, None).unwrap();
            assert_eq!(events.iter().next().unwrap().token(), Token(0));
            assert_eq!(receiver2.recv().unwrap(), 12);
            sender.send(54).unwrap();
            poll.poll(&mut events, None).unwrap();
            assert_eq!(events.iter().next().unwrap().token(), Token(0));
            assert_eq!(receiver2.recv().unwrap(), 65);
        });
        t1.join().unwrap();
        t2.join().unwrap();
    }
}
|
use input_i_scanner::InputIScanner;
/// Clamp-down helper: `chmin!(a, b)` replaces `a` with `min(a, b)`.
macro_rules! chmin {
    ($a: expr, $b: expr) => {
        $a = std::cmp::min($a, $b);
    };
}
fn main() {
    let stdin = std::io::stdin();
    let mut _i_i = InputIScanner::from(stdin.lock());
    // Token scanner: `scan!(T)` reads one value, `scan!(T; n)` a Vec,
    // `scan!((T, U))` a tuple, `scan!((T, U); n)` a Vec of tuples.
    macro_rules! scan {
        (($($t: ty),+)) => {
            ($(scan!($t)),+)
        };
        ($t: ty) => {
            _i_i.scan::<$t>() as $t
        };
        (($($t: ty),+); $n: expr) => {
            std::iter::repeat_with(|| scan!(($($t),+))).take($n).collect::<Vec<_>>()
        };
        ($t: ty; $n: expr) => {
            std::iter::repeat_with(|| scan!($t)).take($n).collect::<Vec<_>>()
        };
    }
    // h x w grid of values `a`; k = exact number of marked cells a path must
    // collect (presumably the problem's "pick exactly k cells" constraint —
    // confirm against the original task statement).
    let (h, w, k) = scan!((usize, usize, usize));
    let mut a = vec![vec![]; h];
    for i in 0..h {
        a[i] = scan!(u64; w);
    }
    let a = a;
    // Mark cells one at a time in decreasing value order; after each new mark,
    // rerun the path DP over the currently marked set.
    let mut coordinates = Vec::new();
    for i in 0..h {
        for j in 0..w {
            coordinates.push((i, j));
        }
    }
    coordinates.sort_by_key(|&(i, j)| a[i][j]);
    coordinates.reverse();
    let mut ok = vec![vec![false; w]; h];
    let inf = std::u64::MAX / 2;
    let mut ans = inf;
    for (ii, jj) in coordinates {
        ok[ii][jj] = true;
        // dp[i][j][t] = min total value of marked cells collected on a
        // right/down path reaching (i, j) having stepped on exactly t marked
        // cells; stepping on a marked cell is mandatory (no skip branch).
        let mut dp = vec![vec![vec![inf; k + 1]; w]; h];
        if ok[0][0] {
            dp[0][0][1] = a[0][0];
        } else {
            dp[0][0][0] = 0;
        }
        for i in 0..h {
            for j in 0..w {
                for t in 0..=k {
                    // Transition from the cell above.
                    if i >= 1 {
                        if ok[i][j] {
                            if t >= 1 {
                                chmin!(dp[i][j][t], dp[i - 1][j][t - 1] + a[i][j]);
                            }
                        } else {
                            chmin!(dp[i][j][t], dp[i - 1][j][t]);
                        }
                    }
                    // Transition from the cell to the left.
                    if j >= 1 {
                        if ok[i][j] {
                            if t >= 1 {
                                chmin!(dp[i][j][t], dp[i][j - 1][t - 1] + a[i][j]);
                            }
                        } else {
                            chmin!(dp[i][j][t], dp[i][j - 1][t]);
                        }
                    }
                }
            }
        }
        // Best path that collects exactly k marked cells for this marked set.
        chmin!(ans, dp[h - 1][w - 1][k]);
    }
    println!("{}", ans);
}
|
// Puzzle input embedded at compile time; `solve` assumes 4 bytes per line ("A X\n").
pub const INPUT: &str = include_str!("../input.txt");
/// Scores every rock-paper-scissors round under both puzzle interpretations.
///
/// Part one reads the second column as the player's own shape; part two reads
/// it as the required outcome. Each round contributes shape value + outcome
/// value (win 6 / draw 3 / loss 0) to its respective total.
///
/// The input must consist of 4-byte lines ("A X\n"); a trailing line without
/// a newline is silently ignored by `chunks_exact`.
pub fn solve(input: &str) -> (u32, u32) {
    input
        .as_bytes()
        .chunks_exact(4) // 4 bytes per line "A X\n"
        .fold((0, 0), |(one, two), round| {
            // (part-one score, part-two score) for this round.
            let scores = match (round[0], round[2]) {
                // Opponent Rock
                (b'A', b'X') => (4, 3), // Rock -> Draw | Lose -> Scissors
                (b'A', b'Y') => (8, 4), // Paper -> Win | Draw -> Rock
                (b'A', b'Z') => (3, 8), // Scissors -> Lose | Win -> Paper
                // Opponent Paper
                (b'B', b'X') => (1, 1), // Rock -> Lose | Lose -> Rock
                (b'B', b'Y') => (5, 5), // Paper -> Draw | Draw -> Paper
                (b'B', b'Z') => (9, 9), // Scissors -> Win | Win -> Scissors
                // Opponent Scissors
                (b'C', b'X') => (7, 2), // Rock -> Win | Lose -> Paper
                (b'C', b'Y') => (2, 6), // Paper -> Lose | Draw -> Scissors
                (b'C', b'Z') => (6, 7), // Scissors -> Draw | Win -> Rock
                _ => panic!("Invalid round"),
            };
            (one + scores.0, two + scores.1)
        })
}
#[cfg(test)]
mod test {
    use super::*;
    // Regression pin: known answers for the embedded puzzle input.
    #[test]
    fn test_solve() {
        assert_eq!(solve(INPUT), (14_531, 11_258));
    }
}
|
// Auto-generated (svd2rust-style) reader/writer plumbing for the CTL register.
#[doc = "Reader of register CTL"]
pub type R = crate::R<u32, super::CTL>;
#[doc = "Writer for register CTL"]
pub type W = crate::W<u32, super::CTL>;
#[doc = "Register CTL `reset()`'s with value 0"]
impl crate::ResetValue for super::CTL {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
// Generated single-bit field readers/write-proxies for CTL bits 0..=5.
// Each `bit` method clears the field's mask, then ORs in the new value.
// ENABLE: bit 0.
#[doc = "Reader of field `ENABLE`"]
pub type ENABLE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENABLE`"]
pub struct ENABLE_W<'a> {
    w: &'a mut W,
}
impl<'a> ENABLE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
        self.w
    }
}
// SWAP: bit 1.
#[doc = "Reader of field `SWAP`"]
pub type SWAP_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SWAP`"]
pub struct SWAP_W<'a> {
    w: &'a mut W,
}
impl<'a> SWAP_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
// SIGMODE: bit 2.
#[doc = "Reader of field `SIGMODE`"]
pub type SIGMODE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SIGMODE`"]
pub struct SIGMODE_W<'a> {
    w: &'a mut W,
}
impl<'a> SIGMODE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
        self.w
    }
}
// CAPMODE: bit 3.
#[doc = "Reader of field `CAPMODE`"]
pub type CAPMODE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CAPMODE`"]
pub struct CAPMODE_W<'a> {
    w: &'a mut W,
}
impl<'a> CAPMODE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
        self.w
    }
}
// RESMODE: bit 4.
#[doc = "Reader of field `RESMODE`"]
pub type RESMODE_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RESMODE`"]
pub struct RESMODE_W<'a> {
    w: &'a mut W,
}
impl<'a> RESMODE_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
        self.w
    }
}
// VELEN: bit 5.
#[doc = "Reader of field `VELEN`"]
pub type VELEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `VELEN`"]
pub struct VELEN_W<'a> {
    w: &'a mut W,
}
impl<'a> VELEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
        self.w
    }
}
// Generated enumerated field VELDIV (CTL bits 6:8): velocity predivider.
#[doc = "Predivide Velocity\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum VELDIV_A {
    #[doc = "0: QEI clock /1"]
    _1 = 0,
    #[doc = "1: QEI clock /2"]
    _2 = 1,
    #[doc = "2: QEI clock /4"]
    _4 = 2,
    #[doc = "3: QEI clock /8"]
    _8 = 3,
    #[doc = "4: QEI clock /16"]
    _16 = 4,
    #[doc = "5: QEI clock /32"]
    _32 = 5,
    #[doc = "6: QEI clock /64"]
    _64 = 6,
    #[doc = "7: QEI clock /128"]
    _128 = 7,
}
impl From<VELDIV_A> for u8 {
    #[inline(always)]
    fn from(variant: VELDIV_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `VELDIV`"]
pub type VELDIV_R = crate::R<u8, VELDIV_A>;
impl VELDIV_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> VELDIV_A {
        // Field is 3 bits wide, so all 8 raw values map to a variant.
        match self.bits {
            0 => VELDIV_A::_1,
            1 => VELDIV_A::_2,
            2 => VELDIV_A::_4,
            3 => VELDIV_A::_8,
            4 => VELDIV_A::_16,
            5 => VELDIV_A::_32,
            6 => VELDIV_A::_64,
            7 => VELDIV_A::_128,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `_1`"]
    #[inline(always)]
    pub fn is_1(&self) -> bool {
        *self == VELDIV_A::_1
    }
    #[doc = "Checks if the value of the field is `_2`"]
    #[inline(always)]
    pub fn is_2(&self) -> bool {
        *self == VELDIV_A::_2
    }
    #[doc = "Checks if the value of the field is `_4`"]
    #[inline(always)]
    pub fn is_4(&self) -> bool {
        *self == VELDIV_A::_4
    }
    #[doc = "Checks if the value of the field is `_8`"]
    #[inline(always)]
    pub fn is_8(&self) -> bool {
        *self == VELDIV_A::_8
    }
    #[doc = "Checks if the value of the field is `_16`"]
    #[inline(always)]
    pub fn is_16(&self) -> bool {
        *self == VELDIV_A::_16
    }
    #[doc = "Checks if the value of the field is `_32`"]
    #[inline(always)]
    pub fn is_32(&self) -> bool {
        *self == VELDIV_A::_32
    }
    #[doc = "Checks if the value of the field is `_64`"]
    #[inline(always)]
    pub fn is_64(&self) -> bool {
        *self == VELDIV_A::_64
    }
    #[doc = "Checks if the value of the field is `_128`"]
    #[inline(always)]
    pub fn is_128(&self) -> bool {
        *self == VELDIV_A::_128
    }
}
#[doc = "Write proxy for field `VELDIV`"]
pub struct VELDIV_W<'a> {
    w: &'a mut W,
}
impl<'a> VELDIV_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: VELDIV_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "QEI clock /1"]
    #[inline(always)]
    pub fn _1(self) -> &'a mut W {
        self.variant(VELDIV_A::_1)
    }
    #[doc = "QEI clock /2"]
    #[inline(always)]
    pub fn _2(self) -> &'a mut W {
        self.variant(VELDIV_A::_2)
    }
    #[doc = "QEI clock /4"]
    #[inline(always)]
    pub fn _4(self) -> &'a mut W {
        self.variant(VELDIV_A::_4)
    }
    #[doc = "QEI clock /8"]
    #[inline(always)]
    pub fn _8(self) -> &'a mut W {
        self.variant(VELDIV_A::_8)
    }
    #[doc = "QEI clock /16"]
    #[inline(always)]
    pub fn _16(self) -> &'a mut W {
        self.variant(VELDIV_A::_16)
    }
    #[doc = "QEI clock /32"]
    #[inline(always)]
    pub fn _32(self) -> &'a mut W {
        self.variant(VELDIV_A::_32)
    }
    #[doc = "QEI clock /64"]
    #[inline(always)]
    pub fn _64(self) -> &'a mut W {
        self.variant(VELDIV_A::_64)
    }
    #[doc = "QEI clock /128"]
    #[inline(always)]
    pub fn _128(self) -> &'a mut W {
        self.variant(VELDIV_A::_128)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // 3-bit field at offset 6.
        self.w.bits = (self.w.bits & !(0x07 << 6)) | (((value as u32) & 0x07) << 6);
        self.w
    }
}
// Generated single-bit field readers/write-proxies for CTL bits 9..=13.
// INVA: bit 9.
#[doc = "Reader of field `INVA`"]
pub type INVA_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INVA`"]
pub struct INVA_W<'a> {
    w: &'a mut W,
}
impl<'a> INVA_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);
        self.w
    }
}
// INVB: bit 10.
#[doc = "Reader of field `INVB`"]
pub type INVB_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INVB`"]
pub struct INVB_W<'a> {
    w: &'a mut W,
}
impl<'a> INVB_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
        self.w
    }
}
// INVI: bit 11.
#[doc = "Reader of field `INVI`"]
pub type INVI_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INVI`"]
pub struct INVI_W<'a> {
    w: &'a mut W,
}
impl<'a> INVI_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
        self.w
    }
}
// STALLEN: bit 12.
#[doc = "Reader of field `STALLEN`"]
pub type STALLEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `STALLEN`"]
pub struct STALLEN_W<'a> {
    w: &'a mut W,
}
impl<'a> STALLEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
        self.w
    }
}
// FILTEN: bit 13.
#[doc = "Reader of field `FILTEN`"]
pub type FILTEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FILTEN`"]
pub struct FILTEN_W<'a> {
    w: &'a mut W,
}
impl<'a> FILTEN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
        self.w
    }
}
// Generated 4-bit field FILTCNT (CTL bits 16:19). The raw `bits` writer is
// `unsafe` because no enumerated values constrain the field.
#[doc = "Reader of field `FILTCNT`"]
pub type FILTCNT_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `FILTCNT`"]
pub struct FILTCNT_W<'a> {
    w: &'a mut W,
}
impl<'a> FILTCNT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x0f << 16)) | (((value as u32) & 0x0f) << 16);
        self.w
    }
}
// Generated field accessors on the CTL reader: each extracts its field's bits
// from the cached register value.
impl R {
    #[doc = "Bit 0 - Enable QEI"]
    #[inline(always)]
    pub fn enable(&self) -> ENABLE_R {
        ENABLE_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Swap Signals"]
    #[inline(always)]
    pub fn swap(&self) -> SWAP_R {
        SWAP_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - Signal Mode"]
    #[inline(always)]
    pub fn sigmode(&self) -> SIGMODE_R {
        SIGMODE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Capture Mode"]
    #[inline(always)]
    pub fn capmode(&self) -> CAPMODE_R {
        CAPMODE_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - Reset Mode"]
    #[inline(always)]
    pub fn resmode(&self) -> RESMODE_R {
        RESMODE_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Capture Velocity"]
    #[inline(always)]
    pub fn velen(&self) -> VELEN_R {
        VELEN_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bits 6:8 - Predivide Velocity"]
    #[inline(always)]
    pub fn veldiv(&self) -> VELDIV_R {
        VELDIV_R::new(((self.bits >> 6) & 0x07) as u8)
    }
    #[doc = "Bit 9 - Invert PhA"]
    #[inline(always)]
    pub fn inva(&self) -> INVA_R {
        INVA_R::new(((self.bits >> 9) & 0x01) != 0)
    }
    #[doc = "Bit 10 - Invert PhB"]
    #[inline(always)]
    pub fn invb(&self) -> INVB_R {
        INVB_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bit 11 - Invert Index Pulse"]
    #[inline(always)]
    pub fn invi(&self) -> INVI_R {
        INVI_R::new(((self.bits >> 11) & 0x01) != 0)
    }
    #[doc = "Bit 12 - Stall QEI"]
    #[inline(always)]
    pub fn stallen(&self) -> STALLEN_R {
        STALLEN_R::new(((self.bits >> 12) & 0x01) != 0)
    }
    #[doc = "Bit 13 - Enable Input Filter"]
    #[inline(always)]
    pub fn filten(&self) -> FILTEN_R {
        FILTEN_R::new(((self.bits >> 13) & 0x01) != 0)
    }
    #[doc = "Bits 16:19 - Input Filter Prescale Count"]
    #[inline(always)]
    pub fn filtcnt(&self) -> FILTCNT_R {
        FILTCNT_R::new(((self.bits >> 16) & 0x0f) as u8)
    }
}
// Generated field write-proxy constructors on the CTL writer.
impl W {
    #[doc = "Bit 0 - Enable QEI"]
    #[inline(always)]
    pub fn enable(&mut self) -> ENABLE_W {
        ENABLE_W { w: self }
    }
    #[doc = "Bit 1 - Swap Signals"]
    #[inline(always)]
    pub fn swap(&mut self) -> SWAP_W {
        SWAP_W { w: self }
    }
    #[doc = "Bit 2 - Signal Mode"]
    #[inline(always)]
    pub fn sigmode(&mut self) -> SIGMODE_W {
        SIGMODE_W { w: self }
    }
    #[doc = "Bit 3 - Capture Mode"]
    #[inline(always)]
    pub fn capmode(&mut self) -> CAPMODE_W {
        CAPMODE_W { w: self }
    }
    #[doc = "Bit 4 - Reset Mode"]
    #[inline(always)]
    pub fn resmode(&mut self) -> RESMODE_W {
        RESMODE_W { w: self }
    }
    #[doc = "Bit 5 - Capture Velocity"]
    #[inline(always)]
    pub fn velen(&mut self) -> VELEN_W {
        VELEN_W { w: self }
    }
    #[doc = "Bits 6:8 - Predivide Velocity"]
    #[inline(always)]
    pub fn veldiv(&mut self) -> VELDIV_W {
        VELDIV_W { w: self }
    }
    #[doc = "Bit 9 - Invert PhA"]
    #[inline(always)]
    pub fn inva(&mut self) -> INVA_W {
        INVA_W { w: self }
    }
    #[doc = "Bit 10 - Invert PhB"]
    #[inline(always)]
    pub fn invb(&mut self) -> INVB_W {
        INVB_W { w: self }
    }
    #[doc = "Bit 11 - Invert Index Pulse"]
    #[inline(always)]
    pub fn invi(&mut self) -> INVI_W {
        INVI_W { w: self }
    }
    #[doc = "Bit 12 - Stall QEI"]
    #[inline(always)]
    pub fn stallen(&mut self) -> STALLEN_W {
        STALLEN_W { w: self }
    }
    #[doc = "Bit 13 - Enable Input Filter"]
    #[inline(always)]
    pub fn filten(&mut self) -> FILTEN_W {
        FILTEN_W { w: self }
    }
    #[doc = "Bits 16:19 - Input Filter Prescale Count"]
    #[inline(always)]
    pub fn filtcnt(&mut self) -> FILTCNT_W {
        FILTCNT_W { w: self }
    }
}
|
//! Macros for logging.
use super::symbol;
use super::context::Context;
use std::ffi::CString;
/// Log severity levels, ordered from least to most severe.
///
/// NOTE(review): `Metadata` is `#[repr(C)]` and handed across FFI, but
/// `Level` itself carries no explicit `repr` — confirm the native side
/// agrees on its layout and discriminant size.
pub enum Level {
    Debug = 0,
    Info = 1,
    Warn = 2,
    Error = 3,
}
/// Source-location metadata attached to every log record; passed by pointer
/// to the native logger callback.
///
/// NOTE(review): `&'static str` is a (ptr, len) fat pointer — confirm the C
/// side expects that two-word layout rather than a NUL-terminated string.
#[repr(C)]
pub(crate) struct Metadata {
    // Severity of the record.
    pub level: Level,
    // File, line and column captured at the macro call site.
    pub file: &'static str,
    pub line: u32,
    pub column: u32,
}
/// Forwards one log record to the native `Logger_log` callback.
///
/// Panics if the logger symbol has not been loaded (`unwrap` on `None`) or
/// if `msg` contains an interior NUL byte (`CString::new` fails).
pub fn write_log(
    ctx: &mut Context,
    msg: String,
    level: Level,
    file: &'static str,
    line: u32,
    column: u32,
) {
    unsafe {
        symbol::Logger_log.unwrap()(
            ctx,
            // The temporary CString lives until the end of this call
            // expression, so the pointer stays valid for the call's duration.
            CString::new(msg).unwrap().as_ptr(),
            &Metadata {
                level: level,
                file: file,
                line: line,
                column: column,
            },
        );
    }
}
/// Write debug-level log
#[macro_export]
macro_rules! debug {
    // Plain format string with no arguments.
    ($ctx:ident, $fmt:expr) =>
    (::plugkit::logger::write_log($ctx, format!($fmt), ::plugkit::logger::Level::Debug,
    file!(), line!(), column!()));
    // Format string plus arguments.
    ($ctx:ident, $fmt:expr, $($arg:tt)*) =>
    (::plugkit::logger::write_log($ctx, format!($fmt, $($arg)*), ::plugkit::logger::Level::Debug,
    file!(), line!(), column!()));
}
/// Write info-level log
#[macro_export]
macro_rules! info {
    ($ctx:ident, $fmt:expr) =>
    (::plugkit::logger::write_log($ctx, format!($fmt), ::plugkit::logger::Level::Info,
    file!(), line!(), column!()));
    ($ctx:ident, $fmt:expr, $($arg:tt)*) =>
    (::plugkit::logger::write_log($ctx, format!($fmt, $($arg)*), ::plugkit::logger::Level::Info,
    file!(), line!(), column!()));
}
/// Write warn-level log
#[macro_export]
macro_rules! warn {
    ($ctx:ident, $fmt:expr) =>
    (::plugkit::logger::write_log($ctx, format!($fmt), ::plugkit::logger::Level::Warn,
    file!(), line!(), column!()));
    ($ctx:ident, $fmt:expr, $($arg:tt)*) =>
    (::plugkit::logger::write_log($ctx, format!($fmt, $($arg)*), ::plugkit::logger::Level::Warn,
    file!(), line!(), column!()));
}
/// Write error-level log
#[macro_export]
macro_rules! error {
    ($ctx:ident, $fmt:expr) =>
    (::plugkit::logger::write_log($ctx, format!($fmt), ::plugkit::logger::Level::Error,
    file!(), line!(), column!()));
    ($ctx:ident, $fmt:expr, $($arg:tt)*) =>
    (::plugkit::logger::write_log($ctx, format!($fmt, $($arg)*), ::plugkit::logger::Level::Error,
    file!(), line!(), column!()));
}
|
/// A parsed type declaration (class/interface-like) — presumably produced by
/// a source parser elsewhere in this crate; verify field semantics there.
#[derive(Debug,Clone)]
pub struct Type {
    pub name: String,
    pub category: String,
    pub qualifier: Vec<String>,
    pub implements: Vec<String>,
    pub extends: Option<String>,
    pub annotation: Vec<String>,
    pub comment: Vec<String>,
    // Nested members declared inside this type.
    pub member: Vec<Member>,
}
/// A parsed method declaration.
#[derive(Debug,Clone)]
pub struct Method {
    pub name: String,
    pub is_abstract: bool,
    pub return_type: String,
    pub input_type: Vec<String>,
    pub qualifier: Vec<String>,
    pub annotation: Vec<String>,
    pub comment: Vec<String>,
}
/// A parsed field declaration.
#[derive(Debug,Clone)]
pub struct Field {
    pub name: String,
    pub data_type: String,
    pub qualifier: Vec<String>,
    pub annotation: Vec<String>,
    pub comment: Vec<String>,
}
/// File-level preamble: leading comments, imports and the package name.
#[derive(Debug,Clone)]
pub struct FileHead {
    pub comment: Vec<String>,
    pub import: Vec<String>,
    pub package: String,
}
/// Any parsed item that can appear as a member of a file or type.
#[derive(Debug,Clone)]
pub enum Member {
    Type(Type),
    Method(Method),
    Field(Field),
    FileHead(FileHead),
}
|
use super::writer::Writer;
use crate::MkvId;
use rand::Rng;
use std::io;
use std::io::{Error, ErrorKind};
// EBML "unknown size" marker payload (all data bits set for an 8-octet VINT).
pub const EBML_UNKNOWN_VALUE: u64 = 0x01FFFFFFFFFFFFFF;
// Largest value representable in a signed 16-bit block timecode.
pub const MAX_BLOCK_TIMECODE: i64 = 0x07FFF;
// Date elements are always 8 octets in size.
const DATE_ELEMENT_SIZE: i32 = 8;
// DocType strings written into the EBML header.
const DOC_TYPE_WEBM: &'static str = "webm";
const DOC_TYPE_MATROSKA: &'static str = "matroska";
/// Returns the number of octets needed to store `value` as an EBML-coded
/// unsigned integer ("VINT"): each extra octet buys 7 bits of payload, and
/// the all-ones payload per width is reserved, hence the `<` comparisons.
pub fn GetCodedUIntSize(value: u64) -> i32 {
    const LIMITS: [u64; 7] = [
        0x000000000000007F,
        0x0000000000003FFF,
        0x00000000001FFFFF,
        0x000000000FFFFFFF,
        0x00000007FFFFFFFF,
        0x000003FFFFFFFFFF,
        0x0001FFFFFFFFFFFF,
    ];
    for (idx, &limit) in LIMITS.iter().enumerate() {
        if value < limit {
            return (idx + 1) as i32;
        }
    }
    8
}
/// Returns the minimum number of whole octets needed to represent `value`
/// (at least one, so zero still occupies a single octet).
pub fn GetUIntSize(value: u64) -> i32 {
    let significant_bits = 64 - value.leading_zeros() as i32;
    // Round bits up to octets; clamp to 1 for value == 0.
    std::cmp::max(1, (significant_bits + 7) / 8)
}
/// Returns the number of octets needed to store `value` as a signed integer.
///
/// Doubling the requested value ensures positive values with their high bit
/// set are written with 0-padding to avoid flipping the signedness; the
/// bitwise complement maps negatives onto the same non-negative path.
pub fn GetIntSize(value: i64) -> i32 {
    let magnitude: u64 = if value < 0 {
        !(value as u64)
    } else {
        value as u64
    };
    GetUIntSize(2 * magnitude)
}
/// Reports this muxer port's version as 0.2.1.0 through the out-parameters.
pub fn GetVersion(major: &mut i32, minor: &mut i32, build: &mut i32, revision: &mut i32) {
    // The four assignments are independent; order is irrelevant.
    *revision = 0;
    *build = 1;
    *minor = 2;
    *major = 0;
}
/// Generates a random 64-bit UID from the thread-local RNG.
pub fn MakeUID() -> u64 {
    rand::thread_rng().gen()
}
/// Writes the low `size` octets of `value` to `writer`, most-significant
/// byte first (big-endian). `size` must be in `[1, 8]`.
pub fn SerializeInt(writer: &mut dyn Writer, value: u64, size: i32) -> io::Result<()> {
    if !(1..=8).contains(&size) {
        return Err(Error::new(ErrorKind::Other, "size should be in [1,8]"));
    }
    let octets = size as usize;
    // Build the big-endian byte sequence in one pass.
    let buffer: Vec<u8> = (0..octets)
        .map(|idx| (value >> ((octets - 1 - idx) * 8)) as u8)
        .collect();
    writer.write(&buffer)
}
/// Writes `f` as a 4-octet IEEE-754 value in big-endian (network) byte order.
///
/// Replaces the previous `union`-based bit reinterpretation and manual byte
/// loop with the safe, equivalent `f32::to_bits` + `to_be_bytes`, which emit
/// exactly the same four bytes.
pub fn SerializeFloat(writer: &mut dyn Writer, f: f32) -> io::Result<()> {
    writer.write(&f.to_bits().to_be_bytes())
}
/// Writes `value` as an EBML-coded unsigned integer using the minimal width.
pub fn WriteUInt(writer: &mut dyn Writer, value: u64) -> io::Result<()> {
    let size = GetCodedUIntSize(value);
    WriteUIntSize(writer, value, size)
}
/// Writes `value` as an EBML-coded unsigned integer ("VINT").
///
/// `size > 0` forces that exact octet count (failing if `value` does not
/// fit); `size == 0` grows the width until the payload fits. The length
/// marker bit is OR-ed into the leading octet before serialization.
pub fn WriteUIntSize(writer: &mut dyn Writer, value: u64, size: i32) -> io::Result<()> {
    if size < 0 || size > 8 {
        return Err(Error::new(ErrorKind::Other, "size should be in [0,8]"));
    }
    let mut value = value;
    let mut size = size;
    if size > 0 {
        // Marker bit for a `size`-octet VINT.
        let bit = (1 as u64) << (size * 7);
        // The all-ones payload is reserved ("unknown size"), hence `bit - 2`.
        if value > (bit - 2) {
            return Err(Error::new(ErrorKind::Other, "value should > bit-2"));
        }
        value |= bit;
    } else {
        // Auto-size: grow until `value` fits under the marker bit.
        size = 1;
        let mut bit = 0;
        loop {
            bit = (1 as u64) << (size * 7);
            let m = bit - 2;
            if value <= m {
                break;
            }
            size += 1;
        }
        if size > 8 {
            return Err(Error::new(ErrorKind::Other, "size cannot > 8"));
        }
        value |= bit;
    }
    SerializeInt(writer, value, size)
}
/// Writes the raw EBML ID `t`, first notifying the writer that an element
/// starts at the current position.
pub fn WriteID(writer: &mut dyn Writer, t: MkvId) -> io::Result<()> {
    writer.element_start_notify(t, writer.get_position());
    // IDs are stored verbatim (their marker bits are part of the ID value),
    // so the plain byte width is used rather than the coded width.
    let size = GetUIntSize(t as u64);
    SerializeInt(writer, t as u64, size)
}
/// Returns the byte count of a master element's header: the raw EBML ID plus
/// the coded ("VINT") width of its declared payload size.
pub fn EbmlMasterElementSize(t: MkvId, value: u64) -> u64 {
    // Size of EBML ID
    let mut ebml_size: i32 = GetUIntSize(t as u64);
    // Datasize: coded width of the payload size itself. The previous code
    // measured the ID again here and ignored `value`, overstating the header
    // size whenever the coded widths differed (cf. libwebm's
    // EbmlMasterElementSize, which uses GetCodedUIntSize(value)).
    ebml_size += GetCodedUIntSize(value);
    ebml_size as u64
}
/// Writes a master-element header (ID followed by the declared payload size);
/// returns `false` on the first failed write.
pub fn WriteEbmlMasterElement(writer: &mut dyn Writer, t: MkvId, size: u64) -> bool {
    // `&&` short-circuits, matching the original early returns.
    WriteID(writer, t).is_ok() && WriteUInt(writer, size).is_ok()
}
/// Total encoded size of a date element: EBML ID + fixed 8-octet payload +
/// one octet for the coded datasize field.
pub fn EbmlDateElementSize(t: MkvId) -> u64 {
    GetUIntSize(t as u64) as u64 + DATE_ELEMENT_SIZE as u64 + 1
}
/// Writes a date element: ID, fixed 8-octet length, then the raw timestamp.
/// Returns `false` on the first failed write.
pub fn WriteEbmlDateElement(writer: &mut dyn Writer, t: MkvId, value: i64) -> bool {
    WriteID(writer, t).is_ok()
        && WriteUInt(writer, DATE_ELEMENT_SIZE as u64).is_ok()
        && SerializeInt(writer, value as u64, DATE_ELEMENT_SIZE).is_ok()
}
/// Total encoded size of a signed-integer element: EBML ID + minimal payload
/// width + one octet for the coded datasize field.
pub fn EbmlElementSizeArgI64(t: MkvId, value: i64) -> u64 {
    let id_size = GetUIntSize(t as u64) as u64;
    let payload_size = GetIntSize(value) as u64;
    id_size + payload_size + 1
}
/// Writes a signed-integer element: ID, coded payload length, then the
/// payload. Returns `false` on the first failed write.
pub fn WriteEbmlElementArgI64(writer: &mut dyn Writer, t: MkvId, value: i64) -> bool {
    // GetIntSize is pure, so computing it up front matches the original.
    let size = GetIntSize(value);
    WriteID(writer, t).is_ok()
        && WriteUInt(writer, size as u64).is_ok()
        && SerializeInt(writer, value as u64, size).is_ok()
}
/// Size of an unsigned-integer element using the minimal payload width.
pub fn EbmlElementSizeArgU64(t: MkvId, value: u64) -> u64 {
    EbmlElementSizeArgsU64(t, value, 0)
}
/// Writes an unsigned-integer element using the minimal payload width.
pub fn WriteEbmlElementArgU64(writer: &mut dyn Writer, t: MkvId, value: u64) -> bool {
    WriteEbmlElementArgsU64(writer, t, value, 0)
}
/// Total encoded size of a float element: EBML ID + 4-octet payload + one
/// octet for the coded datasize field. The value itself never matters.
pub fn EbmlElementSizeArgF32(t: MkvId, _value: f32) -> u64 {
    GetUIntSize(t as u64) as u64 + std::mem::size_of::<f32>() as u64 + 1
}
/// Writes a float element: ID, fixed length of 4, then the IEEE-754 payload.
/// Returns `false` on the first failed write.
pub fn WriteEbmlElementArgF32(writer: &mut dyn Writer, t: MkvId, value: f32) -> bool {
    WriteID(writer, t).is_ok()
        && WriteUInt(writer, 4).is_ok()
        && SerializeFloat(writer, value).is_ok()
}
/// Total encoded size of an unsigned-integer element. A non-zero
/// `fixed_size` overrides the minimal payload width.
pub fn EbmlElementSizeArgsU64(t: MkvId, value: u64, fixed_size: u64) -> u64 {
    let data_size = if fixed_size > 0 {
        fixed_size
    } else {
        GetUIntSize(value) as u64
    };
    // EBML ID + payload + one octet for the coded datasize field.
    GetUIntSize(t as u64) as u64 + data_size + 1
}
/// Writes an unsigned-integer element. `fixed_size > 0` forces the payload
/// to that many octets (failing if `value` needs more); 0 picks the minimum.
pub fn WriteEbmlElementArgsU64(
    writer: &mut dyn Writer,
    t: MkvId,
    value: u64,
    fixed_size: u64,
) -> bool {
    if WriteID(writer, t).is_err() {
        return false;
    }
    let mut size: u64 = GetUIntSize(value) as u64;
    if fixed_size > 0 {
        // The value must fit inside the caller-requested width.
        if size > fixed_size {
            return false;
        }
        size = fixed_size;
    }
    if WriteUInt(writer, size).is_err() {
        return false;
    }
    if SerializeInt(writer, value, size as i32).is_err() {
        return false;
    }
    true
}
/// Total encoded size of a string element: EBML ID + UTF-8 payload + the
/// coded width of the payload length.
pub fn EbmlElementSizeArgStr(t: MkvId, value: &str) -> u64 {
    let data_size = value.len() as u64;
    GetUIntSize(t as u64) as u64 + data_size + GetCodedUIntSize(data_size) as u64
}
/// Writes a string element: ID, coded byte length, then the raw UTF-8 bytes.
/// Returns `false` on the first failed write.
pub fn WriteEbmlElementArgStr(writer: &mut dyn Writer, t: MkvId, value: &str) -> bool {
    let length = value.len() as u64;
    WriteID(writer, t).is_ok()
        && WriteUInt(writer, length).is_ok()
        && writer.write(value.as_bytes()).is_ok()
}
/// Total encoded size of a binary element: EBML ID + payload bytes + the
/// coded width of the payload length.
pub fn EbmlElementSizeArgSlice(t: MkvId, value: &[u8]) -> u64 {
    let data_size = value.len() as u64;
    GetUIntSize(t as u64) as u64 + data_size + GetCodedUIntSize(data_size) as u64
}
/// Writes a binary element: ID, coded byte length, then the raw payload.
/// Returns `false` on the first failed write.
pub fn WriteEbmlElementArgSlice(writer: &mut dyn Writer, t: MkvId, value: &[u8]) -> bool {
    WriteID(writer, t).is_ok()
        && WriteUInt(writer, value.len() as u64).is_ok()
        && writer.write(value).is_ok()
}
/// Writes a Void element padded to exactly `size` total bytes; returns the
/// bytes written, or 0 if `size` cannot be hit exactly or any write fails.
///
/// NOTE(review): `size - 1` underflows (u64) when `size == 0` — confirm
/// callers never request a zero-byte void.
pub fn WriteVoidElement(writer: &mut dyn Writer, size: u64) -> u64 {
    // Subtract one for the void ID and the coded size.
    let void_entry_size: u64 = size - 1 - GetCodedUIntSize(size - 1) as u64;
    let void_size: u64 = EbmlMasterElementSize(MkvId::MkvVoid, void_entry_size) + void_entry_size;
    if void_size != size {
        return 0;
    }
    let payload_position = writer.get_position();
    if WriteID(writer, MkvId::MkvVoid).is_err() {
        return 0;
    }
    if WriteUInt(writer, void_entry_size).is_err() {
        return 0;
    }
    // Zero-filled padding payload.
    let value = vec![0; void_entry_size as usize];
    if writer.write(&value).is_err() {
        return 0;
    }
    // Sanity check: the writer must have advanced by exactly `void_size`.
    let stop_position = writer.get_position();
    if stop_position - payload_position != void_size {
        return 0;
    }
    return void_size;
}
/// Writes the top-level EBML header: version fields, ID/size limits and the
/// doc type. Returns `false` on the first failed write.
pub fn WriteEbmlHeader(writer: &mut dyn Writer, doc_type_version: u64, doc_type: &str) -> bool {
    // Level 0
    // First total up the payload size of every child element so the master
    // element can declare it up front.
    let mut size: u64 = EbmlElementSizeArgU64(MkvId::MkvEBMLVersion, 1);
    size += EbmlElementSizeArgU64(MkvId::MkvEBMLReadVersion, 1);
    size += EbmlElementSizeArgU64(MkvId::MkvEBMLMaxIDLength, 4);
    size += EbmlElementSizeArgU64(MkvId::MkvEBMLMaxSizeLength, 8);
    size += EbmlElementSizeArgStr(MkvId::MkvDocType, doc_type);
    size += EbmlElementSizeArgU64(MkvId::MkvDocTypeVersion, doc_type_version);
    size += EbmlElementSizeArgU64(MkvId::MkvDocTypeReadVersion, 2);
    if !WriteEbmlMasterElement(writer, MkvId::MkvEBML, size) {
        return false;
    }
    if !WriteEbmlElementArgU64(writer, MkvId::MkvEBMLVersion, 1) {
        return false;
    }
    if !WriteEbmlElementArgU64(writer, MkvId::MkvEBMLReadVersion, 1) {
        return false;
    }
    if !WriteEbmlElementArgU64(writer, MkvId::MkvEBMLMaxIDLength, 4) {
        return false;
    }
    if !WriteEbmlElementArgU64(writer, MkvId::MkvEBMLMaxSizeLength, 8) {
        return false;
    }
    if !WriteEbmlElementArgStr(writer, MkvId::MkvDocType, doc_type) {
        return false;
    }
    if !WriteEbmlElementArgU64(writer, MkvId::MkvDocTypeVersion, doc_type_version) {
        return false;
    }
    if !WriteEbmlElementArgU64(writer, MkvId::MkvDocTypeReadVersion, 2) {
        return false;
    }
    true
}
/// Convenience wrapper: writes an EBML header with the "webm" doc type.
pub fn WriteEbmlHeader2(writer: &mut dyn Writer, doc_type_version: u64) -> bool {
    WriteEbmlHeader(writer, doc_type_version, DOC_TYPE_WEBM)
}
/*
fn WriteEbmlHeader3(writer:&mut dyn Writer) ->bool{
WriteEbmlHeader(writer, mkvmuxer::Segment::kDefaultDocTypeVersion)
}*/
|
use std::{collections::HashMap, marker::PhantomData};
use super::{
and::And, not::Not, or::Or, passthrough::Passthrough, ActiveFilter, DynamicFilter, FilterResult,
};
use crate::internals::{query::view::Fetch, storage::component::Component, world::WorldId};
/// A filter which performs coarse-grained change detection.
///
/// This filter will reject _most_ components which have not
/// been changed, but not all.
#[derive(Debug)]
pub struct ComponentChangedFilter<T: Component> {
_phantom: PhantomData<T>,
// Per-world high-water mark of the component version seen on a prior run.
history: HashMap<WorldId, u64>,
// The world this filter was most recently prepared for, if any.
world: Option<WorldId>,
// Versions at or below this value are treated as unchanged this run.
threshold: u64,
// Highest component version observed during the current run.
maximum: u64,
}
impl<T: Component> Default for ComponentChangedFilter<T> {
fn default() -> Self {
Self {
_phantom: PhantomData,
history: Default::default(),
world: None,
threshold: 0,
maximum: 0,
}
}
}
impl<T: Component> Clone for ComponentChangedFilter<T> {
// Clones the persisted version history but deliberately resets the
// per-run state (`world`, `threshold`, `maximum`): the clone starts
// fresh and must be prepared before use.
fn clone(&self) -> Self {
Self {
_phantom: PhantomData,
history: self.history.clone(),
world: None,
threshold: 0,
maximum: 0,
}
}
}
// Marker impl: this filter actively filters and participates in the
// operator-based filter composition below.
impl<T: Component> ActiveFilter for ComponentChangedFilter<T> {}
impl<T: Component> DynamicFilter for ComponentChangedFilter<T> {
fn prepare(&mut self, world: WorldId) {
// Persist the high-water mark for the world we were previously
// tracking before switching to the new world.
if let Some(world) = self.world {
self.history.insert(world, self.maximum);
}
self.world = Some(world);
// Resume from this world's stored version mark, defaulting to 0 for
// a world we have never seen.
self.threshold = *self.history.entry(world).or_insert(0);
self.maximum = self.threshold;
}
fn matches_archetype<Fet: Fetch>(&mut self, fetch: &Fet) -> FilterResult {
if let Some(version) = fetch.version::<T>() {
// Track the newest component version observed this run.
if version > self.maximum {
self.maximum = version;
}
// Match only archetypes whose component data changed since the
// last prepared run.
FilterResult::Match(version > self.threshold)
} else {
// Archetype has no T component; defer to other filters.
FilterResult::Defer
}
}
}
impl<T: Component> std::ops::Not for ComponentChangedFilter<T> {
type Output = Not<Self>;
// Wraps the filter so that its match result is logically negated.
#[inline]
fn not(self) -> Self::Output {
Not { filter: self }
}
}
// NOTE: the unused `'a` lifetime parameter was removed — it constrained
// nothing and was never referenced in the impl.
impl<T: Component, Rhs: ActiveFilter> std::ops::BitAnd<Rhs> for ComponentChangedFilter<T> {
    type Output = And<(Self, Rhs)>;
    /// Combines this filter with `rhs`; both must match.
    #[inline]
    fn bitand(self, rhs: Rhs) -> Self::Output {
        And {
            filters: (self, rhs),
        }
    }
}
// NOTE: the unused `'a` lifetime parameter was removed.
impl<T: Component> std::ops::BitAnd<Passthrough> for ComponentChangedFilter<T> {
    type Output = Self;
    /// AND with a passthrough filter is a no-op: the result is this filter.
    #[inline]
    fn bitand(self, _: Passthrough) -> Self::Output {
        self
    }
}
// NOTE: the unused `'a` lifetime parameter was removed.
impl<T: Component, Rhs: ActiveFilter> std::ops::BitOr<Rhs> for ComponentChangedFilter<T> {
    type Output = Or<(Self, Rhs)>;
    /// Combines this filter with `rhs`; either may match.
    #[inline]
    fn bitor(self, rhs: Rhs) -> Self::Output {
        Or {
            filters: (self, rhs),
        }
    }
}
// NOTE: the unused `'a` lifetime parameter was removed.
impl<T: Component> std::ops::BitOr<Passthrough> for ComponentChangedFilter<T> {
    type Output = Self;
    /// OR with a passthrough filter is a no-op: the result is this filter.
    #[inline]
    fn bitor(self, _: Passthrough) -> Self::Output {
        self
    }
}
|
use std::{collections::HashMap, hash::Hash};
// TODO: add accuracy to depth cache
// TODO: purge cached entries, keep count per depth, and if it reaches zero
// TODO: actually cache meshdata
// Key for the glyph-mesh cache: the character plus the mesh variant it was
// generated for.
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
pub struct CacheKey {
char: char,
mesh_type: MeshType,
}
impl CacheKey {
    // Builds a cache key for the 3D mesh of `char` extruded to `depth`.
    pub(crate) fn new_3d(char: char, depth: f32) -> Self {
        let mesh_type = MeshType::Mesh3d(Depth(depth));
        Self { char, mesh_type }
    }
}
// Which mesh representation a cached entry corresponds to.
#[derive(Debug, Hash, PartialEq, Eq, Clone)]
pub enum MeshType {
// Mesh2d,
Mesh3d(Depth),
}
// Extrusion depth newtype. `Hash`/`Eq` are implemented manually below by
// quantizing to 0.01 units. NOTE(review): `Eq` over an f32 is not reflexive
// for NaN — confirm NaN depths cannot occur.
#[derive(Debug, PartialEq, Clone)]
pub struct Depth(f32);
impl Hash for Depth {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
// Quantize to 0.01 so the float can be hashed as an integer. Equal
// floats always produce equal hashes, which satisfies the Hash/Eq
// contract; note that negative depths saturate to 0 in the cast.
let val = (self.0 * 100.) as usize;
val.hash(state);
}
}
// `Eq` has no methods, so an empty impl is normal: it merely asserts that
// `PartialEq` is a total equivalence for `Depth`. That assertion is false
// for NaN (NaN != NaN) — see the note on the struct.
impl Eq for Depth {}
// Cache of generated glyph meshes, keyed by character + mesh variant.
pub struct MeshCache {
pub(crate) meshes: HashMap<CacheKey, ttf2mesh::Mesh<'static, ttf2mesh::Mesh3d>>,
}
impl Default for MeshCache {
fn default() -> Self {
Self {
meshes: HashMap::new(),
}
}
}
// SAFETY(review): `ttf2mesh::Mesh` is presumably !Send/!Sync because it holds
// raw pointers into library-owned data; these impls assert that moving or
// sharing the cache across threads is nevertheless sound. That has NOT been
// verified against ttf2mesh's internals — confirm before relying on it.
unsafe impl Send for MeshCache {} // FIXME: verify soundness
unsafe impl Sync for MeshCache {} // FIXME: verify soundness
|
use common::GiphyGif;
use seed::{*, prelude::*};
use crate::state::ModelEvent;
/// A card displaying information on a Giphy GIF.
///
/// `catg_input` is the user's in-progress category text (takes precedence
/// over the GIF's stored category); the `on_*` callbacks translate DOM
/// events into `ModelEvent`s for the app's update loop.
pub fn gifcard(
gif: &GiphyGif,
catg_input: Option<&String>,
mut on_save: impl FnMut(String) -> ModelEvent + Clone + 'static,
mut on_remove: impl FnMut(String) -> ModelEvent + Clone + 'static,
mut on_update_category: impl FnMut(String, String) -> ModelEvent + Clone + 'static,
mut on_categorize: impl FnMut(String) -> ModelEvent + Clone + 'static,
) -> Node<ModelEvent> {
// Build closures.
// Each handler needs its own owned copy of the GIF id because every
// closure is moved into a separate 'static event callback.
let (gid0, gid1, gid2, gid3) = (gif.id.clone(), gif.id.clone(), gif.id.clone(), gif.id.clone());
let save_event = mouse_ev(Ev::Click, move |_| on_save(gid0.clone()));
let _remove_event = mouse_ev(Ev::Click, move |_| on_remove(gid1.clone())); // TODO: wire up remove.
let categorize_event = input_ev(Ev::Blur, move |_| on_categorize(gid2.clone()));
let update_catg_event = input_ev(Ev::Input, move |catg| on_update_category(gid3.clone(), catg));
// Build icon for favorite status.
// Saved GIFs render a solid-star button; unsaved ones an outlined-star link.
let is_saved = gif.is_saved;
let icon = match is_saved {
true => button!(class!("button is-rounded is-small has-text-warning"), save_event, b!(attrs!(At::Class => "fas fa-star"))),
false => a!(class!("button is-rounded is-outlined is-small has-text-warning"), save_event, i!(attrs!(At::Class => "far fa-star"))),
};
// Setup category controls.
// Prefer the in-progress input value, then the stored category, then empty.
let category_val = catg_input.map(|c| c.as_str())
.or(gif.category.as_ref().map(|c| c.as_str()))
.unwrap_or("");
let mut input_attrs = attrs!(
At::Class=>"input is-small is-rounded"; At::Value=>category_val;
At::Type=>"text"; At::Placeholder=>"Categorize...";
);
// Categorization is only available once the GIF has been saved.
if !is_saved {
input_attrs.add(At::Disabled, "true");
}
div!(class!("GifCard-card column is-full-mobile is-half-tablet is-one-quarter-desktop"),
div!(class!("box p-three-quarter"),
div!(class!("media justify-content-center"),
div!(class!("media-center"),
div!(class!("content"),
figure!(class!("image"),
img!(attrs!("src" => &gif.url))
),
p!(class!("mb-half is-size-7"), &gif.title),
div!(class!("field is-grouped is-grouped-centered"),
p!(class!("control"),
icon
),
p!(class!("control"),
input!(input_attrs, update_catg_event, categorize_event)
),
)
)
)
)
)
)
}
|
use log::debug;
use pretty_env_logger::env_logger;
use regex::Regex;
use serde_derive::Deserialize;
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::io::SeekFrom;
// Configuration loaded from `ploter.conf` (TOML) located next to the binary.
// Field names are part of the config-file format and must not be renamed.
#[derive(Deserialize, Debug)]
struct Config {
// Regex matching G-code lines that should be deleted outright.
extruder_regex: String,
// Placeholder comment in the G-code that is replaced (once) by
// `custom_commands`.
replace_comment: String,
custom_commands: String,
// Per-tool filament load/unload G-code sequences for extruders T0..T8.
t0_load: String,
t0_unload: String,
t1_load: String,
t1_unload: String,
t2_load: String,
t2_unload: String,
t3_load: String,
t3_unload: String,
t4_load: String,
t4_unload: String,
t5_load: String,
t5_unload: String,
t6_load: String,
t6_unload: String,
t7_load: String,
t7_unload: String,
t8_load: String,
t8_unload: String,
}
fn main() {
    env_logger::init();
    // Locate the config: it needs to be in the same directory as the binary,
    // so take the current exe path and swap the file name for "ploter.conf".
    let mut config_path: String = String::new();
    match env::current_exe() {
        Ok(exe_path) => config_path = exe_path.display().to_string(),
        Err(e) => println!("failed to get current exe path: {}", e),
    }
    // Replace the trailing file-name component of the exe path.
    let regex = Regex::new(r"[A-Za-z0-9_?\-\.][A-Za-z0-9?.?-]+$").unwrap();
    let config_path = regex.replace_all(&config_path, "ploter.conf").to_string();
    // Read the config and parse it as TOML.
    let mut config_file = File::open(config_path).unwrap();
    let mut config_file_string: String = String::new();
    config_file.read_to_string(&mut config_file_string).unwrap();
    let config_file_toml: Config = toml::from_str(&config_file_string).unwrap();
    // The slicer passes the G-code path as the first CLI argument.
    let arguments: Vec<String> = env::args().collect();
    let path_gcode = arguments[1].clone();
    // Read the whole G-code file into memory.
    let mut gcode_file = File::open(&path_gcode).unwrap();
    gcode_file.seek(SeekFrom::Start(0)).unwrap();
    let mut gcode_data = String::new();
    // BUG FIX: the Result was previously ignored, so a failed read would
    // silently process an empty or truncated file.
    gcode_file
        .read_to_string(&mut gcode_data)
        .expect("failed to read gcode file");
    drop(gcode_file); // Close the file early.
    // Replace the placeholder comment with the custom commands (first hit only).
    let gcode_data = gcode_data.replacen(
        &config_file_toml.replace_comment,
        &config_file_toml.custom_commands,
        1,
    );
    // Delete everything matched by the configured extruder regex.
    let regex = Regex::new(&config_file_toml.extruder_regex).unwrap();
    let gcode_data = regex.replace_all(&gcode_data, "").to_string();
    // Rewrite multicolor tool-change lines into load/unload sequences.
    let mut gcode_data_push: String = String::new();
    let mut first_change: bool = true;
    let mut previous_change: i32 = 0;
    for line_string in gcode_data.split('\n') {
        gcode_data_push.push_str(&manage_tool(
            line_string,
            &mut first_change,
            &mut previous_change,
            &config_file_toml,
        ));
    }
    // Write the transformed G-code back in place.
    // BUG FIX: `write` may write only part of the buffer; `write_all` loops
    // until every byte is written.
    let mut file_write = File::create(&path_gcode).unwrap();
    file_write.write_all(gcode_data_push.as_bytes()).unwrap();
}
// Rewrites a single G-code line. A "T<n> ; change extruder" line becomes the
// configured load sequence for tool n (preceded by the unload sequence of the
// previously active tool, except on the very first change); any other line is
// passed through with its newline restored. Updates `first_change` and
// `previous_change` as side effects so the caller can stream line by line.
//
// Refactor: the original nine copy-pasted branches (T0..T8) are collapsed
// into one loop plus a config lookup; observable behavior is unchanged.
fn manage_tool(
    line_string: &str,
    first_change: &mut bool,
    previous_change: &mut i32,
    config_file_toml: &Config,
) -> String {
    // Map a tool index (0..=8) to its load sequence from the config.
    fn load_for(tool: i32, config: &Config) -> &String {
        match tool {
            0 => &config.t0_load,
            1 => &config.t1_load,
            2 => &config.t2_load,
            3 => &config.t3_load,
            4 => &config.t4_load,
            5 => &config.t5_load,
            6 => &config.t6_load,
            7 => &config.t7_load,
            8 => &config.t8_load,
            _ => unreachable!("tool index out of range"),
        }
    }
    for tool in 0..=8 {
        let marker = format!("T{} ; change extruder", tool);
        if line_string.contains(&marker) {
            let line_string_edited = if *first_change {
                // First tool change of the file: load only, nothing to unload.
                *first_change = false;
                load_for(tool, config_file_toml).clone()
            } else {
                // Later changes: unload the previous tool, then load this one.
                // `previous_unload_return` must run before `previous_change`
                // is overwritten below — it reads the old value.
                let mut s = previous_unload_return(previous_change, config_file_toml);
                s.push_str(load_for(tool, config_file_toml));
                s
            };
            *previous_change = tool;
            return line_string_edited;
        }
    }
    // Not a tool-change line: pass it through, restoring the newline that was
    // stripped by the caller's split on '\n'.
    let mut passthrough = String::from(line_string);
    passthrough.push('\n');
    passthrough
}
// Returns the configured unload G-code for the previously active tool.
// Falls back to a bare PAUSE command for any index outside 0..=8.
// (The `&mut i32` parameter is only read; the signature is kept for the
// existing caller.)
fn previous_unload_return(previous_change: &mut i32, config_file_toml: &Config) -> String {
    match *previous_change {
        0 => config_file_toml.t0_unload.clone(),
        1 => config_file_toml.t1_unload.clone(),
        2 => config_file_toml.t2_unload.clone(),
        3 => config_file_toml.t3_unload.clone(),
        4 => config_file_toml.t4_unload.clone(),
        5 => config_file_toml.t5_unload.clone(),
        6 => config_file_toml.t6_unload.clone(),
        7 => config_file_toml.t7_unload.clone(),
        8 => config_file_toml.t8_unload.clone(),
        _ => String::from("PAUSE\n"),
    }
}
|
use libc;
extern "C" {
// FFI declaration resolved at link time against the C tinycbor library.
#[no_mangle]
fn cbor_encoder_close_container(
encoder: *mut CborEncoder_0,
containerEncoder: *const CborEncoder_0,
) -> CborError_0;
}
// C type aliases and error constants transliterated from tinycbor by c2rust.
// The numeric values must stay in sync with the C library's CborError enum.
pub type ptrdiff_t = libc::c_long;
pub type size_t = libc::c_ulong;
pub type uint8_t = libc::c_uchar;
/* #define the constants so we can check with #ifdef */
/* Error API */
pub type CborError = libc::c_int;
/* INT_MAX on two's complement machines */
pub const CborErrorInternalError: CborError = 2147483647;
pub const CborErrorOutOfMemory: CborError = -2147483648;
pub const CborErrorJsonNotImplemented: CborError = 1282;
pub const CborErrorJsonObjectKeyNotString: CborError = 1281;
/* errors in converting to JSON */
pub const CborErrorJsonObjectKeyIsAggregate: CborError = 1280;
pub const CborErrorUnsupportedType: CborError = 1026;
pub const CborErrorNestingTooDeep: CborError = 1025;
/* internal implementation errors */
pub const CborErrorDataTooLarge: CborError = 1024;
pub const CborErrorTooFewItems: CborError = 769;
/* encoder errors */
pub const CborErrorTooManyItems: CborError = 768;
pub const CborErrorMapKeysNotUnique: CborError = 523;
pub const CborErrorMapNotSorted: CborError = 522;
pub const CborErrorMapKeyNotString: CborError = 521;
pub const CborErrorOverlongEncoding: CborError = 520;
pub const CborErrorImproperValue: CborError = 519;
pub const CborErrorExcludedValue: CborError = 518;
pub const CborErrorExcludedType: CborError = 517;
pub const CborErrorInvalidUtf8TextString: CborError = 516;
pub const CborErrorDuplicateObjectKeys: CborError = 515;
pub const CborErrorInappropriateTagForType: CborError = 514;
pub const CborErrorUnknownTag: CborError = 513;
/* parser errors in strict mode parsing only */
pub const CborErrorUnknownSimpleType: CborError = 512;
/* types of value less than 32 encoded in two bytes */
pub const CborErrorIllegalSimpleType: CborError = 262;
pub const CborErrorIllegalNumber: CborError = 261;
/* type not allowed here */
pub const CborErrorIllegalType: CborError = 260;
/* can only happen in major type 7 */
pub const CborErrorUnknownType: CborError = 259;
pub const CborErrorUnexpectedBreak: CborError = 258;
pub const CborErrorUnexpectedEOF: CborError = 257;
/* parser errors streaming errors */
pub const CborErrorGarbageAtEnd: CborError = 256;
pub const CborErrorIO: CborError = 4;
pub const CborErrorAdvancePastEOF: CborError = 3;
/* request for length in array, map, or string with indeterminate length */
pub const CborErrorUnknownLength: CborError = 2;
/* errors in all modes */
pub const CborUnknownError: CborError = 1;
pub const CborNoError: CborError = 0;
pub type CborError_0 = CborError;
/* Encoder API */
// Layout must exactly mirror the C struct (hence #[repr(C)]).
#[derive(Copy, Clone)]
#[repr(C)]
pub struct CborEncoder {
// Union: presumably the write cursor while encoding succeeds, or the byte
// deficit after the buffer overflows — confirm against tinycbor's header.
pub data: unnamed,
pub end: *const uint8_t,
pub remaining: size_t,
pub flags: libc::c_int,
}
// Anonymous union from the C source; the name `unnamed` is a c2rust artifact.
#[derive(Copy, Clone)]
#[repr(C)]
pub union unnamed {
pub ptr: *mut uint8_t,
pub bytes_needed: ptrdiff_t,
}
pub type CborEncoder_0 = CborEncoder;
#[no_mangle]
pub unsafe extern "C" fn cbor_encoder_close_container_checked(
    encoder: *mut CborEncoder_0,
    containerEncoder: *const CborEncoder_0,
) -> CborError_0 {
    // Thin wrapper: delegate directly to the unchecked close routine.
    cbor_encoder_close_container(encoder, containerEncoder)
}
|
// Copyright 2021 rust-ipfs-api Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//
use crate::{read::LineDecoder, request, response, Backend};
use async_trait::async_trait;
use bytes::Bytes;
use common_multipart_rfc7578::client::multipart;
use futures::{future, FutureExt, Stream, TryStreamExt};
use std::{
fs::File,
io::{Cursor, Read},
path::{Path, PathBuf},
};
// Maximum number of file handles held open simultaneously by `add_path`;
// files beyond this budget are buffered fully in memory instead.
// NOTE(review): the rustdoc on `add_path` says "128", but the comparison
// `it < FILE_DESCRIPTOR_LIMIT` enforces 127 — confirm which is intended.
const FILE_DESCRIPTOR_LIMIT: usize = 127;
// Implements a call to the IPFS that returns a streaming body response.
// Implementing this in a macro is necessary because the Rust compiler
// can't reason about the lifetime of the request instance properly. It
// thinks that the request needs to live as long as the returned stream,
// but in reality, the request instance is only used to build the Hyper
// or Actix request.
//
macro_rules! impl_stream_api_response {
// Shorthand arm: forward the built request to the named backend method.
(($self:ident, $req:expr, $form:expr) => $call:ident) => {
impl_stream_api_response! {
($self, $req, $form) |req| => { $self.$call(req) }
}
};
// General arm: build the base request, bind it to `$var`, and run `$impl`;
// if building fails, surface the error as a one-item stream.
(($self:ident, $req:expr, $form:expr) |$var:ident| => $impl:block) => {
match $self.build_base_request(&$req, $form) {
Ok($var) => $impl,
Err(e) => Box::new(future::err(e).into_stream()),
}
};
}
#[async_trait(?Send)]
pub trait IpfsApi: Backend {
/// Add file to Ipfs.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::io::Cursor;
///
/// let client = IpfsClient::default();
/// let data = Cursor::new("Hello World!");
/// let res = client.add(data);
/// ```
///
async fn add<R>(&self, data: R) -> Result<response::AddResponse, Self::Error>
where
    R: 'static + Read + Send + Sync,
{
    // Delegate to the options-taking variant with all defaults.
    let options = request::Add::default();
    self.add_with_options(data, options).await
}
/// Add a file to IPFS with options.
///
/// # Examples
///
/// ```no_run
/// # extern crate ipfs_api;
/// #
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::io::Cursor;
///
/// # fn main() {
/// let client = IpfsClient::default();
/// let data = Cursor::new("Hello World!");
/// #[cfg(feature = "with-builder")]
/// let add = ipfs_api::request::Add::builder()
///     .chunker("rabin-512-1024-2048")
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let add = ipfs_api::request::Add {
///     chunker: Some("rabin-512-1024-2048"),
///     ..Default::default()
/// };
/// let req = client.add_with_options(data, add);
/// # }
/// ```
///
async fn add_with_options<R>(
    &self,
    data: R,
    add: request::Add<'_>,
) -> Result<response::AddResponse, Self::Error>
where
    R: 'static + Read + Send + Sync,
{
    // Package the reader as the multipart "path" part, then issue the add
    // request with the caller-supplied options.
    let form = {
        let mut f = multipart::Form::default();
        f.add_reader("path", data);
        f
    };
    self.request(add, Some(form)).await
}
/// Add a path to Ipfs. Can be a file or directory.
/// A hard limit of 128 open file descriptors is set such
/// that any small additional files are stored in-memory.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let path = "./src";
/// let res = client.add_path(path);
/// ```
///
async fn add_path<P>(&self, path: P) -> Result<Vec<response::AddResponse>, Self::Error>
where
P: AsRef<Path>,
{
let prefix = path.as_ref().parent();
let mut paths_to_add: Vec<(PathBuf, u64)> = vec![];
// Walk the tree, collecting every regular file together with its size.
for path in walkdir::WalkDir::new(path.as_ref()) {
match path {
Ok(entry) if entry.file_type().is_file() => {
let file_size = entry
.metadata()
.map(|metadata| metadata.len())
.map_err(|e| crate::Error::Io(e.into()))?;
paths_to_add.push((entry.path().to_path_buf(), file_size));
}
Ok(_) => (),
Err(e) => return Err(crate::Error::Io(e.into()).into()),
}
}
// Largest files first: the biggest files get real descriptors below and
// only the smaller remainder is buffered in memory.
paths_to_add.sort_unstable_by(|(_, a), (_, b)| a.cmp(b).reverse());
let mut it = 0;
let mut form = multipart::Form::default();
for (path, file_size) in paths_to_add {
let mut file = File::open(&path).map_err(crate::Error::Io)?;
// Name each multipart part relative to the root path's parent.
let file_name = match prefix {
Some(prefix) => path.strip_prefix(prefix).unwrap(),
None => path.as_path(),
}
.to_string_lossy();
if it < FILE_DESCRIPTOR_LIMIT {
form.add_reader_file("path", file, file_name);
it += 1;
} else {
// Descriptor budget exhausted: read the file fully and attach an
// in-memory cursor instead of the open handle.
let mut buf = Vec::with_capacity(file_size as usize);
let _ = file.read_to_end(&mut buf).map_err(crate::Error::Io)?;
form.add_reader_file("path", Cursor::new(buf), file_name);
}
}
let req = self.build_base_request(&request::Add::default(), Some(form))?;
self.request_stream_json(req).try_collect().await
}
// The methods below are thin wrappers: each builds a typed request struct and
// forwards it to the backend via `request`/`request_empty`, or — for streaming
// bodies — via the `impl_stream_api_response!` macro.
/// Returns the current ledger for a peer.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bitswap_ledger("QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ");
/// ```
///
async fn bitswap_ledger(
&self,
peer: &str,
) -> Result<response::BitswapLedgerResponse, Self::Error> {
self.request(request::BitswapLedger { peer }, None).await
}
/// Triggers a reprovide.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bitswap_reprovide();
/// ```
///
async fn bitswap_reprovide(&self) -> Result<response::BitswapReprovideResponse, Self::Error> {
self.request_empty(request::BitswapReprovide, None).await
}
/// Returns some stats about the bitswap agent.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bitswap_stat();
/// ```
///
async fn bitswap_stat(&self) -> Result<response::BitswapStatResponse, Self::Error> {
self.request(request::BitswapStat, None).await
}
/// Remove a given block from your wantlist.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bitswap_unwant("QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA");
/// ```
///
async fn bitswap_unwant(
&self,
key: &str,
) -> Result<response::BitswapUnwantResponse, Self::Error> {
self.request_empty(request::BitswapUnwant { key }, None)
.await
}
/// Shows blocks on the wantlist for you or the specified peer.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bitswap_wantlist(
/// Some("QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ")
/// );
/// ```
///
async fn bitswap_wantlist(
&self,
peer: Option<&str>,
) -> Result<response::BitswapWantlistResponse, Self::Error> {
self.request(request::BitswapWantlist { peer }, None).await
}
/// Gets a raw IPFS block.
///
/// # Examples
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let hash = "QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA";
/// let res = client
/// .block_get(hash)
/// .map_ok(|chunk| chunk.to_vec())
/// .try_concat();
/// ```
///
// Streaming response: built via the macro so the request value's lifetime
// stays local to this call.
fn block_get(&self, hash: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
impl_stream_api_response! {
(self, request::BlockGet { hash }, None) => request_stream_bytes
}
}
/// Store input as an IPFS block.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::io::Cursor;
///
/// let client = IpfsClient::default();
/// let data = Cursor::new("Hello World!");
/// let res = client.block_put(data);
/// ```
///
async fn block_put<R>(&self, data: R) -> Result<response::BlockPutResponse, Self::Error>
where
R: 'static + Read + Send + Sync,
{
let mut form = multipart::Form::default();
form.add_reader("data", data);
self.request(request::BlockPut, Some(form)).await
}
/// Removes an IPFS block.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.block_rm("QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA");
/// ```
///
async fn block_rm(&self, hash: &str) -> Result<response::BlockRmResponse, Self::Error> {
self.request(request::BlockRm { hash }, None).await
}
/// Prints information about a raw IPFS block.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.block_stat("QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA");
/// ```
///
async fn block_stat(&self, hash: &str) -> Result<response::BlockStatResponse, Self::Error> {
self.request(request::BlockStat { hash }, None).await
}
/// Add default peers to the bootstrap list.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bootstrap_add_default();
/// ```
///
async fn bootstrap_add_default(
&self,
) -> Result<response::BootstrapAddDefaultResponse, Self::Error> {
self.request(request::BootstrapAddDefault, None).await
}
/// Lists peers in bootstrap list.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bootstrap_list();
/// ```
///
async fn bootstrap_list(&self) -> Result<response::BootstrapListResponse, Self::Error> {
self.request(request::BootstrapList, None).await
}
/// Removes all peers in bootstrap list.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.bootstrap_rm_all();
/// ```
///
async fn bootstrap_rm_all(&self) -> Result<response::BootstrapRmAllResponse, Self::Error> {
self.request(request::BootstrapRmAll, None).await
}
/// Returns the contents of an Ipfs object.
///
/// # Examples
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let hash = "QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA";
/// let res = client
/// .cat(hash)
/// .map_ok(|chunk| chunk.to_vec())
/// .try_concat();
/// ```
///
fn cat(&self, path: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
impl_stream_api_response! {
(self, request::Cat { path }, None) => request_stream_bytes
}
}
/// List available commands that the server accepts.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.commands();
/// ```
///
async fn commands(&self) -> Result<response::CommandsResponse, Self::Error> {
self.request(request::Commands, None).await
}
// NOTE: the three `config_get_*` methods below send identical requests
// (key only); they exist separately to document the expected value type
// of the key being fetched.
/// Get ipfs config strings.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_get_string("Identity.PeerID");
/// ```
///
async fn config_get_string(&self, key: &str) -> Result<response::ConfigResponse, Self::Error> {
self.request(
request::Config {
key,
value: None,
boolean: None,
stringified_json: None,
},
None,
)
.await
}
/// Get ipfs config booleans.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_get_bool("Datastore.HashOnRead");
/// ```
///
async fn config_get_bool(&self, key: &str) -> Result<response::ConfigResponse, Self::Error> {
self.request(
request::Config {
key,
value: None,
boolean: None,
stringified_json: None,
},
None,
)
.await
}
/// Get ipfs config json.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_get_json("Mounts");
/// ```
///
async fn config_get_json(&self, key: &str) -> Result<response::ConfigResponse, Self::Error> {
self.request(
request::Config {
key,
value: None,
boolean: None,
stringified_json: None,
},
None,
)
.await
}
/// Set ipfs config string.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_set_string("Routing.Type", "dht");
/// ```
///
async fn config_set_string(
&self,
key: &str,
value: &str,
) -> Result<response::ConfigResponse, Self::Error> {
self.request(
request::Config {
key,
value: Some(value),
boolean: None,
stringified_json: None,
},
None,
)
.await
}
/// Set ipfs config boolean.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_set_bool("Pubsub.DisableSigning", false);
/// ```
///
async fn config_set_bool(
&self,
key: &str,
value: bool,
) -> Result<response::ConfigResponse, Self::Error> {
// The bool is sent as its string form with the `boolean` flag set so
// the server parses it as a boolean rather than a string.
self.request(
request::Config {
key,
value: Some(&value.to_string()),
boolean: Some(true),
stringified_json: None,
},
None,
)
.await
}
/// Set ipfs config json.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_set_json("Discovery", r#"{"MDNS":{"Enabled":true,"Interval":10}}"#);
/// ```
///
async fn config_set_json(
&self,
key: &str,
value: &str,
) -> Result<response::ConfigResponse, Self::Error> {
self.request(
request::Config {
key,
value: Some(value),
boolean: None,
stringified_json: Some(true),
},
None,
)
.await
}
/// Opens the config file for editing (on the server).
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_edit();
/// ```
///
async fn config_edit(&self) -> Result<response::ConfigEditResponse, Self::Error> {
self.request(request::ConfigEdit, None).await
}
/// Replace the config file.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::io::Cursor;
///
/// let client = IpfsClient::default();
/// let config = Cursor::new("{..json..}");
/// let res = client.config_replace(config);
/// ```
///
async fn config_replace<R>(
&self,
data: R,
) -> Result<response::ConfigReplaceResponse, Self::Error>
where
R: 'static + Read + Send + Sync,
{
let mut form = multipart::Form::default();
form.add_reader("file", data);
self.request_empty(request::ConfigReplace, Some(form)).await
}
/// Show the current config of the server.
///
/// Returns an unparsed json string, due to an unclear spec.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.config_show();
/// ```
///
async fn config_show(&self) -> Result<response::ConfigShowResponse, Self::Error> {
self.request_string(request::ConfigShow, None).await
}
/// Returns information about a dag node in Ipfs.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let hash = "QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA";
/// let res = client
/// .dag_get(hash)
/// .map_ok(|chunk| chunk.to_vec())
/// .try_concat();
/// ```
///
fn dag_get(&self, path: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
impl_stream_api_response! {
(self, request::DagGet { path }, None) => request_stream_bytes
}
}
/// Add a DAG node to Ipfs.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::io::Cursor;
///
/// let client = IpfsClient::default();
/// let data = Cursor::new(r#"{ "hello" : "world" }"#);
/// let res = client.dag_put(data);
/// ```
///
async fn dag_put<R>(&self, data: R) -> Result<response::DagPutResponse, Self::Error>
where
R: 'static + Read + Send + Sync,
{
let mut form = multipart::Form::default();
form.add_reader("object data", data);
self.request(request::DagPut, Some(form)).await
}
// TODO /dag/resolve
/// Query the DHT for all of the multiaddresses associated with a Peer ID.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let peer = "QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM";
/// let res = client.dht_findpeer(peer).try_collect::<Vec<_>>();
/// ```
///
fn dht_findpeer(
&self,
peer: &str,
) -> Box<dyn Stream<Item = Result<response::DhtFindPeerResponse, Self::Error>> + Unpin> {
impl_stream_api_response! {
(self, request::DhtFindPeer { peer }, None) => request_stream_json
}
}
/// Find peers in the DHT that can provide a specific value given a key.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let key = "QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA";
/// let res = client.dht_findprovs(key).try_collect::<Vec<_>>();
/// ```
///
fn dht_findprovs(
    &self,
    key: &str,
) -> Box<dyn Stream<Item = Result<response::DhtFindProvsResponse, Self::Error>> + Unpin> {
    // Providers are discovered incrementally, so results arrive as a JSON stream.
    impl_stream_api_response! {
        (self, request::DhtFindProvs { key }, None) => request_stream_json
    }
}
/// Query the DHT for a given key.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let key = "QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA";
/// let res = client.dht_get(key).try_collect::<Vec<_>>();
/// ```
///
fn dht_get(
    &self,
    key: &str,
) -> Box<dyn Stream<Item = Result<response::DhtGetResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::DhtGet { key }, None) => request_stream_json
    }
}
/// Announce to the network that you are providing a given value.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let key = "QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA";
/// let res = client.dht_provide(key).try_collect::<Vec<_>>();
/// ```
///
fn dht_provide(
    &self,
    key: &str,
) -> Box<dyn Stream<Item = Result<response::DhtProvideResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::DhtProvide { key }, None) => request_stream_json
    }
}
/// Write a key/value pair to the DHT.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.dht_put("test", "Hello World!").try_collect::<Vec<_>>();
/// ```
///
fn dht_put(
    &self,
    key: &str,
    value: &str,
) -> Box<dyn Stream<Item = Result<response::DhtPutResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::DhtPut { key, value }, None) => request_stream_json
    }
}
/// Find the closest peer given the peer ID by querying the DHT.
///
/// ```no_run
/// use futures::TryStreamExt;
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let peer = "QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM";
/// let res = client.dht_query(peer).try_collect::<Vec<_>>();
/// ```
///
fn dht_query(
    &self,
    peer: &str,
) -> Box<dyn Stream<Item = Result<response::DhtQueryResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::DhtQuery { peer }, None) => request_stream_json
    }
}
/// Clear inactive requests from the log.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.diag_cmds_clear();
/// ```
///
async fn diag_cmds_clear(&self) -> Result<response::DiagCmdsClearResponse, Self::Error> {
    // Uses `request_empty`: the endpoint's reply carries no body of interest.
    self.request_empty(request::DiagCmdsClear, None).await
}
/// Set how long to keep inactive requests in the log.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.diag_cmds_set_time("1m");
/// ```
///
async fn diag_cmds_set_time(
    &self,
    time: &str,
) -> Result<response::DiagCmdsSetTimeResponse, Self::Error> {
    // `time` is a duration string (e.g. "1m") passed through to the daemon unvalidated.
    self.request_empty(request::DiagCmdsSetTime { time }, None)
        .await
}
/// Print system diagnostic information.
///
/// Note: There isn't good documentation on what this call is supposed to return.
/// It might be platform dependent, but if it isn't, this can be fixed to return
/// an actual object.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.diag_sys();
/// ```
///
async fn diag_sys(&self) -> Result<response::DiagSysResponse, Self::Error> {
    // `request_string` returns the raw textual reply, per the note above.
    self.request_string(request::DiagSys, None).await
}
/// Resolve DNS link.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.dns("ipfs.io", true);
/// ```
///
async fn dns(&self, link: &str, recursive: bool) -> Result<response::DnsResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::Dns { link, recursive }, None).await
}
/// List directory for Unix filesystem objects.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.file_ls("/ipns/ipfs.io");
/// ```
///
async fn file_ls(&self, path: &str) -> Result<response::FileLsResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::FileLs { path }, None).await
}
/// Copy files into MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_cp("/path/to/file", "/dest");
/// ```
///
async fn files_cp(
    &self,
    path: &str,
    dest: &str,
) -> Result<response::FilesCpResponse, Self::Error> {
    // Convenience wrapper: forwards to the options-based variant with defaults.
    self.files_cp_with_options(request::FilesCp {
        path,
        dest,
        ..Default::default()
    })
    .await
}
/// Copy files into MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_cp_with_options(
///     ipfs_api::request::FilesCp {
///         path: "/path/to/file",
///         dest: "/dest",
///         .. Default::default()
///     }
/// );
/// ```
///
async fn files_cp_with_options(
    &self,
    options: request::FilesCp<'_>,
) -> Result<response::FilesCpResponse, Self::Error> {
    // Uses `request_empty`: a successful copy produces no response body.
    self.request_empty(options, None).await
}
/// Flush a path's data to disk.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_flush(None);
/// let res = client.files_flush(Some("/tmp"));
/// ```
///
async fn files_flush(
    &self,
    path: Option<&str>,
) -> Result<response::FilesFlushResponse, Self::Error> {
    // `path: None` flushes the MFS root; see the doctest above.
    self.request_empty(request::FilesFlush { path }, None).await
}
/// List directories in MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_ls(None);
/// let res = client.files_ls(Some("/tmp"));
/// ```
///
async fn files_ls(&self, path: Option<&str>) -> Result<response::FilesLsResponse, Self::Error> {
    // Convenience wrapper: forwards to the options-based variant with defaults.
    self.files_ls_with_options(request::FilesLs {
        path,
        ..Default::default()
    })
    .await
}
/// List directories in MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// #[cfg(feature = "with-builder")]
/// let req = ipfs_api::request::FilesLs::builder()
///     // .path("/") // defaults to /
///     .unsorted(false)
///     .long(true)
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let req = ipfs_api::request::FilesLs {
///     path: None, // defaults to /
///     unsorted: Some(false),
///     long: Some(true),
/// };
/// let res = client.files_ls_with_options(req);
/// ```
///
/// Defaults to `-U`, so the output is unsorted.
///
async fn files_ls_with_options(
    &self,
    options: request::FilesLs<'_>,
) -> Result<response::FilesLsResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(options, None).await
}
/// Make directories in MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_mkdir("/test", false);
/// let res = client.files_mkdir("/test/nested/dir", true);
/// ```
///
async fn files_mkdir(
    &self,
    path: &str,
    parents: bool,
) -> Result<response::FilesMkdirResponse, Self::Error> {
    // Convenience wrapper: `parents` mirrors `mkdir -p`; other options use defaults.
    self.files_mkdir_with_options(request::FilesMkdir {
        path,
        parents: Some(parents),
        ..Default::default()
    })
    .await
}
/// Make directories in MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// #[cfg(feature = "with-builder")]
/// let req = ipfs_api::request::FilesMkdir::builder()
///     .path("/test/nested/dir")
///     .parents(true)
///     .flush(false)
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let req = ipfs_api::request::FilesMkdir {
///     path: "/test/nested/dir",
///     parents: Some(true),
///     flush: Some(false),
///     .. Default::default()
/// };
/// let res = client.files_mkdir_with_options(req);
/// ```
///
async fn files_mkdir_with_options(
    &self,
    options: request::FilesMkdir<'_>,
) -> Result<response::FilesMkdirResponse, Self::Error> {
    // Uses `request_empty`: a successful mkdir produces no response body.
    self.request_empty(options, None).await
}
/// Move files within MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_mv("/test/tmp.json", "/test/file.json");
/// ```
///
async fn files_mv(
    &self,
    path: &str,
    dest: &str,
) -> Result<response::FilesMvResponse, Self::Error> {
    // Convenience wrapper: forwards to the options-based variant with defaults.
    self.files_mv_with_options(request::FilesMv {
        path,
        dest,
        ..Default::default()
    })
    .await
}
/// Move files within MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_mv_with_options(
///     ipfs_api::request::FilesMv {
///         path: "/test/tmp.json",
///         dest: "/test/file.json",
///         flush: Some(false),
///     }
/// );
/// ```
///
async fn files_mv_with_options(
    &self,
    options: request::FilesMv<'_>,
) -> Result<response::FilesMvResponse, Self::Error> {
    // Uses `request_empty`: a successful move produces no response body.
    self.request_empty(options, None).await
}
/// Read a file in MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_read("/test/file.json");
/// ```
///
fn files_read(&self, path: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
    // Convenience wrapper: full file (no offset/count) via the options-based variant.
    self.files_read_with_options(request::FilesRead {
        path,
        ..request::FilesRead::default()
    })
}
/// Read a file in MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// #[cfg(feature = "with-builder")]
/// let req = ipfs_api::request::FilesRead::builder()
///     .path("/test/file.json")
///     .offset(1024)
///     .count(8)
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let req = ipfs_api::request::FilesRead {
///     path: "/test/file.json",
///     offset: Some(1024),
///     count: Some(8),
/// };
/// let res = client.files_read_with_options(req);
/// ```
///
fn files_read_with_options(
    &self,
    options: request::FilesRead,
) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
    // File contents are streamed as raw bytes.
    impl_stream_api_response! { (self, options, None) => request_stream_bytes }
}
/// Remove a file in MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_rm("/test/dir", true);
/// let res = client.files_rm("/test/file.json", false);
/// ```
///
async fn files_rm(
    &self,
    path: &str,
    recursive: bool,
) -> Result<response::FilesRmResponse, Self::Error> {
    // Convenience wrapper: `recursive` is required to delete directories (see doctest).
    self.files_rm_with_options(request::FilesRm {
        path,
        recursive: Some(recursive),
        ..Default::default()
    })
    .await
}
/// Remove a file in MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// #[cfg(feature = "with-builder")]
/// let req = ipfs_api::request::FilesRm::builder()
///     .path("/test/somefile.json")
///     .recursive(false)
///     .flush(false)
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let req = ipfs_api::request::FilesRm {
///     path: "/test/somefile.json",
///     recursive: Some(false),
///     flush: Some(false),
/// };
/// let res = client.files_rm_with_options(req);
/// ```
///
async fn files_rm_with_options(
    &self,
    options: request::FilesRm<'_>,
) -> Result<response::FilesRmResponse, Self::Error> {
    // Uses `request_empty`: a successful removal produces no response body.
    self.request_empty(options, None).await
}
/// Display a file's status in MFS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_stat("/test/file.json");
/// ```
///
async fn files_stat(&self, path: &str) -> Result<response::FilesStatResponse, Self::Error> {
    // Convenience wrapper: forwards to the options-based variant with defaults.
    self.files_stat_with_options(request::FilesStat {
        path,
        ..Default::default()
    })
    .await
}
/// Display a file's status in MFS, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.files_stat_with_options(
///     ipfs_api::request::FilesStat {
///         path: "/test/dir/",
///         with_local: Some(true),
///     }
/// );
/// ```
///
async fn files_stat_with_options(
    &self,
    options: request::FilesStat<'_>,
) -> Result<response::FilesStatResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(options, None).await
}
/// Write to a mutable file in the filesystem.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::fs::File;
///
/// let client = IpfsClient::default();
/// let file = File::open("test.json").unwrap();
/// let res = client.files_write("/test/file.json", true, true, file);
/// ```
///
async fn files_write<R>(
    &self,
    path: &str,
    create: bool,
    truncate: bool,
    data: R,
) -> Result<response::FilesWriteResponse, Self::Error>
where
    R: 'static + Read + Send + Sync,
{
    // Convenience wrapper: only `create`/`truncate` are exposed; the rest use defaults.
    let options = request::FilesWrite {
        path,
        create: Some(create),
        truncate: Some(truncate),
        ..request::FilesWrite::default()
    };
    self.files_write_with_options(options, data).await
}
/// Write to a mutable file in the filesystem, with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::io::Cursor;
///
/// let client = IpfsClient::default();
/// let data = Cursor::new((1..128).collect::<Vec<u8>>());
/// #[cfg(feature = "with-builder")]
/// let req = ipfs_api::request::FilesWrite::builder()
///     .path("/test/outfile.bin")
///     .create(false)
///     .truncate(false)
///     .offset(1 << 20)
///     .flush(false)
///     // see FilesWriteBuilder for the full set of options
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let req = ipfs_api::request::FilesWrite {
///     path: "/test/outfile.bin",
///     create: Some(false),
///     truncate: Some(false),
///     offset: Some(1 << 20),
///     flush: Some(false),
///     .. Default::default()
/// };
/// let res = client.files_write_with_options(req, data);
/// ```
///
async fn files_write_with_options<R>(
    &self,
    options: request::FilesWrite<'_>,
    data: R,
) -> Result<response::FilesWriteResponse, Self::Error>
where
    R: 'static + Read + Send + Sync,
{
    // The file contents are uploaded as a multipart form field named "data".
    let mut form = multipart::Form::default();
    form.add_reader("data", data);
    self.request_empty(options, Some(form)).await
}
/// Change the cid version or hash function of the root node of a given path.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::fs::File;
///
/// let client = IpfsClient::default();
/// let res = client.files_chcid("/test/", 1);
/// ```
///
async fn files_chcid(
    &self,
    path: &str,
    cid_version: i32,
) -> Result<response::FilesChcidResponse, Self::Error> {
    // Only `cid_version` is exposed here; use the `_with_options` variant for `hash`/`flush`.
    self.request_empty(
        request::FilesChcid {
            path: Some(path),
            cid_version: Some(cid_version),
            ..Default::default()
        },
        None,
    )
    .await
}
/// Change the cid version or hash function of the root node of a given path,
/// with full control over the request options.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::fs::File;
///
/// let client = IpfsClient::default();
/// #[cfg(feature = "with-builder")]
/// let req = ipfs_api::request::FilesChcid::builder()
///     .path("/test/")
///     .cid_version(1)
///     .hash("sha3-512")
///     .flush(true)
///     .build();
/// #[cfg(not(feature = "with-builder"))]
/// let req = ipfs_api::request::FilesChcid {
///     path: Some("/test/"),
///     cid_version: Some(1),
///     hash: Some("sha3-512"),
///     flush: Some(true),
/// };
/// let res = client.files_chcid_with_options(req);
/// ```
///
async fn files_chcid_with_options(
    &self,
    options: request::FilesChcid<'_>,
) -> Result<response::FilesChcidResponse, Self::Error> {
    // Uses `request_empty`: a successful change produces no response body.
    self.request_empty(options, None).await
}
/// List blocks that are both in the filestore and standard block storage.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.filestore_dups();
/// ```
///
fn filestore_dups(
    &self,
) -> Box<dyn Stream<Item = Result<response::FilestoreDupsResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::FilestoreDups, None) => request_stream_json
    }
}
/// List objects in filestore.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.filestore_ls(
///     Some("QmYPP3BovR2m8UqCZxFbdXSit6SKgExxDkFAPLqiGsap4X")
/// );
/// ```
///
fn filestore_ls(
    &self,
    cid: Option<&str>,
) -> Box<dyn Stream<Item = Result<response::FilestoreLsResponse, Self::Error>> + Unpin> {
    // `cid: None` lists the whole filestore; a CID restricts the listing to that object.
    impl_stream_api_response! {
        (self, request::FilestoreLs { cid }, None) => request_stream_json
    }
}
/// Verify objects in filestore.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.filestore_verify(None);
/// ```
///
fn filestore_verify(
    &self,
    cid: Option<&str>,
) -> Box<dyn Stream<Item = Result<response::FilestoreVerifyResponse, Self::Error>> + Unpin>
{
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::FilestoreVerify{ cid }, None) => request_stream_json
    }
}
/// Download Ipfs object.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.get("/test/file.json");
/// ```
///
fn get(&self, path: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
    // The object is streamed as raw bytes rather than buffered in memory.
    impl_stream_api_response! {
        (self, request::Get { path }, None) => request_stream_bytes
    }
}
/// Returns information about a peer.
///
/// If `peer` is `None`, returns information about you.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.id(None);
/// let res = client.id(Some("QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM"));
/// ```
///
async fn id(&self, peer: Option<&str>) -> Result<response::IdResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::Id { peer }, None).await
}
/// Create a new keypair.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient, KeyType};
///
/// let client = IpfsClient::default();
/// let res = client.key_gen("test", KeyType::Rsa, 64);
/// ```
///
async fn key_gen(
    &self,
    name: &str,
    kind: request::KeyType,
    size: i32,
) -> Result<response::KeyGenResponse, Self::Error> {
    // `size` is forwarded to the daemon as-is; its meaning depends on `kind`.
    self.request(request::KeyGen { name, kind, size }, None)
        .await
}
/// List all local keypairs.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.key_list();
/// ```
///
async fn key_list(&self) -> Result<response::KeyListResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::KeyList, None).await
}
/// Rename a keypair.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.key_rename("key_0", "new_name", false);
/// ```
///
async fn key_rename(
    &self,
    name: &str,
    new: &str,
    force: bool,
) -> Result<response::KeyRenameResponse, Self::Error> {
    // `force` is passed through to the daemon; presumably it overwrites an existing
    // key named `new` — confirm against the IPFS API docs.
    self.request(request::KeyRename { name, new, force }, None)
        .await
}
/// Remove a keypair.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.key_rm("key_0");
/// ```
///
async fn key_rm(&self, name: &str) -> Result<response::KeyRmResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::KeyRm { name }, None).await
}
/// Change the logging level for a logger.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient, Logger, LoggingLevel};
/// use std::borrow::Cow;
///
/// let client = IpfsClient::default();
/// let res = client.log_level(Logger::All, LoggingLevel::Debug);
/// let res = client.log_level(
///     Logger::Specific(Cow::Borrowed("web")),
///     LoggingLevel::Warning
/// );
/// ```
///
async fn log_level(
    &self,
    logger: request::Logger<'_>,
    level: request::LoggingLevel,
) -> Result<response::LogLevelResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::LogLevel { logger, level }, None)
        .await
}
/// List all logging subsystems.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.log_ls();
/// ```
///
async fn log_ls(&self) -> Result<response::LogLsResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::LogLs, None).await
}
/// Read the event log.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.log_tail();
/// ```
///
fn log_tail(&self) -> Box<dyn Stream<Item = Result<String, Self::Error>> + Unpin> {
    // Unlike the JSON streams above, the log is decoded line-by-line into `String`s
    // via `LineDecoder`, with decode errors converted into `Self::Error`.
    impl_stream_api_response! {
        (self, request::LogTail, None) |req| => {
            self.request_stream(req, |res| {
                Self::process_stream_response(res, LineDecoder).map_err(Self::Error::from)
            })
        }
    }
}
/// List the contents of an Ipfs multihash.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.ls("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY");
/// ```
///
async fn ls(&self, path: &str) -> Result<response::LsResponse, Self::Error> {
    // Convenience form: default options, single buffered response (see
    // `ls_with_options` for the streaming variant).
    self.request(
        request::Ls {
            path,
            ..Default::default()
        },
        None,
    )
    .await
}
/// List the contents of an Ipfs multihash, with full control over the request
/// options, receiving results as a stream.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// #[cfg(feature = "with-builder")]
/// let _ = client.ls_with_options(ipfs_api::request::Ls::builder()
///     .path("/ipfs/QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n")
///     .build()
/// );
/// let _ = client.ls_with_options(ipfs_api::request::Ls {
///     path: "/ipfs/QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n",
///     // Example options for fast listing
///     stream: Some(true),
///     resolve_type: Some(false),
///     size: Some(false),
/// });
/// ```
///
fn ls_with_options(
    &self,
    options: request::Ls<'_>,
) -> Box<dyn Stream<Item = Result<response::LsResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, options, None) => request_stream_json
    }
}
// TODO /mount
/// Publish an IPFS path to IPNS.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.name_publish(
///     "/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY",
///     false,
///     Some("12h"),
///     None,
///     None
/// );
/// ```
///
async fn name_publish(
    &self,
    path: &str,
    resolve: bool,
    lifetime: Option<&str>,
    ttl: Option<&str>,
    key: Option<&str>,
) -> Result<response::NamePublishResponse, Self::Error> {
    // `lifetime`/`ttl` are duration strings (e.g. "12h"); `key: None` presumably
    // publishes under the node's default key — confirm against the IPFS API docs.
    self.request(
        request::NamePublish {
            path,
            resolve,
            lifetime,
            ttl,
            key,
        },
        None,
    )
    .await
}
/// Resolve an IPNS name.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.name_resolve(
///     Some("/ipns/ipfs.io"),
///     true,
///     false
/// );
/// ```
///
async fn name_resolve(
    &self,
    name: Option<&str>,
    recursive: bool,
    nocache: bool,
) -> Result<response::NameResolveResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(
        request::NameResolve {
            name,
            recursive,
            nocache,
        },
        None,
    )
    .await
}
/// Output the raw bytes of an Ipfs object.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.object_data("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY");
/// ```
///
fn object_data(&self, key: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
    // The object's data section is streamed as raw bytes.
    impl_stream_api_response! {
        (self, request::ObjectData { key }, None) => request_stream_bytes
    }
}
/// Returns the diff of two Ipfs objects.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.object_diff(
///     "/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY",
///     "/ipfs/QmXdNSQx7nbdRvkjGCEQgVjVtVwsHvV8NmV2a8xzQVwuFA"
/// );
/// ```
///
async fn object_diff(
    &self,
    key0: &str,
    key1: &str,
) -> Result<response::ObjectDiffResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::ObjectDiff { key0, key1 }, None).await
}
/// Returns the data in an object.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.object_get("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY");
/// ```
///
async fn object_get(&self, key: &str) -> Result<response::ObjectGetResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::ObjectGet { key }, None).await
}
/// Returns the links that an object points to.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.object_links("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY");
/// ```
///
async fn object_links(&self, key: &str) -> Result<response::ObjectLinksResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::ObjectLinks { key }, None).await
}
/// Create a new object.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient, ObjectTemplate};
///
/// let client = IpfsClient::default();
/// let res = client.object_new(None);
/// let res = client.object_new(Some(ObjectTemplate::UnixFsDir));
/// ```
///
async fn object_new(
    &self,
    template: Option<request::ObjectTemplate>,
) -> Result<response::ObjectNewResponse, Self::Error> {
    // `template: None` creates an empty object; a template pre-populates it.
    self.request(request::ObjectNew { template }, None).await
}
// TODO /object/patch/add-link
// TODO /object/patch/append-data
// TODO /object/patch/rm-link
// TODO /object/patch/set-data
// TODO /object/put
/// Returns the stats for an object.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.object_stat("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY");
/// ```
///
async fn object_stat(&self, key: &str) -> Result<response::ObjectStatResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::ObjectStat { key }, None).await
}
// TODO /p2p/listener/close
// TODO /p2p/listener/ls
// TODO /p2p/listener/open
// TODO /p2p/stream/close
// TODO /p2p/stream/dial
// TODO /p2p/stream/ls
/// Pins a new object.
///
/// The "recursive" option tells the server whether to
/// pin just the top-level object, or all sub-objects
/// it depends on. For most cases you want it to be `true`.
///
/// Does not yet implement the "progress" argument because
/// reading it is kinda squirrelly.
///
/// # Examples
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pin_add("QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ", true);
/// ```
///
async fn pin_add(
    &self,
    key: &str,
    recursive: bool,
) -> Result<response::PinAddResponse, Self::Error> {
    // `progress` is hard-wired to false per the note above.
    self.request(
        request::PinAdd {
            key,
            recursive: Some(recursive),
            progress: false,
        },
        None,
    )
    .await
}
/// Returns a list of pinned objects in local storage.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pin_ls(None, None);
/// let res = client.pin_ls(
///     Some("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY"),
///     None
/// );
/// let res = client.pin_ls(None, Some("direct"));
/// ```
///
async fn pin_ls(
    &self,
    key: Option<&str>,
    typ: Option<&str>,
) -> Result<response::PinLsResponse, Self::Error> {
    // `typ` filters by pin type (e.g. "direct"); `None` for either means "all".
    self.request(request::PinLs { key, typ }, None).await
}
/// Removes a pinned object from local storage.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pin_rm(
///     "/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY",
///     false
/// );
/// let res = client.pin_rm(
///     "/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY",
///     true
/// );
/// ```
///
async fn pin_rm(
    &self,
    key: &str,
    recursive: bool,
) -> Result<response::PinRmResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::PinRm { key, recursive }, None).await
}
// TODO /pin/update
// TODO /pin/verify
/// Pings a peer.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.ping("QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64", None);
/// let res = client.ping("QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64", Some(15));
/// ```
///
fn ping(
    &self,
    peer: &str,
    count: Option<i32>,
) -> Box<dyn Stream<Item = Result<response::PingResponse, Self::Error>> + Unpin> {
    // Each ping round-trip arrives as one item in the JSON stream.
    impl_stream_api_response! {
        (self, request::Ping { peer, count }, None) => request_stream_json
    }
}
/// List subscribed pubsub topics.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pubsub_ls();
/// ```
///
async fn pubsub_ls(&self) -> Result<response::PubsubLsResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::PubsubLs, None).await
}
/// List peers that are being published to.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pubsub_peers(None);
/// let res = client.pubsub_peers(Some("feed"));
/// ```
///
async fn pubsub_peers(
    &self,
    topic: Option<&str>,
) -> Result<response::PubsubPeersResponse, Self::Error> {
    // `topic: None` lists peers across all topics (see doctest above).
    self.request(request::PubsubPeers { topic }, None).await
}
/// Publish a message to a topic.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pubsub_pub("feed", "Hello World!");
/// ```
///
async fn pubsub_pub(
    &self,
    topic: &str,
    payload: &str,
) -> Result<response::PubsubPubResponse, Self::Error> {
    // Uses `request_empty`: a successful publish produces no response body.
    self.request_empty(request::PubsubPub { topic, payload }, None)
        .await
}
/// Subscribes to a pubsub topic.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.pubsub_sub("feed", false);
/// let res = client.pubsub_sub("feed", true);
/// ```
///
fn pubsub_sub(
    &self,
    topic: &str,
    discover: bool,
) -> Box<dyn Stream<Item = Result<response::PubsubSubResponse, Self::Error>> + Unpin> {
    // The subscription is long-lived: each published message arrives as a stream item.
    impl_stream_api_response! {
        (self, request::PubsubSub { topic, discover }, None) => request_stream_json
    }
}
/// Gets a list of local references.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.refs_local();
/// ```
///
fn refs_local(
    &self,
) -> Box<dyn Stream<Item = Result<response::RefsLocalResponse, Self::Error>> + Unpin> {
    // Delegates to the backend's streaming JSON request helper.
    impl_stream_api_response! {
        (self, request::RefsLocal, None) => request_stream_json
    }
}
// TODO /repo/fsck
// TODO /repo/gc
// TODO /repo/stat
// TODO /repo/verify
// TODO /repo/version
// TODO /resolve
/// Shutdown the Ipfs daemon.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.shutdown();
/// ```
///
async fn shutdown(&self) -> Result<response::ShutdownResponse, Self::Error> {
    // Uses `request_empty`: the daemon terminates without a meaningful reply body.
    self.request_empty(request::Shutdown, None).await
}
/// Returns bitswap stats.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.stats_bitswap();
/// ```
///
async fn stats_bitswap(&self) -> Result<response::StatsBitswapResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::StatsBitswap, None).await
}
/// Returns bandwidth stats.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.stats_bw();
/// ```
///
async fn stats_bw(&self) -> Result<response::StatsBwResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::StatsBw, None).await
}
/// Returns repo stats.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.stats_repo();
/// ```
///
async fn stats_repo(&self) -> Result<response::StatsRepoResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::StatsRepo, None).await
}
// TODO /swarm/addrs/listen
/// Return a list of local addresses.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.swarm_addrs_local();
/// ```
///
async fn swarm_addrs_local(&self) -> Result<response::SwarmAddrsLocalResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::SwarmAddrsLocal, None).await
}
// TODO /swarm/connect
// TODO /swarm/disconnect
// TODO /swarm/filters/add
// TODO /swarm/filters/rm
/// Return a list of peers with open connections.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.swarm_peers();
/// ```
///
async fn swarm_peers(&self) -> Result<response::SwarmPeersResponse, Self::Error> {
    // Delegates to the backend's JSON request helper.
    self.request(request::SwarmPeers, None).await
}
/// Add a tar file to Ipfs.
///
/// Note: `data` should already be a tar file. If it isn't the Api will return
/// an error.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
/// use std::fs::File;
///
/// let client = IpfsClient::default();
/// let tar = File::open("/path/to/file.tar").unwrap();
/// let res = client.tar_add(tar);
/// ```
///
async fn tar_add<R>(&self, data: R) -> Result<response::TarAddResponse, Self::Error>
where
    R: 'static + Read + Send + Sync,
{
    // The archive is uploaded as a multipart form field named "file".
    let mut form = multipart::Form::default();
    form.add_reader("file", data);
    self.request(request::TarAdd, Some(form)).await
}
/// Export a tar file from Ipfs.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.tar_cat("/ipfs/QmVrLsEDn27sScp3k23sgZNefVTjSAL3wpgW1iWPi4MgoY");
/// ```
///
fn tar_cat(&self, path: &str) -> Box<dyn Stream<Item = Result<Bytes, Self::Error>> + Unpin> {
    // Streaming endpoints go through this macro instead of `self.request`
    // because the response is a byte stream rather than a single document.
    impl_stream_api_response! {
        (self, request::TarCat { path }, None) => request_stream_bytes
    }
}
/// Returns information about the Ipfs server version.
///
/// ```no_run
/// use ipfs_api::{IpfsApi, IpfsClient};
///
/// let client = IpfsClient::default();
/// let res = client.version();
/// ```
///
async fn version(&self) -> Result<response::VersionResponse, Self::Error> {
    // Delegates to the backend's generic dispatcher; `None` = no multipart body.
    self.request(request::Version, None).await
}
}
impl<B> IpfsApi for B where B: Backend {}
|
/// Approximate floating-point equality: true when `$lhs` and `$rhs` differ by
/// less than `$epsilon` (defaults to `std::f64::EPSILON`).
macro_rules! float_eq {
    ($lhs:expr, $rhs:expr, $epsilon:expr) => {
        ($lhs - $rhs).abs() < $epsilon
    };
    ($lhs:expr, $rhs:expr) => {
        float_eq!($lhs, $rhs, std::f64::EPSILON)
    };
}
/// Approximate zero test: true when `$lhs` is within `$epsilon` of zero
/// (defaults to `std::f64::EPSILON`).
macro_rules! float_eq_cero {
    ($lhs:expr, $epsilon:expr) => {
        $lhs.abs() < $epsilon
    };
    ($lhs:expr) => {
        float_eq_cero!($lhs, std::f64::EPSILON)
    };
}
|
use crate::function::ErlangResult;
use crate::term::{OpaqueTerm, Term, TermType};
/// This is an intrinsic expected by the compiler to be defined as part of the runtime, and is used for runtime type checking
#[export_name = "__firefly_builtin_typeof"]
pub extern "C" fn r#typeof(value: OpaqueTerm) -> TermType {
    // `typeof` is a reserved word in Rust, hence the raw identifier; the
    // linker-visible name is fixed by the `export_name` attribute above.
    value.r#typeof()
}
/// This is an intrinsic expected by the compiler to be defined as part of the runtime, and is used for runtime type checking
#[export_name = "__firefly_builtin_is_atom"]
pub extern "C" fn is_atom(value: OpaqueTerm) -> bool {
    // Thin wrapper so the compiler has a stable symbol to call.
    value.is_atom()
}
/// This is an intrinsic expected by the compiler to be defined as part of the runtime, and is used for runtime type checking
#[export_name = "__firefly_builtin_is_number"]
pub extern "C" fn is_number(value: OpaqueTerm) -> bool {
    // Thin wrapper so the compiler has a stable symbol to call.
    value.is_number()
}
/// This is an intrinsic expected by the compiler to be defined as part of the runtime, and is used for runtime type checking
///
/// Returns `Ok(len)` (the tuple arity) for tuples, `Err(0)` for any other term.
#[export_name = "__firefly_builtin_is_tuple"]
pub extern "C" fn is_tuple(value: OpaqueTerm) -> ErlangResult<u32, u32> {
    match value.into() {
        // SAFETY: assumes decoding `value` into `Term::Tuple` yields a valid,
        // live tuple pointer — TODO confirm the decoding invariant upstream.
        Term::Tuple(tup) => ErlangResult::Ok(unsafe { tup.as_ref().len() as u32 }),
        _ => ErlangResult::Err(0),
    }
}
/// This is an intrinsic expected by the compiler to be defined as part of the runtime
#[export_name = "__firefly_builtin_size"]
pub extern "C" fn size(value: OpaqueTerm) -> usize {
    // Thin wrapper so the compiler has a stable symbol to call.
    value.size()
}
#[export_name = "erlang:is_atom/1"]
pub extern "C" fn is_atom1(value: OpaqueTerm) -> ErlangResult {
ErlangResult::Ok(value.is_atom().into())
}
#[export_name = "erlang:is_list/1"]
pub extern "C" fn is_list1(value: OpaqueTerm) -> ErlangResult {
ErlangResult::Ok(value.is_list().into())
}
#[export_name = "erlang:is_binary/1"]
pub extern "C" fn is_binary1(value: OpaqueTerm) -> ErlangResult {
ErlangResult::Ok((value.r#typeof() == TermType::Binary).into())
}
#[export_name = "erlang:is_function/1"]
pub extern "C" fn is_function1(value: OpaqueTerm) -> ErlangResult {
ErlangResult::Ok((value.r#typeof() == TermType::Closure).into())
}
|
pub mod grid;
pub mod out;
pub mod process;
pub mod trim;
#[cfg(feature = "crossterm")]
pub mod crossterm;
|
//! Module provides wrapper for types that cannot be dropped silently.
//! Usually such types are required to be returned to their creator.
//! `Escape` wrapper help the user to do so by sending underlying value to the `Terminal` when it is dropped.
//! Users are encouraged to dispose of the values manually while `Escape` be just a safety net.
use std::{
iter::repeat,
mem::{forget, ManuallyDrop},
ops::{Deref, DerefMut},
ptr::read,
};
use crossbeam_channel::{unbounded, Receiver, Sender};
/// Wraps value of any type and send it to the `Terminal` from which the wrapper was created.
/// In case `Terminal` is already dropped then value will be cast into oblivion via `std::mem::forget`.
#[derive(Debug, Clone)]
pub(crate) struct Escape<T> {
    // Wrapped value; `ManuallyDrop` lets `Drop`/`deconstruct` move it out.
    value: ManuallyDrop<T>,
    // Channel back to the `Terminal` that created this wrapper.
    sender: Sender<T>,
}
impl<T> Escape<T> {
    /// Unwrap the value.
    pub(crate) fn into_inner(escape: Self) -> T {
        Self::deconstruct(escape).0
    }
    /// Move both fields out of `escape` without running its `Drop` impl.
    fn deconstruct(mut escape: Self) -> (T, Sender<T>) {
        unsafe {
            // SAFETY: each field is read exactly once and `escape` is then
            // `forget`-ten, so neither `Drop for Escape` nor the fields'
            // destructors can run again — no double drop, no use-after-move.
            let value = read(&mut *escape.value);
            let sender = read(&mut escape.sender);
            forget(escape);
            (value, sender)
        }
    }
}
impl<T> Deref for Escape<T> {
    type Target = T;
    // Transparent read access to the wrapped value.
    fn deref(&self) -> &T {
        &*self.value
    }
}
impl<T> DerefMut for Escape<T> {
    // Transparent mutable access to the wrapped value.
    fn deref_mut(&mut self) -> &mut T {
        &mut *self.value
    }
}
impl<T> Drop for Escape<T> {
fn drop(&mut self) {
let value = unsafe { read(&mut *self.value) };
self.sender.send(value)
}
}
/// This type allows the user to create `Escape` wrappers.
/// Receives values from dropped `Escape` instances that were created by this `Terminal`.
#[derive(Debug)]
pub(crate) struct Terminal<T> {
    // Receiving half: yields values sent back by dropped `Escape`s.
    receiver: Receiver<T>,
    // Prototype sender cloned into each `Escape`; `ManuallyDrop` lets
    // `Drop for Terminal` drop it before inspecting the channel.
    sender: ManuallyDrop<Sender<T>>,
}
impl<T> Default for Terminal<T> {
    // Same as `Terminal::new`.
    fn default() -> Self {
        Self::new()
    }
}
impl<T> Terminal<T> {
    /// Create new `Terminal` backed by an unbounded channel.
    pub(crate) fn new() -> Self {
        let (sender, receiver) = unbounded();
        Terminal {
            sender: ManuallyDrop::new(sender),
            receiver,
        }
    }
    /// Wrap the value. It will be yielded by the iterator returned by
    /// `Terminal::drain` if the `Escape` is dropped.
    pub(crate) fn escape(&self, value: T) -> Escape<T> {
        Escape {
            value: ManuallyDrop::new(value),
            sender: Sender::clone(&self.sender),
        }
    }
    // /// Check if `Escape` will send value to this `Terminal`.
    // pub(crate) fn owns(&self, escape: &Escape<T>) -> bool {
    //     *self.sender == escape.sender
    // }
    /// Get iterator over values from dropped `Escape` instances that were
    /// created by this `Terminal`. Ends as soon as the channel is empty.
    pub(crate) fn drain<'a>(&'a mut self) -> impl Iterator<Item = T> + 'a {
        // `try_iter` is the non-blocking drain the previous
        // `repeat(()).scan(..)` attempted: `try_recv` returns
        // `Result<T, TryRecvError>` (not `Option<T>`), so it cannot be used
        // as a `scan` closure's return value directly.
        self.receiver.try_iter()
    }
}
impl<T> Drop for Terminal<T> {
    /// Teardown check: panics if an escaped value is still buffered in the
    /// channel, i.e. the `Terminal` is dropped before draining its `Escape`s.
    fn drop(&mut self) {
        // SAFETY: `sender` is dropped exactly once, here; `self` is being
        // destroyed, so the field is never used again.
        unsafe {
            ManuallyDrop::drop(&mut self.sender);
        }
        // The previous code matched `Option` patterns (`None`) against
        // `recv()`'s `Result`, and a blocking `recv` could deadlock while
        // `Escape`s (each holding a `Sender`) are still alive. `try_recv`
        // returns `Err` for an empty or disconnected channel without blocking.
        match self.receiver.try_recv() {
            Err(_) => {}
            Ok(_) => {
                panic!("Terminal must be dropped after all `Escape`s");
            }
        }
    }
}
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(specialization)]
// Regression test for ICE when combining specialized associated types and type
// aliases
/// Helper trait whose associated type is projected through the `Id` alias.
trait Id_ {
    type Out;
}
type Id<T> = <T as Id_>::Out;
impl<T> Id_ for T {
    // `default` marks the associated type as specializable (requires the
    // nightly `specialization` feature enabled at the top of this file).
    default type Out = T;
}
// NOTE(review): name looks like a typo for `test_projection`; kept as-is since
// this is a compile-only regression test and renaming would change the item.
fn test_proection() {
    // Only the type projection has to compile; the body never runs.
    let x: Id<bool> = panic!();
}
// Empty entry point: this regression test (run-pass) only needs to compile.
fn main() {
}
|
use std::fmt;
/// A single todo-list item, (de)serializable via serde.
#[derive(Serialize, Deserialize)]
pub struct Todo {
    // `None` is possible (e.g. when deserialized from input without an id).
    id: Option<u32>,
    name: String,
    // `None` is treated as "not complete" by `is_complete`.
    complete: Option<bool>,
}
impl Todo {
pub fn new(id: u32, name: String, complete: bool) -> Self {
Self {
id: Some(id),
name,
complete: Some(complete),
}
}
pub fn get_name(&self) -> &String {
&self.name
}
pub fn is_complete(&self) -> bool {
self.complete.unwrap_or(false)
}
pub fn mark_complete(&mut self) {
self.complete = Some(true);
}
}
impl fmt::Display for Todo {
    /// Renders as `(<id>) <name> - <status glyph>`.
    ///
    /// `id` is `Option<u32>`, so an id-less item is representable (e.g. via
    /// deserialization); it renders a `?` placeholder instead of panicking,
    /// which the previous `self.id.unwrap()` did.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let complete = if self.is_complete() { "✅" } else { "❎" };
        match self.id {
            Some(id) => write!(f, "({}) {} - {}", id, self.name, complete),
            None => write!(f, "(?) {} - {}", self.name, complete),
        }
    }
}
|
use std::env;
use minigrep::Config;
fn main() {
    // Collect all CLI args (including argv[0]) and echo them for debugging.
    let args: Vec<String> = env::args().collect();
    println!("{:?}", args);
    // NOTE(review): `Config::new` and `minigrep::run` may return `Result`
    // (they do in the Book's version of this crate); if so, errors are
    // silently ignored here — confirm their signatures and handle failures.
    let config = Config::new(&args);
    minigrep::run(config);
}
|
mod shared;
#[cfg(feature = "local-testing")]
#[tokio::test]
async fn test_lobbies() {
use shared::ds::{self, lobby};
shared::init_logger();
let dual = shared::make_dual_clients(ds::Subscriptions::LOBBY)
.await
.expect("failed to start clients");
let shared::DualClients { one, two } = dual;
let mut events = one.events;
tokio::task::spawn(async move {
while let Some(event) = events.recv().await {
tracing::debug!(which = 1, event = ?event);
}
});
let mut events = two.events;
tokio::task::spawn(async move {
while let Some(event) = events.recv().await {
tracing::debug!(which = 2, event = ?event);
}
});
let _one_user = one.user;
let two_user = two.user;
let one = one.discord;
let two = two.discord;
tracing::info!("1 => creating lobby");
let mut lobby = one
.create_lobby(
lobby::CreateLobbyBuilder::new()
.capacity(std::num::NonZeroU32::new(2))
.add_metadata(std::iter::once(("crab".to_owned(), "1".to_owned()))),
)
.await
.expect("failed to create lobby");
// The SEARCH_LOBBIES command appears to be completely broken so I have filed
// a bug on it
// tracing::info!("2 => searching for lobby");
// let found_lobbies = two
// .discord
// .search_lobbies(
// lobby::SearchQuery::default()
// // .add_filter(
// // lobby::SearchKey::OwnerId,
// // lobby::LobbySearchComparison::Equal,
// // lobby::SearchValue::number(one.user.id.0),
// // )
// .add_filter(
// "crab",
// lobby::LobbySearchComparison::Equal,
// lobby::SearchValue::number(1),
// )
// .distance(lobby::LobbySearchDistance::Global)
// .limit(std::num::NonZeroU32::new(1)),
// )
// .await
// .expect("failed to search lobbies");
// let found_lobby = found_lobbies.first().expect("failed to find lobby");
// assert_eq!(lobby.id, found_lobby.id);
tracing::info!("2 => connecting to lobby");
let connected_lobby = two
.connect_lobby(lobby::ConnectLobby {
id: lobby.id,
secret: lobby.secret.clone(),
})
.await
.expect("failed to connect to lobby");
assert_eq!(lobby.id, connected_lobby.id);
let mut md = lobby::Metadata::new();
md.insert("one".to_owned(), "1".to_owned());
md.insert("two".to_owned(), "2".to_owned());
assert!(two
.update_lobby_member(lobby.id, two_user.id, md)
.await
.is_ok());
tracing::info!("1 => changing lobby ownership");
let update = one
.update_lobby(lobby::UpdateLobbyBuilder::new(&lobby).owner(Some(two_user.id)))
.await
.unwrap();
update.modify(&mut lobby);
let lobby_id = lobby.id;
let one_msg = tokio::task::spawn(async move {
assert!(one
.send_lobby_message(lobby_id, lobby::LobbyMessage::text("I'm leaving"))
.await
.is_ok());
one
});
let two_msg = tokio::task::spawn(async move {
assert!(two
.send_lobby_message(
lobby_id,
lobby::LobbyMessage::binary(b"that makes me very sad".to_vec()),
)
.await
.is_ok());
two
});
let (one, two) = tokio::join!(one_msg, two_msg);
let one = one.unwrap();
let two = two.unwrap();
tracing::info!("1 => disconnecting from lobby");
one.disconnect_lobby(lobby.id)
.await
.expect("disconnected from lobby");
// Wait a bit, Discord responds to this quickly but if we try to connect
// too quickly it will be angry with us since we're "already connected"
tokio::time::sleep(std::time::Duration::from_secs(5)).await;
tracing::info!("1 => connecting to lobby");
one.connect_lobby(lobby::ConnectLobby {
id: lobby.id,
secret: lobby.secret.clone(),
})
.await
.expect("connected to lobby");
tracing::info!("1 => disconnecting from lobby");
one.disconnect_lobby(lobby.id)
.await
.expect("disconnected from lobby");
tracing::info!("2 => deleting lobby");
two.delete_lobby(lobby.id).await.expect("deleted lobby");
one.disconnect().await;
two.disconnect().await;
}
|
use tokio::net::TcpStream;
use tokio::io::AsyncRead;
use tokio::io::AsyncWrite;
use tokio_io::io::write_all;
use tokio;
use failure::Error;
use std::net::SocketAddr;
use net2::TcpBuilder;
mod copy;
use std::sync::Arc;
use crate::relay::TcpRouter;
use bytes::Bytes;
use tokio::reactor::Handle;
use self::copy::copy_verbose;
use std::net::IpAddr;
use std::net;
use crate::conf::RoutingAction;
use crate::conf::EgressAddr;
use crate::relay::inspect::parse_first_packet;
use crate::relay::inspect::TcpProtocol;
use asocks5::connect_socks_to;
use asocks5::socks::Address;
/// Handles one inbound TCP connection: sniffs the first packet to classify
/// the protocol, then relays per the router's rule for (`a`, protocol).
///
/// `a` is the destination address; returns an error when no rule matches.
pub async fn handle_incoming_tcp(
    mut client_stream: TcpStream,
    a: SocketAddr,
    router: Arc<TcpRouter>,
)-> Result<(), Error> {
    // Read and buffer the initial bytes to identify the protocol.
    let tcp = await!(parse_first_packet(&mut client_stream))?;
    if let Some(r) = router.route(a, &tcp.protocol) {
        // Replay the sniffed bytes upstream and start the two-way relay.
        await!(carry_out(
            tcp.bytes.freeze(), a, r.clone(), client_stream,
            tcp.protocol,
        ))?;
    } else {
        let p = client_stream.peer_addr();
        return Err(format_err!(
            "No matching rule for protocol {:?} from client {:?} to addr {:?}",
            &tcp.protocol, p, a
        ));
    }
    Ok(())
}
/// Opens the egress connection selected by routing action `r`, forwards the
/// already-sniffed header bytes (`data`), then relays both directions
/// between `client_stream` and the upstream.
async fn carry_out(
    data: Bytes,
    a: SocketAddr,
    r: RoutingAction,
    client_stream: TcpStream,
    pr: TcpProtocol,
)-> Result<(), Error> {
    // Establish the upstream socket according to the routing action.
    let s = match r {
        // Drop the connection outright: nothing to relay.
        RoutingAction::Reset => return Ok(()),
        RoutingAction::Direct => {
            await!(tokio::net::TcpStream::connect(&a))
                .map_err(|e| format_err!("Error making direct {:?} connection to {:?}: {}", &pr, a, e))
        },
        RoutingAction::Named(ref g) => match g.val().addr() {
            // Bind a specific local source address before connecting.
            EgressAddr::From(ip) => {
                let x = bind_tcp_socket(ip)?;
                await!(tokio::net::TcpStream::connect_std(x, &a, &Handle::default()))
                    .map_err(|e| format_err!("Error making direct {:?} connection to {:?} from {:?}: {}", &pr, a, ip, e))
            },
            // Tunnel through the SOCKS5 proxy at `x`.
            EgressAddr::Socks5(x)=> {
                let mut s = await!(TcpStream::connect(&x))?;
                await!(connect_socks_to(&mut s, Address::SocketAddress(a)))?;
                Ok(s)
            },
        }
    }?;
    // First push the bytes that were consumed while sniffing the protocol.
    let (stream, _) = await!(write_all(s, data))
        .map_err(|e| {
            format_err!("Error sending {:?} header bytes to {:?}: {}", &pr, a, e)
        })?;
    // Pump both directions concurrently on spawned tasks.
    let (ur, uw) = stream.split();
    let (cr, cw) = client_stream.split();
    run_copy(ur, cw, a, pr.clone(), r.clone(), true);
    run_copy(cr, uw, a, pr, r, false);
    Ok(())
}
/// Spawns a task pumping bytes from `reader` to `writer`, with
/// direction-aware error logging: on the server->client leg (`s_to_c`) only
/// read errors are reported; on the client->server leg only write errors.
fn run_copy<R, W>(reader: R, writer: W, a: SocketAddr, p: TcpProtocol, r: RoutingAction, s_to_c: bool)
where R: AsyncRead + Send + 'static,
      W: AsyncWrite + Send + 'static {
    tokio::spawn_async(async move {
        if let Err(e) = await!(copy_verbose(reader, writer)) {
            if s_to_c {
                if e.is_read() {
                    warn!("Error reading proto {:?} from server {:?} via {}: {}", p, a, r, e);
                }
            } else if !e.is_read() {
                warn!("Error writing proto {:?} to server {:?} via {}: {}", p, a, r, e);
            }
        }
    })
}
/// Creates an unconnected std TCP socket bound to `ip` with an ephemeral
/// port, used to force a specific egress source address; the caller turns it
/// into a tokio stream via `TcpStream::connect_std`.
fn bind_tcp_socket(ip: IpAddr)-> Result<net::TcpStream, Error> {
    // Pick the socket family matching the address family of `ip`.
    let builder = if ip.is_ipv4() { TcpBuilder::new_v4() } else { TcpBuilder::new_v6() }?;
    let builder = builder.bind((ip, 0))?;
    builder.to_tcp_stream().map_err(|e| e.into())
}
#[doc = "Reader of register TEST"]
pub type R = crate::R<u8, super::TEST>;
#[doc = "Writer for register TEST"]
pub type W = crate::W<u8, super::TEST>;
#[doc = "Register TEST `reset()`'s with value 0"]
impl crate::ResetValue for super::TEST {
type Type = u8;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `FORCEFS`"]
pub type FORCEFS_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FORCEFS`"]
pub struct FORCEFS_W<'a> {
w: &'a mut W,
}
impl<'a> FORCEFS_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FORCEFS occupies bit 5: clear the old bit, then OR in the new one.
        let cleared = self.w.bits & !(0x01 << 5);
        self.w.bits = cleared | (((value as u8) & 0x01) << 5);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Reader of field `FIFOACC`"]
pub type FIFOACC_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FIFOACC`"]
pub struct FIFOACC_W<'a> {
w: &'a mut W,
}
impl<'a> FIFOACC_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FIFOACC occupies bit 6: clear the old bit, then OR in the new one.
        let cleared = self.w.bits & !(0x01 << 6);
        self.w.bits = cleared | (((value as u8) & 0x01) << 6);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
#[doc = "Reader of field `FORCEH`"]
pub type FORCEH_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FORCEH`"]
pub struct FORCEH_W<'a> {
w: &'a mut W,
}
impl<'a> FORCEH_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // FORCEH occupies bit 7: clear the old bit, then OR in the new one.
        let cleared = self.w.bits & !(0x01 << 7);
        self.w.bits = cleared | (((value as u8) & 0x01) << 7);
        self.w
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
}
impl R {
    #[doc = "Bit 5 - Force Full-Speed Mode"]
    #[inline(always)]
    pub fn forcefs(&self) -> FORCEFS_R {
        // Mask-test the bit in place instead of shifting first.
        FORCEFS_R::new((self.bits & (0x01 << 5)) != 0)
    }
    #[doc = "Bit 6 - FIFO Access"]
    #[inline(always)]
    pub fn fifoacc(&self) -> FIFOACC_R {
        FIFOACC_R::new((self.bits & (0x01 << 6)) != 0)
    }
    #[doc = "Bit 7 - Force Host Mode"]
    #[inline(always)]
    pub fn forceh(&self) -> FORCEH_R {
        FORCEH_R::new((self.bits & (0x01 << 7)) != 0)
    }
}
impl W {
    #[doc = "Bit 5 - Force Full-Speed Mode"]
    #[inline(always)]
    pub fn forcefs(&mut self) -> FORCEFS_W {
        // Write proxies borrow `self` so field writes can be chained.
        FORCEFS_W { w: self }
    }
    #[doc = "Bit 6 - FIFO Access"]
    #[inline(always)]
    pub fn fifoacc(&mut self) -> FIFOACC_W {
        FIFOACC_W { w: self }
    }
    #[doc = "Bit 7 - Force Host Mode"]
    #[inline(always)]
    pub fn forceh(&mut self) -> FORCEH_W {
        FORCEH_W { w: self }
    }
}
|
use std::error::Error;
use std::fs::File;
use std::io::{BufReader, BufWriter};
use compression::{encode, decode};
fn main() -> Result<(), Box<dyn Error>> {
let uncompressed_path = "./test/Grimms";
let compressed_path = "./test/Grimms.huffman";
let book = BufReader::new(File::open(uncompressed_path)?);
let out = BufWriter::new(File::create(compressed_path)?);
encode(book, out)?;
let compressed = BufReader::new(File::open(compressed_path)?);
let out = BufWriter::new(File::create("./test/Grimms.decompressed")?);
decode(compressed, out)?;
Ok(())
} |
use std::collections::HashMap;
use std::fmt;
use firefly_diagnostics::SourceSpan;
use firefly_intern::Symbol;
use crate::lexer::{DelayedSubstitution, LexicalToken, Token};
use crate::lexer::{IdentToken, SymbolToken};
use super::directives::Define;
use super::token_reader::{ReadFrom, TokenReader};
use super::types::{MacroArgs, MacroName};
use super::Result;
/// Identifies a preprocessor macro: constants by name alone, function-like
/// macros by name plus arity.
pub enum MacroIdent {
    Const(Symbol),
    Func(Symbol, usize),
}
impl MacroIdent {
    /// The macro's name, regardless of flavor.
    pub fn ident(&self) -> Symbol {
        match *self {
            MacroIdent::Const(sym) | MacroIdent::Func(sym, _) => sym,
        }
    }
    /// The macro's arity; `None` for constant macros.
    pub fn arity(&self) -> Option<usize> {
        if let MacroIdent::Func(_, arity) = self {
            Some(*arity)
        } else {
            None
        }
    }
}
impl From<&MacroCall> for MacroIdent {
    /// Keys a call by name, plus argument count for function-like calls.
    fn from(call: &MacroCall) -> MacroIdent {
        // `MacroName::symbol` covers both atom- and variable-named macros.
        let ident = call.name.symbol();
        match &call.args {
            Some(args) => MacroIdent::Func(ident, args.len()),
            None => MacroIdent::Const(ident),
        }
    }
}
impl From<&super::directives::Define> for MacroIdent {
    /// Keys a definition by name, plus variable count when it has variables.
    fn from(def: &super::directives::Define) -> MacroIdent {
        // `MacroName::symbol` covers both atom- and variable-named macros.
        let ident = def.name.symbol();
        match &def.variables {
            Some(vars) => MacroIdent::Func(ident, vars.len()),
            None => MacroIdent::Const(ident),
        }
    }
}
/// Registry of defined macros; function-like macros are bucketed by name and
/// then by arity so same-name/different-arity definitions can coexist.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MacroContainer {
    // name -> (arity -> definition)
    func_defines: HashMap<Symbol, HashMap<usize, MacroDef>>,
    // name -> definition
    const_defines: HashMap<Symbol, MacroDef>,
}
impl MacroContainer {
    /// Creates an empty container.
    pub fn new() -> Self {
        MacroContainer {
            func_defines: HashMap::new(),
            const_defines: HashMap::new(),
        }
    }
    /// Registers `def` under `key`; returns `true` when an existing
    /// definition with the same name (and arity, for function-like macros)
    /// was replaced.
    pub fn insert<T>(&mut self, key: T, def: MacroDef) -> bool
    where
        T: Into<MacroIdent>,
    {
        match key.into() {
            MacroIdent::Const(name) => self.const_defines.insert(name, def).is_some(),
            MacroIdent::Func(name, arity) => {
                // Entry API: one hash lookup instead of the previous
                // `contains_key` + `insert` + `get_mut().unwrap()` sequence.
                self.func_defines
                    .entry(name)
                    .or_insert_with(HashMap::new)
                    .insert(arity, def)
                    .is_some()
            }
        }
    }
    /// Looks up a definition; function-like macros must also match on arity.
    pub fn get<'a, T>(&'a self, key: T) -> Option<&'a MacroDef>
    where
        T: Into<MacroIdent>,
    {
        match key.into() {
            MacroIdent::Const(name) => self.const_defines.get(&name),
            MacroIdent::Func(name, arity) => {
                self.func_defines.get(&name).and_then(|c| c.get(&arity))
            }
        }
    }
    /// Removes the constant definition and every arity of the function-like
    /// definition under `symbol`; returns `true` if anything was removed.
    pub fn undef(&mut self, symbol: &Symbol) -> bool {
        // Both removals must run (no short-circuit), matching `|=` semantics.
        let had_const = self.const_defines.remove(symbol).is_some();
        let had_func = self.func_defines.remove(symbol).is_some();
        had_const || had_func
    }
    /// True when `symbol` names either kind of macro.
    pub fn defined(&self, symbol: &Symbol) -> bool {
        self.defined_const(symbol) || self.defined_func(symbol)
    }
    /// True when `symbol` names a constant macro.
    pub fn defined_const(&self, symbol: &Symbol) -> bool {
        self.const_defines.contains_key(symbol)
    }
    /// True when `symbol` names a function-like macro (any arity).
    pub fn defined_func(&self, symbol: &Symbol) -> bool {
        self.func_defines.contains_key(symbol)
    }
}
/// Macro Definition.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum MacroDef {
    /// Boolean constant definition.
    Boolean(bool),
    /// Atom constant definition.
    Atom(Symbol),
    /// String constant definition.
    String(Symbol),
    /// Definition parsed from a `-define` directive (may declare variables).
    Dynamic(Vec<LexicalToken>),
    /// Definition supplied directly as a token sequence.
    Static(Define),
    /// Substitution deferred until expansion time.
    DelayedSubstitution(DelayedSubstitution),
}
impl MacroDef {
    /// Returns `true` if this macro has variables, otherwise `false`.
    pub fn has_variables(&self) -> bool {
        // Only `-define`-based (`Static`) definitions can declare variables;
        // the arms stay exhaustive so new variants force a decision here.
        match self {
            MacroDef::Static(def) => def.variables.is_some(),
            MacroDef::Boolean(_)
            | MacroDef::Atom(_)
            | MacroDef::String(_)
            | MacroDef::Dynamic(_)
            | MacroDef::DelayedSubstitution(_) => false,
        }
    }
}
/// Macro call.
#[derive(Debug, Clone)]
pub struct MacroCall {
    // The leading `?` token.
    pub _question: SymbolToken,
    pub name: MacroName,
    // `None` for constant-style invocations (no argument list).
    pub args: Option<MacroArgs>,
}
impl MacroCall {
    /// Source span from the `?` token through the end of the call.
    pub fn span(&self) -> SourceSpan {
        let end = match self.args.as_ref() {
            Some(args) => args.span().end(),
            None => self.name.span().end(),
        };
        SourceSpan::new(self._question.0, end)
    }
    /// The invoked macro's name.
    pub fn name(&self) -> Symbol {
        self.name.symbol()
    }
}
impl fmt::Display for MacroCall {
    /// Renders as `?Name` followed by the argument list, if any.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let args = match self.args.as_ref() {
            Some(args) => args.to_string(),
            None => String::new(),
        };
        write!(f, "?{}{}", self.name.symbol(), args)
    }
}
impl ReadFrom for MacroCall {
    // Parses `? Name [Args]`; the argument list is optional (`try_read`).
    fn read_from<R, S>(reader: &mut R) -> Result<Self>
    where
        R: TokenReader<Source = S>,
    {
        Ok(MacroCall {
            _question: reader.read_expected(&Token::Question)?,
            name: reader.read()?,
            args: reader.try_read()?,
        })
    }
}
/// A macro call parsed without attempting to read an argument list.
#[derive(Debug, Clone)]
pub struct NoArgsMacroCall {
    // The leading `?` token.
    pub _question: SymbolToken,
    pub name: MacroName,
}
impl NoArgsMacroCall {
    /// Source span from the `?` token through the end of the name.
    pub fn span(&self) -> SourceSpan {
        SourceSpan::new(self._question.span().start(), self.name.span().end())
    }
}
impl ReadFrom for NoArgsMacroCall {
    // Parses `? Name` only; any argument list is left in the token stream.
    fn read_from<R, S>(reader: &mut R) -> Result<Self>
    where
        R: TokenReader<Source = S>,
    {
        Ok(NoArgsMacroCall {
            _question: reader.read_expected(&Token::Question)?,
            name: reader.read()?,
        })
    }
}
/// A `??Name` stringification expression.
#[derive(Debug, Clone)]
pub struct Stringify {
    // The leading `??` token.
    pub _double_question: SymbolToken,
    pub name: IdentToken,
}
impl Stringify {
    /// Source span from the `??` token through the end of the identifier.
    pub fn span(&self) -> SourceSpan {
        SourceSpan::new(self._double_question.0, self.name.2)
    }
}
impl fmt::Display for Stringify {
    // Renders as `??Name`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "??{}", self.name)
    }
}
impl ReadFrom for Stringify {
    // Parses `?? Ident`.
    fn read_from<R, S>(reader: &mut R) -> Result<Self>
    where
        R: TokenReader<Source = S>,
    {
        Ok(Stringify {
            _double_question: reader.read_expected(&Token::DoubleQuestion)?,
            name: reader.read()?,
        })
    }
}
|
//! OpenType Variations common tables
/// Item Variation Store (used in `MVAR`, etc.)
mod itemvariationstore;
/// Structs to store locations (user and normalized)
mod locations;
/// Structs for storing packed deltas within a tuple variation store
mod packeddeltas;
/// Structs for storing packed points
mod packedpoints;
/// Headers locating variation data within a tuple variation store
mod tuplevariationheader;
/// Tuple Variation Store
mod tuplevariationstore;
use otspec::types::int16;
/// Represents either a two-dimensional (`gvar`) or one-dimensional (`cvt`) delta value
#[derive(Debug, PartialEq)]
pub enum Delta {
    /// A one-dimensional delta (used in the `cvt` table)
    Delta1D(int16),
    /// A two-dimensional delta (used in the `gvar` table), stored as `(x, y)`
    Delta2D((int16, int16)),
}
impl Delta {
    /// Assuming that this is a two-dimensional delta, returns the delta as a
    /// X,Y coordinate tuple.
    ///
    /// Panics when called on a one-dimensional (`cvt`-style) delta.
    pub fn get_2d(&self) -> (int16, int16) {
        match self {
            Delta::Delta2D(coords) => *coords,
            Delta::Delta1D(_) => panic!("Tried to turn a scalar delta into a coordinate delta"),
        }
    }
}
pub use crate::otvar::itemvariationstore::{
ItemVariationData, ItemVariationStore, RegionAxisCoordinates,
};
pub use crate::otvar::packeddeltas::{PackedDeltas, PackedDeltasDeserializer};
pub use crate::otvar::packedpoints::PackedPoints;
pub use crate::otvar::tuplevariationheader::{
TupleIndexFlags, TupleVariationHeader, TupleVariationHeaderDeserializer,
};
pub use crate::otvar::tuplevariationstore::{
TupleVariation, TupleVariationStore, TupleVariationStoreDeserializer,
};
#[cfg(test)]
mod tests {
    use crate::otvar;
    // Deserializes a raw ItemVariationData subtable and compares it against
    // the expected struct value.
    #[test]
    fn otvar_de_ivd() {
        let binary_ivd = vec![
            0x00, 0x04, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0xFF, 0x38, 0xFF, 0xCE, 0x00, 0x64,
            0x00, 0xC8,
        ];
        let fivd = otvar::ItemVariationData {
            regionIndexes: vec![0],
            deltaValues: vec![vec![-200], vec![-50], vec![100], vec![200]],
        };
        let deserialized: otvar::ItemVariationData = otspec::de::from_bytes(&binary_ivd).unwrap();
        assert_eq!(deserialized, fivd);
    }
    // Deserializes a whole ItemVariationStore (one single-axis region plus
    // the same subtable as above) and compares it against the expected value.
    #[test]
    fn otvar_de_ivs() {
        let binary_ivs = vec![
            0x00, 0x01, 0x00, 0x00, 0x00, 0x0C, 0x00, 0x01, 0x00, 0x00, 0x00, 0x16, 0x00, 0x01,
            0x00, 0x01, 0x00, 0x00, 0x40, 0x00, 0x40, 0x00, 0x00, 0x04, 0x00, 0x01, 0x00, 0x01,
            0x00, 0x00, 0xFF, 0x38, 0xFF, 0xCE, 0x00, 0x64, 0x00, 0xC8,
        ];
        let deserialized: otvar::ItemVariationStore = otspec::de::from_bytes(&binary_ivs).unwrap();
        let fivd = otvar::ItemVariationData {
            regionIndexes: vec![0],
            deltaValues: vec![vec![-200], vec![-50], vec![100], vec![200]],
        };
        let fivs = otvar::ItemVariationStore {
            format: 1,
            axisCount: 1,
            variationRegions: vec![vec![otvar::RegionAxisCoordinates {
                startCoord: 0.0,
                peakCoord: 1.0,
                endCoord: 1.0,
            }]],
            variationData: vec![fivd],
        };
        assert_eq!(deserialized, fivs);
    }
}
|
#![cfg(test)]
extern crate wabt;
extern crate parity_wasm;
mod run;
// Generates one `#[test]` per spec-suite fixture: the generated test simply
// delegates to the shared runner with the fixture's label.
macro_rules! run_test {
    ($label: expr, $test_name: ident) => (
        #[test]
        fn $test_name() {
            self::run::spec($label)
        }
    );
}
run_test!("address", wasm_address);
run_test!("align", wasm_align);
run_test!("binary", wasm_binary);
run_test!("block", wasm_block);
run_test!("br", wasm_br);
run_test!("br_if", wasm_br_if);
run_test!("br_table", wasm_br_table);
run_test!("break-drop", wasm_break_drop);
run_test!("call", wasm_call);
run_test!("call_indirect", wasm_call_indirect);
run_test!("comments", wasm_comments);
run_test!("const", wasm_const);
run_test!("conversions", wasm_conversions);
run_test!("custom_section", wasm_custom_section);
run_test!("elem", wasm_elem);
run_test!("endianness", wasm_endianness);
run_test!("exports", wasm_exports);
run_test!("f32", wasm_f32);
run_test!("f32_bitwise", wasm_f32_bitwise);
run_test!("f32_cmp", wasm_f32_cmp);
run_test!("f64", wasm_f64);
run_test!("f64_bitwise", wasm_f64_bitwise);
run_test!("f64_cmp", wasm_f64_cmp);
run_test!("fac", wasm_fac);
run_test!("float_exprs", wasm_float_exprs);
run_test!("float_literals", wasm_float_literals);
run_test!("float_memory", wasm_float_memory);
run_test!("float_misc", wasm_float_misc);
run_test!("forward", wasm_forward);
run_test!("func", wasm_func);
run_test!("func_ptrs", wasm_func_ptrs);
run_test!("get_local", wasm_get_local);
run_test!("globals", wasm_globals);
run_test!("i32", wasm_i32);
run_test!("i64", wasm_i64);
run_test!("if", wasm_if);
run_test!("imports", wasm_imports);
run_test!("inline-module", inline_module);
run_test!("int_exprs", wasm_int_exprs);
run_test!("int_literals", wasm_int_literals);
run_test!("labels", wasm_labels);
run_test!("left-to-right", wasm_left_to_right);
run_test!("linking", wasm_linking);
run_test!("loop", wasm_loop);
run_test!("memory", wasm_memory);
run_test!("memory_redundancy", wasm_memory_redundancy);
run_test!("memory_trap", wasm_memory_trap);
run_test!("names", wasm_names);
run_test!("nop", wasm_nop);
run_test!("resizing", wasm_resizing);
run_test!("return", wasm_return);
run_test!("select", wasm_select);
run_test!("set_local", wasm_set_local);
run_test!("skip-stack-guard-page", wasm_skip_stack_guard_page);
run_test!("stack", wasm_stack);
run_test!("start", wasm_start);
run_test!("store_retval", wasm_store_retval);
run_test!("switch", wasm_switch);
run_test!("tee_local", wasm_tee_local);
run_test!("token", wasm_token);
run_test!("traps", wasm_traps);
run_test!("type", wasm_type);
run_test!("typecheck", wasm_typecheck);
run_test!("unreachable", wasm_unreachable);
run_test!("unreached-invalid", wasm_unreached_invalid);
run_test!("unwind", wasm_unwind);
run_test!("utf8-custom-section-id", wasm_utf8_custom_section_id);
run_test!("utf8-import-field", wasm_utf8_import_field);
run_test!("utf8-import-module", wasm_utf8_import_module);
run_test!("utf8-invalid-encoding", wasm_utf8_invalid_encoding); |
use super::*;
/// HTML element tags that can be turned into an `El` via `make_element`
/// (or the `attributes`/`style`/`content` shortcuts).
pub enum Tag {
    A,
    Abbr,
    Acronym,
    Address,
    Applet,
    Area,
    Article,
    Aside,
    Audio,
    B,
    Base,
    Basefont,
    Bdi,
    Bdo,
    Big,
    Blockquote,
    Body,
    Br,
    Button,
    Canvas,
    Caption,
    Center,
    Cite,
    Code,
    Col,
    Colgroup,
    Data,
    Datalist,
    Dd,
    Del,
    Details,
    Dfn,
    Dialog,
    Dir,
    Div,
    Dl,
    Dt,
    Em,
    Embed,
    Fieldset,
    FigCaption,
    Figure,
    Font,
    Footer,
    Form,
    Frame,
    Frameset,
    H1,
    Head,
    Header,
    Hr,
    Html,
    I,
    IFrame,
    Img,
    Input,
    Ins,
    Kbd,
    Label,
    Legend,
    Li,
    Link,
    Main,
    Map,
    Mark,
    Meta,
    Meter,
    Nav,
    Noframes,
    Noscript,
    Object,
    Ol,
    Optgroup,
    // Trailing underscore avoids clashing with Rust's `Option` type name.
    Option_,
    Output,
    P,
    Param,
    Picture,
    Pre,
    Progress,
    Q,
    Rp,
    Rt,
    Ruby,
    S,
    Samp,
    Script,
    Section,
    Select,
    Small,
    Source,
    Span,
    Strike,
    Strong,
    Style,
    Sub,
    Summary,
    Sup,
    Svg,
    Table,
    Tbody,
    Td,
    Template,
    TextArea,
    Tfoot,
    Th,
    Thead,
    Time,
    Title,
    Tr,
    Track,
    Tt,
    U,
    Ul,
    Var,
    Video,
    Wbr,
}
impl std::convert::From<Tag> for String {
    /// Maps each variant to its lowercase HTML tag name
    /// (e.g. `Tag::Div` -> `"div"`, `Tag::Option_` -> `"option"`).
    fn from(tag: Tag) -> Self {
        match tag {
            Tag::A => "a",
            Tag::Abbr => "abbr",
            Tag::Acronym => "acronym",
            Tag::Address => "address",
            Tag::Applet => "applet",
            Tag::Area => "area",
            Tag::Article => "article",
            Tag::Aside => "aside",
            Tag::Audio => "audio",
            Tag::B => "b",
            Tag::Base => "base",
            Tag::Basefont => "basefont",
            Tag::Bdi => "bdi",
            Tag::Bdo => "bdo",
            Tag::Big => "big",
            Tag::Blockquote => "blockquote",
            Tag::Body => "body",
            Tag::Br => "br",
            Tag::Button => "button",
            Tag::Canvas => "canvas",
            Tag::Caption => "caption",
            Tag::Center => "center",
            Tag::Cite => "cite",
            Tag::Code => "code",
            Tag::Col => "col",
            Tag::Colgroup => "colgroup",
            Tag::Data => "data",
            Tag::Datalist => "datalist",
            Tag::Dd => "dd",
            Tag::Del => "del",
            Tag::Details => "details",
            Tag::Dfn => "dfn",
            Tag::Dialog => "dialog",
            Tag::Dir => "dir",
            Tag::Div => "div",
            Tag::Dl => "dl",
            Tag::Dt => "dt",
            Tag::Em => "em",
            Tag::Embed => "embed",
            Tag::Fieldset => "fieldset",
            Tag::FigCaption => "figcaption",
            Tag::Figure => "figure",
            Tag::Font => "font",
            Tag::Footer => "footer",
            Tag::Form => "form",
            Tag::Frame => "frame",
            Tag::Frameset => "frameset",
            Tag::H1 => "h1",
            Tag::Head => "head",
            Tag::Header => "header",
            Tag::Hr => "hr",
            Tag::Html => "html",
            Tag::I => "i",
            Tag::IFrame => "iframe",
            Tag::Img => "img",
            Tag::Input => "input",
            Tag::Ins => "ins",
            Tag::Kbd => "kbd",
            Tag::Label => "label",
            Tag::Legend => "legend",
            Tag::Li => "li",
            Tag::Link => "link",
            Tag::Main => "main",
            Tag::Map => "map",
            Tag::Mark => "mark",
            Tag::Meta => "meta",
            Tag::Meter => "meter",
            Tag::Nav => "nav",
            Tag::Noframes => "noframes",
            Tag::Noscript => "noscript",
            Tag::Object => "object",
            Tag::Ol => "ol",
            Tag::Optgroup => "optgroup",
            Tag::Option_ => "option",
            Tag::Output => "output",
            Tag::P => "p",
            Tag::Param => "param",
            Tag::Picture => "picture",
            Tag::Pre => "pre",
            Tag::Progress => "progress",
            Tag::Q => "q",
            Tag::Rp => "rp",
            Tag::Rt => "rt",
            Tag::Ruby => "ruby",
            Tag::S => "s",
            Tag::Samp => "samp",
            Tag::Script => "script",
            Tag::Section => "section",
            Tag::Select => "select",
            Tag::Small => "small",
            Tag::Source => "source",
            Tag::Span => "span",
            Tag::Strike => "strike",
            Tag::Strong => "strong",
            Tag::Style => "style",
            Tag::Sub => "sub",
            Tag::Summary => "summary",
            Tag::Sup => "sup",
            Tag::Svg => "svg",
            Tag::Table => "table",
            Tag::Tbody => "tbody",
            Tag::Td => "td",
            Tag::Template => "template",
            Tag::TextArea => "textarea",
            Tag::Tfoot => "tfoot",
            Tag::Th => "th",
            Tag::Thead => "thead",
            Tag::Time => "time",
            Tag::Title => "title",
            Tag::Tr => "tr",
            Tag::Track => "track",
            Tag::Tt => "tt",
            Tag::U => "u",
            Tag::Ul => "ul",
            Tag::Var => "var",
            Tag::Video => "video",
            Tag::Wbr => "wbr",
        }.into()
    }
}
impl Tag {
/// Consumes this Tag and returns an El.
pub fn make_element(self) -> El {
use Tag::*;
match self {
Area | Base |
Basefont | Br |
Col | Embed |
Frame | Hr |
Img | Input |
Link | Meta |
Param | Source |
Track | Wbr
=> El {
name: self.into(),
is_text: false,
paired: false,
attributes: vec![],
style: vec![],
content: vec![],
},
_ => El {
name: self.into(),
is_text: false,
paired: true,
attributes: vec![],
style: vec![],
content: vec![],
},
}
}
/// Consumes a Tag and returns an El with the specified attributes.
pub fn attributes(self, attrs: &[Attr]) -> El {
self.make_element().attributes(attrs)
}
/// Consumes a Tag and returns an El with the specified style properties.
pub fn style(self, props: &[Prop]) -> El {
self.make_element().style(props)
}
/// Consumes a Tag and returns an El with the specified content (children).
pub fn content(self, content: &[El]) -> El {
self.make_element().content(content)
}
} |
use std::net::SocketAddr;
use std::path::PathBuf;
use clap::Clap;
use tracing::warn;
use bindle::{
invoice::signature::{KeyRing, SignatureRole},
provider, search,
server::{server, TlsConfig},
signature::SecretKeyFile,
SecretKeyEntry,
};
// Long-form help text shown by `--help`. Kept byte-identical: it is a
// runtime string rendered by clap.
const DESCRIPTION: &str = r#"
The Bindle Server
Bindle is a technology for storing and retrieving aggregate applications.
This program runs an HTTP frontend for a Bindle repository.
"#;
// Server options. The same struct serves double duty: clap parses it from
// the command line and serde deserializes it from the optional TOML config
// file. `main` merges the two, with CLI values taking precedence, which is
// why (almost) every field is an Option. Plain `//` comments are used here
// deliberately — clap derive can consume `///` doc comments as help text.
#[derive(Clap, serde::Deserialize, Default)]
#[clap(name = "bindle-server", version = clap::crate_version!(), author = "DeisLabs at Microsoft Azure", about = DESCRIPTION)]
struct Opts {
    // IP:port the HTTP(S) listener binds to.
    #[clap(
        short = 'i',
        long = "address",
        env = "BINDLE_IP_ADDRESS_PORT",
        about = "the IP address and port to listen on [default: 127.0.0.1:8080]"
    )]
    address: Option<String>,
    // Root directory where bindles are stored by the provider.
    #[clap(
        name = "bindle_directory",
        short = 'd',
        long = "directory",
        env = "BINDLE_DIRECTORY",
        about = "the path to the directory in which bindles will be stored [default: $XDG_DATA_HOME/bindle]"
    )]
    bindle_directory: Option<PathBuf>,
    // TLS cert; `requires` makes clap enforce that the key comes with it.
    #[clap(
        name = "cert_path",
        short = 'c',
        long = "tls-cert",
        env = "BINDLE_TLS_CERT",
        requires = "key_path",
        about = "the path to the TLS certificate to use. If set, --key-path must be set as well. If not set, the server will use HTTP"
    )]
    cert_path: Option<PathBuf>,
    // TLS private key; mutually required with cert_path (see above).
    #[clap(
        name = "key_path",
        short = 'k',
        long = "tls-key",
        env = "BINDLE_TLS_KEY",
        requires = "cert_path",
        about = "the path to the TLS certificate key to use. If set, --cert-path must be set as well. If not set, the server will use HTTP"
    )]
    key_path: Option<PathBuf>,
    // Explicit config-file location; when absent, main() falls back to the
    // platform default (see default_config_file()).
    #[clap(
        name = "config_file",
        long = "config-path",
        about = "the path to a configuration file"
    )]
    config_file: Option<PathBuf>,
    // Public keyring used to verify invoice signatures.
    #[clap(
        name = "keyring",
        short = 'r',
        long = "keyring",
        about = "the path to the public keyring file used for verifying signatures"
    )]
    keyring_file: Option<PathBuf>,
    // TOML file holding the server's secret signing keys.
    #[clap(
        name = "signing_keys",
        long = "signing-keys",
        env = "BINDLE_SIGNING_KEYS",
        about = "location of the TOML file that holds the signing keys used for creating signatures"
    )]
    signing_file: Option<PathBuf>,
    // Parsed by bindle::VerificationStrategy's FromStr impl.
    #[clap(
        name = "verification_strategy",
        long = "strategy",
        env = "BINDLE_VERIFICATION_STRATEGY",
        about = "The verification strategy to use on the server. Must be one of: CreativeIntegrity, AuthoritativeIntegrity, GreedyVerification, ExhaustiveVerification, MultipleAttestation, MultipleAttestationGreedy. For either of the multiple attestation strategies, you can specify the roles using the following syntax: `MultipleAttestation[Creator, Approver]`"
    )]
    verification_strategy: Option<bindle::VerificationStrategy>,
    // Flag (not an Option): selects the experimental embedded DB provider
    // instead of the file provider. Defaults to false via Default.
    #[clap(
        name = "use_embedded_db",
        long = "use-embedded-db",
        short = 'e',
        env = "BINDLE_USE_EMBEDDED_DB",
        about = "Use the new embedded database provider. This is currently experimental, but fairly stable and more efficient. In the future, this will be the default"
    )]
    use_embedded_db: bool,
}
/// Entry point: merges CLI flags with the optional TOML config file (CLI
/// wins), resolves the keyring and signing keys, then starts the Bindle HTTP
/// server with either the embedded or the file-based storage provider.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let opts = Opts::parse();
    // TODO: Allow log level setting outside of RUST_LOG (this is easier with this subscriber)
    tracing_subscriber::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
        .init();
    // load config file if it exists
    let config_file_path = match opts.config_file {
        Some(c) => c,
        None => default_config_file()
            .ok_or_else(|| anyhow::anyhow!("could not find a default config path"))?,
    };
    // A missing or unreadable config file is not fatal — warn and use defaults.
    let config: Opts = load_toml(config_file_path).await.unwrap_or_else(|e| {
        warn!(error = %e, "No server.toml file loaded");
        Opts::default()
    });
    // find socket address
    // 1. cli options if set
    // 2. config file if set
    // 3. default
    let addr: SocketAddr = opts
        .address
        .or(config.address)
        .unwrap_or_else(|| String::from("127.0.0.1:8080"))
        .parse()?;
    // find bindle directory
    // 1. cli options if set
    // 2. config file if set
    // 3. default
    let bindle_directory: PathBuf = opts
        .bindle_directory
        .or(config.bindle_directory)
        .unwrap_or_else(|| {
            dirs::data_dir()
                .expect("Unable to infer data directory")
                .join("bindle")
        });
    // find keyring file
    // 1. cli options if set
    // 2. config file if set
    // 3. default
    // 4. hardcoded `./.bindle/keyring.toml`
    // TODO: Should we ensure a keyring?
    let keyring_file: PathBuf = opts
        .keyring_file
        .or(config.keyring_file)
        .unwrap_or_else(|| default_config_dir().join("keyring.toml"));
    // We might want to do something different in the future. But what we do here is
    // load the file if we can find it. If the file just doesn't exist, we print a
    // warning and load a placeholder. This prevents the program from failing when
    // a keyring does not exist.
    //
    // All other cases are considered errors worthy of failing.
    let keyring: KeyRing = match std::fs::metadata(&keyring_file) {
        Ok(md) if md.is_file() => load_toml(keyring_file).await?,
        Ok(_) => {
            anyhow::bail!("Expected {} to be a regular file", keyring_file.display());
        }
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            warn!("No keyring.toml found. Using default keyring.");
            KeyRing::default()
        }
        Err(e) => anyhow::bail!("failed to read file {}: {}", keyring_file.display(), e),
    };
    // Load the signing keys from...
    // - --signing-keys filename
    // - or config file signing-keys entry
    // - or $XDG_DATA/bindle/signing-keys.toml
    let signing_keys_config: Option<PathBuf> = opts.signing_file.or(config.signing_file);
    let signing_keys = match signing_keys_config {
        Some(keypath) => keypath,
        // No path given anywhere: create a default key file on first run.
        None => ensure_signing_keys().await?,
    };
    let cert_path = opts.cert_path.or(config.cert_path);
    let key_path = opts.key_path.or(config.key_path);
    // Map doesn't work here because we've already moved data out of opts
    #[allow(clippy::manual_map)]
    let tls = match cert_path {
        None => None,
        // clap's `requires` guarantees key_path accompanies cert_path on the
        // CLI; the expect covers a cert-only config file.
        Some(p) => Some(TlsConfig {
            cert_path: p,
            key_path: key_path.expect("--key-path should be set if --cert-path was set"),
        }),
    };
    let strategy = opts
        .verification_strategy
        .or(config.verification_strategy)
        .unwrap_or_default();
    tracing::info!("Using verification strategy of {:?}", strategy);
    let index = search::StrictEngine::default();
    let secret_store = SecretKeyFile::load_file(&signing_keys).await.map_err(|e| {
        anyhow::anyhow!(
            "Failed to load secret key file from {}: {} HINT: Try the flag --signing-keys",
            signing_keys.display(),
            e
        )
    })?;
    tracing::log::info!(
        "Starting server at {}, and serving bindles from {}",
        addr.to_string(),
        bindle_directory.display()
    );
    // The two branches differ only in the storage provider implementation.
    if opts.use_embedded_db {
        warn!("Using EmbeddedProvider. This is currently experimental");
        let store =
            provider::embedded::EmbeddedProvider::new(&bindle_directory, index.clone()).await?;
        server(
            store,
            index,
            bindle::authn::always::AlwaysAuthenticate,
            bindle::authz::always::AlwaysAuthorize,
            addr,
            tls,
            secret_store,
            strategy,
            keyring,
        )
        .await
    } else {
        tracing::info!("Using FileProvider");
        let store = provider::file::FileProvider::new(&bindle_directory, index.clone()).await;
        server(
            store,
            index,
            bindle::authn::always::AlwaysAuthenticate,
            bindle::authz::always::AlwaysAuthorize,
            addr,
            tls,
            secret_store,
            strategy,
            keyring,
        )
        .await
    }
}
/// Returns the default server config path, `<platform config dir>/bindle/server.toml`,
/// or `None` when the platform config directory cannot be determined.
fn default_config_file() -> Option<PathBuf> {
    let base = dirs::config_dir()?;
    Some(base.join("bindle/server.toml"))
}
/// Returns the Bindle config directory: the platform config dir joined with
/// `bindle/`, or the relative `./bindle` when no platform dir exists.
fn default_config_dir() -> PathBuf {
    match dirs::config_dir() {
        Some(base) => base.join("bindle/"),
        None => "./bindle".into(),
    }
}
/// Creates the default config directory (including missing parents) and
/// returns its path.
async fn ensure_config_dir() -> anyhow::Result<PathBuf> {
    let dir = default_config_dir();
    if let Err(e) = tokio::fs::create_dir_all(&dir).await {
        anyhow::bail!("Unable to create config dir at {}: {}", dir.display(), e);
    }
    Ok(dir)
}
/// Returns the path to the signing-keys TOML file, creating a default one
/// (containing a freshly generated host key) when none exists yet.
///
/// # Errors
/// Fails when the path exists but is not a regular file, when the config
/// directory cannot be created, or when the default key file cannot be saved.
async fn ensure_signing_keys() -> anyhow::Result<PathBuf> {
    let base = ensure_config_dir().await?;
    let signing_keyfile = base.join("signing-keys.toml");
    // Stat it, and if it exists we are good.
    match tokio::fs::metadata(&signing_keyfile).await {
        Ok(info) if info.is_file() => Ok(signing_keyfile),
        Ok(_info) => Err(anyhow::anyhow!("Not a file: {}", signing_keyfile.display())),
        // If the file is not found, then drop through and create a default instance
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            let mut default_keyfile = SecretKeyFile::default();
            warn!(
                "Creating a default host signing key and storing it in {}",
                signing_keyfile.display()
            );
            let key = SecretKeyEntry::new("Default host key".to_owned(), vec![SignatureRole::Host]);
            default_keyfile.key.push(key);
            default_keyfile
                .save_file(&signing_keyfile)
                .await
                .map_err(|e| {
                    anyhow::anyhow!(
                        "Unable to save newly created key to {}: {}",
                        signing_keyfile.display(),
                        e
                    )
                })?;
            Ok(signing_keyfile)
        }
        // Fixed error-message typo: "singing keys" -> "signing keys".
        Err(e) => Err(anyhow::anyhow!(
            "Failed to load signing keys at {}: {}",
            signing_keyfile.display(),
            e
        )),
    }
}
/// Reads `file` and deserializes its TOML contents into a `T`.
///
/// A read failure deliberately propagates as an error (an earlier revision
/// silently fell back to an empty document via `unwrap_or_default`).
async fn load_toml<T>(file: PathBuf) -> anyhow::Result<T>
where
    T: serde::de::DeserializeOwned,
{
    let bytes = match tokio::fs::read(&file).await {
        Ok(b) => b,
        Err(e) => anyhow::bail!("failed to read TOML file {}: {}", file.display(), e),
    };
    Ok(toml::from_slice::<T>(&bytes)?)
}
|
use atoms::Location;
/// Gives access to the source [`Location`] of any node that implements
/// this trait.
pub trait HasLocation {
    /// Returns the start location of this node.
    fn start(&self) -> Location;
}
|
use super::mocks::*;
use super::{db_path, pause};
use crate::connector::{AckResponse, EventType, JudgementRequest, JudgementResponse, Message};
use crate::primitives::{Account, AccountType, Challenge, Judgement, NetAccount};
use crate::{test_run, Database};
use matrix_sdk::identifiers::{RoomId, UserId};
use schnorrkel::Keypair;
use std::convert::TryFrom;
use std::sync::Arc;
use tokio::runtime::Runtime;
/// End-to-end flow: injecting a `NewJudgementRequest` for Alice's Matrix
/// account must make the service acknowledge it, create a Matrix room for
/// her, and send the initial verification message there.
#[test]
fn matrix_init_message() {
    let mut rt = Runtime::new().unwrap();
    rt.block_on(async {
        // Setup database and manager.
        let db = Database::new(&db_path()).unwrap();
        let manager = Arc::new(EventManager::new());
        let (_, matrix_child) = manager.child();
        let my_user_id = UserId::try_from("@registrar:matrix.org").unwrap();
        let matrix_transport = MatrixMocker::new(matrix_child, my_user_id);
        // Starts tasks. Only the Matrix transport is real (mocked); the two
        // other transports are inert dummies.
        let handlers = test_run(
            Arc::clone(&manager),
            db,
            Default::default(),
            matrix_transport,
            DummyTransport::new(),
            DummyTransport::new(),
        )
        .await
        .unwrap();
        let injector = handlers.reader.injector();
        // Generate events.
        let msg = serde_json::to_string(&Message {
            event: EventType::NewJudgementRequest,
            data: serde_json::to_value(&JudgementRequest {
                address: NetAccount::alice(),
                accounts: [(
                    AccountType::Matrix,
                    Some(Account::from("@alice:matrix.org")),
                )]
                .iter()
                .cloned()
                .collect(),
            })
            .unwrap(),
        })
        .unwrap();
        // Send new judgement request, then give the async pipeline time to
        // process it (pause() — presumably a short sleep; see mocks).
        injector.send_message(msg.clone()).await;
        pause().await;
        // Verify events.
        let events = manager.events().await;
        // Startup handshake: the service first asks the connector for
        // display names and for pending judgement requests.
        assert!(events.contains(&Event::Connector(ConnectorEvent::Writer {
            message: Message {
                event: EventType::DisplayNamesRequest,
                data: serde_json::to_value(Option::<()>::None).unwrap(),
            }
        })));
        assert!(events.contains(&Event::Connector(ConnectorEvent::Writer {
            message: Message {
                event: EventType::PendingJudgementsRequests,
                data: serde_json::to_value(Option::<()>::None).unwrap(),
            }
        })));
        // The injected request is observed and acknowledged.
        assert!(events.contains(&Event::Connector(ConnectorEvent::Reader { message: msg })));
        assert!(events.contains(&Event::Connector(ConnectorEvent::Writer {
            message: Message {
                event: EventType::Ack,
                data: serde_json::to_value(&AckResponse {
                    result: String::from("Message acknowledged"),
                })
                .unwrap(),
            }
        })));
        // Matrix side: Alice is invited to a new room and receives the
        // initial verification instructions.
        assert!(events.contains(&Event::Matrix(MatrixEvent::CreateRoom {
            to_invite: UserId::try_from("@alice:matrix.org").unwrap(),
        })));
        assert!(events.contains(&Event::Matrix(MatrixEvent::SendMessage {
            room_id: RoomId::try_from("!17:matrix.org").unwrap(),
            message: VerifierMessageBlank::InitMessageWithContext,
        })));
    });
}
/// Happy path: after a judgement request, Alice answers in the Matrix room
/// with a signature over the (fixed) challenge. The service must accept it,
/// report a `Reasonable` judgement to the connector, say goodbye and leave
/// the room.
#[test]
fn matrix_valid_signature_response() {
    let mut rt = Runtime::new().unwrap();
    rt.block_on(async {
        // Setup database and manager.
        let db = Database::new(&db_path()).unwrap();
        let manager = Arc::new(EventManager::new());
        let (_, matrix_child) = manager.child();
        let my_user_id = UserId::try_from("@registrar:matrix.org").unwrap();
        let matrix_transport = MatrixMocker::new(matrix_child, my_user_id);
        // Starts tasks.
        let handlers = test_run(
            Arc::clone(&manager),
            db,
            Default::default(),
            matrix_transport,
            DummyTransport::new(),
            DummyTransport::new(),
        )
        .await
        .unwrap();
        let matrix = handlers.matrix;
        let injector = handlers.reader.injector();
        // Fresh sr25519 keypair standing in for the on-chain identity.
        let keypair = Keypair::generate();
        // Generate events.
        let msg = serde_json::to_string(&Message {
            event: EventType::NewJudgementRequest,
            data: serde_json::to_value(&JudgementRequest {
                address: NetAccount::from(&keypair.public),
                accounts: [(
                    AccountType::Matrix,
                    Some(Account::from("@alice:matrix.org")),
                )]
                .iter()
                .cloned()
                .collect(),
            })
            .unwrap(),
        })
        .unwrap();
        // Send new judgement request.
        injector.send_message(msg.clone()).await;
        pause().await;
        // Respond with valid signature: sign the deterministic test challenge
        // (gen_fixed) under the "substrate" signing context.
        let signature =
            keypair.sign_simple(b"substrate", Challenge::gen_fixed().as_str().as_bytes());
        let room_id = RoomId::try_from("!17:matrix.org").unwrap();
        matrix.trigger_matrix_emitter(
            room_id.clone(),
            UserId::try_from("@registrar:matrix.org").unwrap(),
            MatrixEventMock {
                user_id: UserId::try_from("@alice:matrix.org").unwrap(),
                message: hex::encode(signature.to_bytes()),
            },
        );
        pause().await;
        // Verify events.
        let events = manager.events().await;
        // Skip startup events...
        assert!(events.contains(&Event::Matrix(MatrixEvent::CreateRoom {
            to_invite: UserId::try_from("@alice:matrix.org").unwrap(),
        })));
        assert!(events.contains(&Event::Matrix(MatrixEvent::SendMessage {
            room_id: room_id.clone(),
            message: VerifierMessageBlank::InitMessageWithContext
        })));
        // The valid signature is confirmed to the user...
        assert!(events.contains(&Event::Matrix(MatrixEvent::SendMessage {
            room_id: room_id.clone(),
            message: VerifierMessageBlank::ResponseValid,
        })));
        // ...and a Reasonable judgement is reported back to the connector.
        assert!(events.contains(&Event::Connector(ConnectorEvent::Writer {
            message: Message {
                event: EventType::JudgementResult,
                data: serde_json::to_value(&JudgementResponse {
                    address: NetAccount::from(&keypair.public),
                    judgement: Judgement::Reasonable,
                })
                .unwrap()
            }
        })));
        // Session cleanup: goodbye message, then leave the room.
        assert!(events.contains(&Event::Matrix(MatrixEvent::SendMessage {
            room_id: room_id.clone(),
            message: VerifierMessageBlank::Goodbye,
        })));
        assert!(events.contains(&Event::Matrix(MatrixEvent::LeaveRoom { room_id: room_id })));
    });
}
/// Negative path: Alice answers with a signature over the WRONG challenge
/// (a random one instead of the expected fixed one). The service must reply
/// with an "invalid response" message and must NOT judge or leave the room.
#[test]
fn matrix_invalid_signature_response() {
    let mut rt = Runtime::new().unwrap();
    rt.block_on(async {
        // Setup database and manager.
        let db = Database::new(&db_path()).unwrap();
        let manager = Arc::new(EventManager::new());
        let (_, matrix_child) = manager.child();
        let my_user_id = UserId::try_from("@registrar:matrix.org").unwrap();
        let matrix_transport = MatrixMocker::new(matrix_child, my_user_id);
        // Starts tasks.
        let handlers = test_run(
            Arc::clone(&manager),
            db,
            Default::default(),
            matrix_transport,
            DummyTransport::new(),
            DummyTransport::new(),
        )
        .await
        .unwrap();
        let matrix = handlers.matrix;
        let injector = handlers.reader.injector();
        let keypair = Keypair::generate();
        // Generate events.
        let msg = serde_json::to_string(&Message {
            event: EventType::NewJudgementRequest,
            data: serde_json::to_value(&JudgementRequest {
                address: NetAccount::from(&keypair.public),
                accounts: [(
                    AccountType::Matrix,
                    Some(Account::from("@alice:matrix.org")),
                )]
                .iter()
                .cloned()
                .collect(),
            })
            .unwrap(),
        })
        .unwrap();
        // Send new judgement request.
        injector.send_message(msg.clone()).await;
        pause().await;
        // Respond with an INVALID signature: gen_random() produces a
        // challenge that cannot match the one the service issued.
        let signature =
            keypair.sign_simple(b"substrate", Challenge::gen_random().as_str().as_bytes());
        let room_id = RoomId::try_from("!17:matrix.org").unwrap();
        matrix.trigger_matrix_emitter(
            room_id.clone(),
            UserId::try_from("@registrar:matrix.org").unwrap(),
            MatrixEventMock {
                user_id: UserId::try_from("@alice:matrix.org").unwrap(),
                message: hex::encode(signature.to_bytes()),
            },
        );
        pause().await;
        // Verify events.
        let events = manager.events().await;
        println!("{:?}", events);
        // Skip startup events...
        assert!(events.contains(&Event::Matrix(MatrixEvent::CreateRoom {
            to_invite: UserId::try_from("@alice:matrix.org").unwrap(),
        })));
        assert!(events.contains(&Event::Matrix(MatrixEvent::SendMessage {
            room_id: room_id.clone(),
            message: VerifierMessageBlank::InitMessageWithContext,
        })));
        // The user is told the response was invalid; no judgement, goodbye
        // or room-leave events are expected after that.
        assert!(events.contains(&Event::Matrix(MatrixEvent::SendMessage {
            room_id: room_id.clone(),
            message: VerifierMessageBlank::ResponseInvalid,
        })));
    });
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Machine-generated windows-rs bindings for the
// Windows.UI.WindowManagement.Preview namespace — do not edit by hand.
//
// `IWindowManagementPreview` is the default interface of the
// WindowManagementPreview runtime class (IID 4ef55b0d-561d-513c-a67c-2c02b69cef41).
#[repr(transparent)]
#[doc(hidden)]
pub struct IWindowManagementPreview(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IWindowManagementPreview {
    type Vtable = IWindowManagementPreview_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x4ef55b0d_561d_513c_a67c_2c02b69cef41);
}
// Raw vtable layout. The six slots are, by position, presumably the standard
// IUnknown (QueryInterface, AddRef, Release) and IInspectable (GetIids,
// GetRuntimeClassName, GetTrustLevel) entries; the interface adds no methods
// of its own.
#[repr(C)]
#[doc(hidden)]
pub struct IWindowManagementPreview_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
);
// Statics (activation factory) interface for the class.
#[repr(transparent)]
#[doc(hidden)]
pub struct IWindowManagementPreviewStatics(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for IWindowManagementPreviewStatics {
    type Vtable = IWindowManagementPreviewStatics_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x0f9725c6_c004_5a23_8fd2_8d092ce2704a);
}
// Statics vtable: the six standard slots, then slot 6 carries
// SetPreferredMinSize. When the `Foundation` feature is disabled the slot is
// replaced by a `usize` filler so the layout stays intact.
#[repr(C)]
#[doc(hidden)]
pub struct IWindowManagementPreviewStatics_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    #[cfg(feature = "Foundation")] pub unsafe extern "system" fn(this: ::windows::core::RawPtr, window: ::windows::core::RawPtr, preferredframeminsize: super::super::super::Foundation::Size) -> ::windows::core::HRESULT,
    #[cfg(not(feature = "Foundation"))] usize,
);
// Public wrapper for the Windows.UI.WindowManagement.Preview.WindowManagementPreview
// runtime class. Generated by windows-rs — do not edit by hand.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct WindowManagementPreview(pub ::windows::core::IInspectable);
impl WindowManagementPreview {
    // Static class method: dispatches to vtable slot 6 of the statics
    // interface (SetPreferredMinSize). Requires the `Foundation` feature
    // because the min-size parameter is a Foundation::Size.
    #[cfg(feature = "Foundation")]
    pub fn SetPreferredMinSize<'a, Param0: ::windows::core::IntoParam<'a, super::AppWindow>, Param1: ::windows::core::IntoParam<'a, super::super::super::Foundation::Size>>(window: Param0, preferredframeminsize: Param1) -> ::windows::core::Result<()> {
        Self::IWindowManagementPreviewStatics(|this| unsafe { (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), window.into_param().abi(), preferredframeminsize.into_param().abi()).ok() })
    }
    // Process-wide cache of the class's activation factory; `callback` runs
    // against the cached statics interface.
    pub fn IWindowManagementPreviewStatics<R, F: FnOnce(&IWindowManagementPreviewStatics) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<WindowManagementPreview, IWindowManagementPreviewStatics> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
// WinRT type signature used for generic instantiation / marshaling.
unsafe impl ::windows::core::RuntimeType for WindowManagementPreview {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.UI.WindowManagement.Preview.WindowManagementPreview;{4ef55b0d-561d-513c-a67c-2c02b69cef41})");
}
unsafe impl ::windows::core::Interface for WindowManagementPreview {
    type Vtable = IWindowManagementPreview_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x4ef55b0d_561d_513c_a67c_2c02b69cef41);
}
impl ::windows::core::RuntimeName for WindowManagementPreview {
    const NAME: &'static str = "Windows.UI.WindowManagement.Preview.WindowManagementPreview";
}
// Conversions to the COM base interfaces. `value.0 .0` peels the wrapper
// struct, then the IInspectable, down to the underlying IUnknown.
impl ::core::convert::From<WindowManagementPreview> for ::windows::core::IUnknown {
    fn from(value: WindowManagementPreview) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&WindowManagementPreview> for ::windows::core::IUnknown {
    fn from(value: &WindowManagementPreview) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for WindowManagementPreview {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a WindowManagementPreview {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<WindowManagementPreview> for ::windows::core::IInspectable {
    fn from(value: WindowManagementPreview) -> Self {
        value.0
    }
}
impl ::core::convert::From<&WindowManagementPreview> for ::windows::core::IInspectable {
    fn from(value: &WindowManagementPreview) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for WindowManagementPreview {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a WindowManagementPreview {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
unsafe impl ::core::marker::Send for WindowManagementPreview {}
unsafe impl ::core::marker::Sync for WindowManagementPreview {}
|
{{!-- Generates the Rust code that restricts one choice's domain after
      filtering. When the choice is symmetric, the body is wrapped in an
      id-ordering guard so only the lower-triangular half of the
      (arg0, arg1) pairs is processed once. --}}
{{~#if is_symmetric}}
// The filtered choice is symmetric, so only the lower triangular part needs to be
// filtered.
if {{>set.id_getter arguments.[0].[1] item=arguments.[0].[0]}} <
    {{>set.id_getter arguments.[1].[1] item=arguments.[1].[0]}} {
{{/if}}
{{!-- Start from the full value domain, apply the filter, then restrict. --}}
let mut values = {{>value_type.full_domain choice_full_type}};
{{>filter_call filter_call}}
trace!("call restrict from {}, line {}", file!(), line!());
{{choice}}::restrict({{>choice.arg_ids}}ir_instance, store, values, diff)?;
{{~#if is_symmetric}}
}
{{/if}}
|
use std::time::Instant;
/// Given banknote denominations 1, 5, 10, 20, 50 and 100, how many ways are
/// there to compose a target amount? Runs both solvers for n = 1000..=10000
/// in steps of 1000 and prints their results and timings.
fn main() {
    let mut n = 1000;
    while n <= 10000 {
        println!("n={}", n);
        func1(n);
        println!();
        func2(n);
        println!("===");
        n += 1000;
    }
}
/// 动态规划解法
/// Dynamic-programming solution: prints the number of ways to compose
/// `target` from the denominations {1, 5, 10, 20, 50, 100}, plus the runtime.
fn func1(target: usize) {
    let start = Instant::now();
    // The 1-denomination is implicit: a table initialized to all 1s is
    // exactly "one way using only 1-valued coins" for every amount.
    let coins = [5, 10, 20, 50, 100];
    let mut table = vec![1u64; target + 1];
    // Classic unbounded-knapsack counting: process one denomination at a
    // time so each combination is counted once, order-independently.
    for &coin in &coins {
        for i in coin..=target {
            table[i] += table[i - coin];
        }
    }
    let elapsed = start.elapsed();
    println!("结果 {}", table[target]);
    println!("运行时间 {:?}", elapsed);
}
use std::collections::HashMap;
///
/// 递归解
/// Recursive (memoized) solution: prints the number of ways to compose
/// `target` from the denominations {1, 5, 10, 20, 50, 100}, plus the runtime.
fn func2(target: usize) {
    /// Counts the ways to compose `target` using only the first `k`
    /// denominations in `coins`.
    ///
    /// # Arguments
    /// * `coins` - available coin denominations (1 must come first)
    /// * `k` - how many leading denominations may be used
    /// * `target` - the amount to compose
    /// * `cache` - memoization table keyed by (k, target)
    fn count(coins: &[i32], k: usize, target: i32, cache: &mut HashMap<(usize, i32), u64>) -> u64 {
        // Hit the target exactly: one valid combination.
        if target == 0 {
            return 1;
        }
        // Only the 1-valued coin remains (it is first in `coins`), so there
        // is exactly one way to make up any remaining amount.
        if k == 1 {
            return 1;
        }
        // Memoization: single lookup instead of contains_key + get.
        if let Some(&cached) = cache.get(&(k, target)) {
            return cached;
        }
        // Spend 0, 1, 2, ... coins of the k-th denomination; each choice
        // reduces to composing the remainder with the first k-1 kinds.
        let mut res = 0;
        for dealed in (0..=target).step_by(coins[k - 1] as usize) {
            res += count(coins, k - 1, target - dealed, cache);
        }
        cache.insert((k, target), res);
        res
    }
    let coins = [1, 5, 10, 20, 50, 100];
    let start = Instant::now();
    let res = count(&coins, coins.len(), target as i32, &mut HashMap::new());
    let elapsed = start.elapsed();
    println!("程序运行时间是 --> {:?}", elapsed);
    println!("共有 {} 种可能性", res);
}
|
use std::io::{Read, Result as IOResult};
use crate::PrimitiveRead;
// One vertex-fixup record read from a model vertex file.
// NOTE(review): field semantics inferred from the names — appears to describe
// a run of `vertices_count` vertices starting at `source_vertex_id` for LOD
// level `lod`; confirm against the file-format documentation.
pub struct VertexFileFixup {
    // LOD level this fixup applies to.
    pub lod: i32,
    // Index of the first source vertex in the run.
    pub source_vertex_id: i32,
    // Number of vertices in the run.
    pub vertices_count: i32
}
impl VertexFileFixup {
    /// Reads a single fixup record from `read`: lod, source vertex id and
    /// vertex count, in that order (one i32 each, via `PrimitiveRead`).
    pub fn read(read: &mut dyn Read) -> IOResult<Self> {
        // Struct-literal fields evaluate top to bottom, preserving the
        // original read order.
        Ok(Self {
            lod: read.read_i32()?,
            source_vertex_id: read.read_i32()?,
            vertices_count: read.read_i32()?,
        })
    }
}
|
// Protobuf-generated client/server types for the `grpc.examples.echo`
// package, expanded at compile time from the build-script output.
pub mod pb {
    tonic::include_proto!("grpc.examples.echo");
}
use http::header::HeaderValue;
use pb::{echo_client::EchoClient, EchoRequest};
use tonic::transport::Channel;
/// Example gRPC client: connects to a local echo server and performs one
/// unary call, attaching a static bearer token to every outgoing request.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The header interceptor runs for each request on this channel and
    // injects the authorization header before it leaves the client.
    let channel = Channel::from_static("http://[::1]:50051")
        .intercept_headers(|headers| {
            headers.insert(
                "authorization",
                HeaderValue::from_static("Bearer some-secret-token"),
            );
        })
        .connect()
        .await?;
    let mut client = EchoClient::new(channel);
    let request = tonic::Request::new(EchoRequest {
        message: "hello".into(),
    });
    // Single unary round-trip; prints the full response envelope (metadata
    // and message).
    let response = client.unary_echo(request).await?;
    println!("RESPONSE={:?}", response);
    Ok(())
}
|
/// π as an `f32` (re-export of the std constant).
pub const PI: f32 = std::f32::consts::PI;
/// Multiply degrees by this to get radians.
pub const DEG_2_RAD: f32 = PI / 180.0;
/// Multiply radians by this to get degrees.
pub const RAD_2_DEG: f32 = 180.0 / PI;
/// Small epsilon used to keep clamped angles strictly inside (-π/2, π/2).
pub const SAFE_F: f32 = 0.0001;
/// Upper clamp: just under +π/2 (presumably for pitch-like angles — confirm at call sites).
pub const SAFE_HALF_PI_MAX: f32 = (PI / 2.0) - SAFE_F;
/// Lower clamp: just over -π/2.
pub const SAFE_HALF_PI_MIN: f32 = (-PI / 2.0) + SAFE_F;
|
use rand::Rng;
/// Generate a [base64url][1] encoded, cryptographically secure, random string.
///
/// `length` is the number of characters (= bytes, all ASCII) in the result.
///
/// [1]: https://tools.ietf.org/html/rfc4648#page-7
pub fn generate_random_base64url(length: usize) -> String {
    // The base64url alphabet (RFC 4648 section 5): A-Z, a-z, 0-9, '-', '_'.
    const ALPHABET: &[u8; 64] =
        b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
    let mut rng = rand::thread_rng();
    // Indexing a fixed ASCII table replaces the original arithmetic branch
    // chain and builds the String safely — no `from_utf8_unchecked` needed.
    (0..length)
        .map(|_| ALPHABET[rng.gen_range(0usize, 64)] as char)
        .collect()
}
|
extern crate num_cpus;
extern crate threadpool;
use std::thread;
use threadpool::ThreadPool;
use std::time::Duration;
fn main() {
let ncpus = num_cpus::get();
println!("The number of cpus in this machine is: {}", ncpus);
let pool = ThreadPool::new(ncpus);
for i in 0..ncpus {
pool.execute(move || {
println!("this is thread number {}", i) });
}
thread::sleep(Duration::from_millis(50));
} |
use std::collections::HashMap;
/// Advent of Code 2021 day 14: polymer insertion, tracked as pair counts.
fn main() {
    let mut sections = include_str!("input").split("\r\n\r\n");
    let template = sections.next().unwrap();
    // Parse "AB -> C" rules into a (A, B) -> C lookup table.
    let rules: HashMap<(char, char), char> = sections
        .next()
        .unwrap()
        .lines()
        .map(|line| {
            let mut parts = line.split(" -> ");
            let mut pair = parts.next().unwrap().chars();
            let key = (pair.next().unwrap(), pair.next().unwrap());
            (key, parts.next().unwrap().chars().next().unwrap())
        })
        .collect();
    // Count every adjacent character pair in the starting template.
    let mut state: HashMap<(char, char), i64> = HashMap::new();
    for pair in template.chars().zip(template.chars().skip(1)) {
        *state.entry(pair).or_insert(0) += 1;
    }
    for _ in 0..10 {
        state = mutate(state, &rules);
    }
    println!("Part one: {}", calculate_score(&state));
    // 30 more rounds on top of the first 10 gives the 40-step answer.
    for _ in 0..30 {
        state = mutate(state, &rules);
    }
    println!("Part two: {}", calculate_score(&state));
}
/// Applies one insertion step: each pair (a, b) with rule (a, b) -> m is
/// replaced by the pairs (a, m) and (m, b), carrying its count to both.
/// Panics when a pair has no rule (the puzzle input always has one).
fn mutate(state: HashMap<(char, char), i64>, map: &HashMap<(char, char), char>) -> HashMap<(char, char), i64> {
    let mut next = HashMap::new();
    for ((left, right), count) in state {
        match map.get(&(left, right)) {
            Some(&mid) => {
                *next.entry((left, mid)).or_insert(0) += count;
                *next.entry((mid, right)).or_insert(0) += count;
            }
            None => panic!("None mutating pairs not implemented"),
        }
    }
    next
}
/// Scores the polymer: most-common element count minus least-common count.
/// Summing pair counts tallies every element twice except the string's two
/// endpoints, hence the round-up halving of each extreme before subtracting.
fn calculate_score(state: &HashMap<(char, char), i64>) -> i64 {
    let mut counts: HashMap<char, i64> = HashMap::new();
    for (&(a, b), &n) in state {
        *counts.entry(a).or_insert(0) += n;
        *counts.entry(b).or_insert(0) += n;
    }
    let ceil_half = |x: i64| (x + x % 2) / 2;
    let highest = counts.values().copied().max().unwrap();
    let lowest = counts.values().copied().min().unwrap();
    ceil_half(highest) - ceil_half(lowest)
}
use libc;
use crate::mmapstring::*;
use crate::x::*;
// Status codes returned by `charconv`.
pub const MAIL_CHARCONV_ERROR_CONV: libc::c_uint = 3;
pub const MAIL_CHARCONV_ERROR_MEMORY: libc::c_uint = 2;
pub const MAIL_CHARCONV_ERROR_UNKNOWN_CHARSET: libc::c_uint = 1;
pub const MAIL_CHARCONV_NO_ERROR: libc::c_uint = 0;
/* *
 * define your own conversion.
 * - result is big enough to contain your converted string
 * - result_len contain the maximum size available (out value must contain the final converted size)
 * - your conversion return an error code based on upper enum values
 */
// Optional global hook: when set, `charconv` tries this user-supplied
// conversion first and only falls back to iconv when it reports
// MAIL_CHARCONV_ERROR_UNKNOWN_CHARSET. Argument order (per the call site in
// `charconv`): tocode, fromcode, input, input_len, output, in/out output_len.
pub static mut extended_charconv: Option<
    unsafe fn(
        _: *const libc::c_char,
        _: *const libc::c_char,
        _: *const libc::c_char,
        _: size_t,
        _: *mut libc::c_char,
        _: *mut size_t,
    ) -> libc::c_int,
> = None;
/// Converts `str` (`length` bytes in charset `fromcode`) to charset `tocode`,
/// storing a freshly allocated, NUL-terminated buffer in `*result`.
///
/// Returns one of the MAIL_CHARCONV_* codes; on MAIL_CHARCONV_NO_ERROR the
/// caller owns `*result` and must free() it. Translated from C by c2rust —
/// the control flow intentionally mirrors the original.
pub unsafe fn charconv(
    mut tocode: *const libc::c_char,
    mut fromcode: *const libc::c_char,
    mut str: *const libc::c_char,
    mut length: size_t,
    mut result: *mut *mut libc::c_char,
) -> libc::c_int {
    let mut conv: iconv_t = 0 as *mut libc::c_void;
    let mut r: size_t = 0;
    let mut pout: *mut libc::c_char = 0 as *mut libc::c_char;
    let mut out_size: size_t = 0;
    let mut old_out_size: size_t = 0;
    let mut count: size_t = 0;
    let mut out: *mut libc::c_char = 0 as *mut libc::c_char;
    let mut res: libc::c_int = 0;
    // Remap known-problematic charset aliases (e.g. GB2312 -> GBK).
    fromcode = get_valid_charset(fromcode);
    // A user-installed conversion hook takes precedence over iconv. Only an
    // UNKNOWN_CHARSET result from the hook falls through to iconv below.
    if extended_charconv.is_some() {
        let mut result_length: size_t = 0;
        // Worst-case expansion budget: 6 output bytes per input byte, +1 for
        // the trailing NUL.
        result_length = length.wrapping_mul(6i32 as libc::size_t);
        *result = malloc(
            length
                .wrapping_mul(6i32 as libc::size_t)
                .wrapping_add(1i32 as libc::size_t),
        ) as *mut libc::c_char;
        if (*result).is_null() {
            res = MAIL_CHARCONV_ERROR_MEMORY as libc::c_int
        } else {
            res = extended_charconv.expect("non-null function pointer")(
                tocode,
                fromcode,
                str,
                length,
                *result,
                &mut result_length,
            );
            if res != MAIL_CHARCONV_NO_ERROR as libc::c_int {
                free(*result as *mut libc::c_void);
            } else {
                // Shrink to the actual converted size; a realloc failure is
                // tolerated (the oversized buffer is simply kept).
                out = realloc(
                    *result as *mut libc::c_void,
                    result_length.wrapping_add(1i32 as libc::size_t),
                ) as *mut libc::c_char;
                if !out.is_null() {
                    *result = out
                }
                *(*result).offset(result_length as isize) = '\u{0}' as i32 as libc::c_char
            }
        }
        if res != MAIL_CHARCONV_ERROR_UNKNOWN_CHARSET as libc::c_int {
            return res;
        }
    }
    conv = iconv_open(tocode, fromcode);
    if conv == -1i32 as iconv_t {
        res = MAIL_CHARCONV_ERROR_UNKNOWN_CHARSET as libc::c_int
    } else {
        // Same 6x + NUL worst-case budget as the hook path above.
        out_size = (6i32 as libc::size_t).wrapping_mul(length);
        out = malloc(out_size.wrapping_add(1i32 as libc::size_t)) as *mut libc::c_char;
        if out.is_null() {
            res = MAIL_CHARCONV_ERROR_MEMORY as libc::c_int
        } else {
            pout = out;
            old_out_size = out_size;
            // '?' is substituted for bytes that cannot be converted.
            r = mail_iconv(
                conv,
                &mut str,
                &mut length,
                &mut pout,
                &mut out_size,
                0 as *mut *mut libc::c_char,
                b"?\x00" as *const u8 as *const libc::c_char as *mut libc::c_char,
            );
            if r == -1i32 as size_t {
                res = MAIL_CHARCONV_ERROR_CONV as libc::c_int;
                free(out as *mut libc::c_void);
            } else {
                iconv_close(conv);
                *pout = '\u{0}' as i32 as libc::c_char;
                // Bytes actually written = initial capacity - remaining.
                count = old_out_size.wrapping_sub(out_size);
                pout = realloc(
                    out as *mut libc::c_void,
                    count.wrapping_add(1i32 as libc::size_t),
                ) as *mut libc::c_char;
                if !pout.is_null() {
                    out = pout
                }
                *result = out;
                return MAIL_CHARCONV_NO_ERROR as libc::c_int;
            }
        }
        // Error paths only: the success path above closed conv and returned.
        iconv_close(conv);
    }
    return res;
}
/// iconv() wrapper that keeps converting past invalid byte sequences by
/// substituting replacement text: first each candidate in `inrepls` for the
/// offending input byte, then `outrepl` (e.g. "?") written to the output.
///
/// Returns the (iconv-style) count of substituted/irreversible conversions,
/// updating the buffer cursors and remaining-byte counts through the
/// pointers. NOTE(review): the retry triggers on raw errno 92 — EILSEQ on
/// Apple/BSD targets, but on Linux EILSEQ is 84, so this check looks
/// platform-specific; confirm the intended target.
unsafe fn mail_iconv(
    mut cd: iconv_t,
    mut inbuf: *mut *const libc::c_char,
    mut inbytesleft: *mut size_t,
    mut outbuf: *mut *mut libc::c_char,
    mut outbytesleft: *mut size_t,
    mut inrepls: *mut *mut libc::c_char,
    mut outrepl: *mut libc::c_char,
) -> size_t {
    let mut ret: size_t = 0i32 as size_t;
    let mut ret1: size_t = 0;
    /* XXX - force const to mutable */
    let mut ib: *mut libc::c_char = *inbuf as *mut libc::c_char;
    let mut ibl: size_t = *inbytesleft;
    let mut ob: *mut libc::c_char = *outbuf;
    let mut obl: size_t = *outbytesleft;
    loop {
        ret1 = iconv(cd, &mut ib, &mut ibl, &mut ob, &mut obl);
        if ret1 != -1i32 as size_t {
            ret = (ret as libc::size_t).wrapping_add(ret1) as size_t as size_t
        }
        // Stalled mid-stream with both buffers non-empty and errno 92:
        // treat as an invalid input sequence and try replacements.
        let err = std::io::Error::last_os_error().raw_os_error();
        if 0 != ibl && 0 != obl && err == Some(92) {
            if !inrepls.is_null() {
                /* Try replacing the input */
                let mut t: *mut *mut libc::c_char = 0 as *mut *mut libc::c_char;
                t = inrepls;
                // Walk the NULL-terminated list of candidate replacements;
                // one "works" when iconv consumes it completely (ibl1 == 0).
                while !(*t).is_null() {
                    let mut ib1: *mut libc::c_char = *t;
                    let mut ibl1: size_t = strlen(*t);
                    let mut ob1: *mut libc::c_char = ob;
                    let mut obl1: size_t = obl;
                    iconv(cd, &mut ib1, &mut ibl1, &mut ob1, &mut obl1);
                    if 0 == ibl1 {
                        // Skip the offending input byte and keep the output
                        // produced by the replacement.
                        ib = ib.offset(1isize);
                        ibl = ibl.wrapping_sub(1);
                        ob = ob1;
                        obl = obl1;
                        ret = ret.wrapping_add(1);
                        break;
                    } else {
                        t = t.offset(1isize)
                    }
                }
                // Non-null *t means the break above fired: retry conversion.
                if !(*t).is_null() {
                    continue;
                }
            }
            if !outrepl.is_null() {
                /* Try replacing the output */
                let mut n: size_t = strlen(outrepl);
                if n <= obl {
                    // Emit the replacement text verbatim, skip one input
                    // byte, and resume converting.
                    memcpy(ob as *mut libc::c_void, outrepl as *const libc::c_void, n);
                    ib = ib.offset(1isize);
                    ibl = ibl.wrapping_sub(1);
                    ob = ob.offset(n as isize);
                    obl = (obl as libc::c_ulong).wrapping_sub(n as u64) as size_t;
                    ret = ret.wrapping_add(1);
                    continue;
                }
            }
        }
        // Done (or unrecoverable): publish cursor positions to the caller.
        *inbuf = ib;
        *inbytesleft = ibl;
        *outbuf = ob;
        *outbytesleft = obl;
        return ret;
    }
}
/// Maps charset aliases that iconv may not recognise onto equivalent names
/// it does (e.g. `GB2312` -> `GBK`). Comparison is case-insensitive via
/// `strcasecmp`; an unknown name is returned unchanged.
unsafe fn get_valid_charset(mut fromcode: *const libc::c_char) -> *const libc::c_char {
    // Case-insensitive equality against a NUL-terminated byte literal.
    unsafe fn eq(code: *const libc::c_char, name: &[u8]) -> bool {
        strcasecmp(code, name.as_ptr() as *const libc::c_char) == 0i32
    }
    if eq(fromcode, b"GB2312\x00") || eq(fromcode, b"GB_2312-80\x00") {
        fromcode = b"GBK\x00" as *const u8 as *const libc::c_char
    } else if eq(fromcode, b"iso-8859-8-i\x00")
        || eq(fromcode, b"iso_8859-8-i\x00")
        || eq(fromcode, b"iso8859-8-i\x00")
        || eq(fromcode, b"iso-8859-8-e\x00")
        || eq(fromcode, b"iso_8859-8-e\x00")
        || eq(fromcode, b"iso8859-8-e\x00")
    {
        // Both the "-i" (implicit bidi) and "-e" (explicit bidi) variants map
        // to the plain iso-8859-8 name.
        fromcode = b"iso-8859-8\x00" as *const u8 as *const libc::c_char
    } else if eq(fromcode, b"ks_c_5601-1987\x00") {
        fromcode = b"euckr\x00" as *const u8 as *const libc::c_char
    } else if eq(fromcode, b"iso-2022-jp\x00") {
        fromcode = b"iso-2022-jp-2\x00" as *const u8 as *const libc::c_char
    }
    fromcode
}
/// Converts `length` bytes of `str` from charset `fromcode` into `tocode`.
///
/// On `MAIL_CHARCONV_NO_ERROR`, `*result` receives a NUL-terminated buffer
/// backed by a ref-counted `MMAPString` (release it with
/// [`charconv_buffer_free`]) and `*result_len` its length in bytes.
///
/// An application-installed `extended_charconv` hook takes precedence; iconv
/// is the fallback. The output buffer is sized at 6 bytes per input byte plus
/// a NUL — presumably the worst-case expansion (TODO confirm).
///
/// Fix over the original: the `iconv_t` descriptor was leaked on the
/// allocation-failure and conversion-failure paths (the sibline conversion
/// routine closes it there); it is now closed on every path exactly once.
pub unsafe fn charconv_buffer(
    mut tocode: *const libc::c_char,
    mut fromcode: *const libc::c_char,
    mut str: *const libc::c_char,
    mut length: size_t,
    mut result: *mut *mut libc::c_char,
    mut result_len: *mut size_t,
) -> libc::c_int {
    let mut conv: iconv_t = 0 as *mut libc::c_void;
    let mut iconv_r: size_t = 0;
    let mut r: libc::c_int = 0;
    let mut out: *mut libc::c_char = 0 as *mut libc::c_char;
    let mut pout: *mut libc::c_char = 0 as *mut libc::c_char;
    let mut out_size: size_t = 0;
    let mut old_out_size: size_t = 0;
    let mut count: size_t = 0;
    let mut res: libc::c_int = 0;
    let mut mmapstr: *mut MMAPString = 0 as *mut MMAPString;
    // Rewrite charset aliases iconv may not recognise (e.g. GB2312 -> GBK).
    fromcode = get_valid_charset(fromcode);
    // First give the application-installed conversion hook a chance.
    if extended_charconv.is_some() {
        let mut result_length: size_t = 0;
        result_length = length.wrapping_mul(6i32 as usize);
        mmapstr = mmap_string_sized_new(result_length.wrapping_add(1i32 as libc::size_t));
        *result_len = 0i32 as size_t;
        if mmapstr.is_null() {
            // NOTE(review): no early return here, so an allocation failure in
            // the hook path falls through to the iconv path below, while the
            // hook's UNKNOWN_CHARSET answer returns directly — confirm intended.
            res = MAIL_CHARCONV_ERROR_MEMORY as libc::c_int
        } else {
            res = extended_charconv.expect("non-null function pointer")(
                tocode,
                fromcode,
                str,
                length,
                (*mmapstr).str_0,
                &mut result_length,
            );
            if res != MAIL_CHARCONV_ERROR_UNKNOWN_CHARSET as libc::c_int {
                if res == MAIL_CHARCONV_NO_ERROR as libc::c_int {
                    *result = (*mmapstr).str_0;
                    // Presumably registers the buffer so charconv_buffer_free()
                    // can later resolve it from the raw pointer — confirm.
                    res = mmap_string_ref(mmapstr);
                    if res < 0i32 {
                        res = MAIL_CHARCONV_ERROR_MEMORY as libc::c_int;
                        mmap_string_free(mmapstr);
                    } else {
                        mmap_string_set_size(mmapstr, result_length);
                        *result_len = result_length
                    }
                } else {
                    mmap_string_free(mmapstr);
                }
            } else {
                mmap_string_free(mmapstr);
            }
            return res;
        }
    }
    // Fallback: plain iconv conversion.
    conv = iconv_open(tocode, fromcode);
    if conv == -1i32 as iconv_t {
        res = MAIL_CHARCONV_ERROR_UNKNOWN_CHARSET as libc::c_int
    } else {
        out_size = (6i32 as libc::size_t).wrapping_mul(length);
        mmapstr = mmap_string_sized_new(out_size.wrapping_add(1i32 as libc::size_t));
        if mmapstr.is_null() {
            // Fix: the opened converter was leaked on this path.
            iconv_close(conv);
            res = MAIL_CHARCONV_ERROR_MEMORY as libc::c_int
        } else {
            out = (*mmapstr).str_0;
            pout = out;
            old_out_size = out_size;
            // Convert, substituting '?' for characters the target charset
            // cannot represent.
            iconv_r = mail_iconv(
                conv,
                &mut str,
                &mut length,
                &mut pout,
                &mut out_size,
                0 as *mut *mut libc::c_char,
                b"?\x00" as *const u8 as *const libc::c_char as *mut libc::c_char,
            );
            if iconv_r == -1i32 as size_t {
                // Fix: the opened converter was leaked on this path too.
                iconv_close(conv);
                res = MAIL_CHARCONV_ERROR_CONV as libc::c_int
            } else {
                iconv_close(conv);
                // NUL-terminate; bytes produced = capacity consumed.
                *pout = '\u{0}' as i32 as libc::c_char;
                count = old_out_size.wrapping_sub(out_size);
                r = mmap_string_ref(mmapstr);
                if r < 0i32 {
                    res = MAIL_CHARCONV_ERROR_MEMORY as libc::c_int
                } else {
                    *result = out;
                    *result_len = count;
                    // Success: skip the mmap_string_free below — the caller
                    // now owns the buffer via charconv_buffer_free().
                    return MAIL_CHARCONV_NO_ERROR as libc::c_int;
                }
            }
            mmap_string_free(mmapstr);
        }
    }
    return res;
}
/// Releases a buffer previously handed out by `charconv_buffer`, dropping the
/// reference taken via `mmap_string_ref` on the success path there.
pub unsafe fn charconv_buffer_free(mut str: *mut libc::c_char) {
    mmap_string_unref(str);
}
|
/// Identifier of a shopping cart.
pub type CartId = String;
/// Code identifying a purchasable item (SKU-like).
pub type ItemCode = String;
/// Number of units of an item on a cart line.
pub type Quantity = u32;
/// Price amount as a signed integer; unit is unspecified here — TODO confirm.
pub type Amount = i32;
/// A purchasable item.
#[derive(Debug, Clone)]
pub enum Item {
    /// A product identified by `code`, carrying its price.
    Product { code: ItemCode, price: Amount },
}
/// One line of an active cart: an item plus the quantity ordered.
#[derive(Debug, Clone)]
pub struct CartLine {
    // The item on this line.
    item: Item,
    // Units ordered; kept > 0 by `Cart::change_qty` (zero removes the line).
    qty: Quantity,
}
/// State machine of a shopping cart.
#[derive(Debug, Clone)]
pub enum Cart {
    /// No cart exists yet.
    Nothing,
    /// A created cart with no lines.
    Empty { id: CartId },
    /// A cart holding at least one line.
    Active { id: CartId, lines: Vec<CartLine> },
}
/// Domain events emitted by [`Cart`] state transitions.
#[derive(Debug, Clone)]
pub enum Event {
    /// A cart was created.
    Created { id: CartId },
    /// An item was added as a new line with quantity `qty`.
    ItemAdded { item: Item, qty: Quantity },
    /// A line was removed; `qty` is the quantity that was on the line.
    ItemRemoved { item: Item, qty: Quantity },
    /// The quantity of an existing line changed from `old_qty` to `new_qty`.
    QtyChanged { item: Item, new_qty: Quantity, old_qty: Quantity },
}
impl Item {
    /// The code identifying this item.
    pub fn code(&self) -> &ItemCode {
        match *self {
            Self::Product { ref code, .. } => code,
        }
    }
}
/// Result of a transition: the new state plus the emitted event, or `None`
/// when the transition is not allowed from the current state.
type CartOutput = Option<(Cart, Event)>;
impl Cart {
pub fn create(&self, id: CartId) -> CartOutput {
match self {
Self::Nothing => {
let event = Event::Created { id: id.clone() };
let state = Self::Empty { id: id.clone() };
Some((state, event))
}
_ => None
}
}
pub fn change_qty<F>(&self, item_code: ItemCode, qty: Quantity, find: F) -> CartOutput
where
F: Fn(&ItemCode) -> Option<Item>
{
match self {
Self::Empty { id } if qty > 0 => {
Self::find_item(&item_code, find)
.map(|item| {
let event = Event::ItemAdded { item: item.clone(), qty };
let state = Self::Active {
id: id.clone(),
lines: vec![ CartLine { item, qty } ],
};
(state, event)
})
}
Self::Active { id, lines } => {
let trg_line = lines
.iter()
.find(|&d| d.item_code() == &item_code);
match trg_line {
Some(line) if qty == 0 => {
let event = Event::ItemRemoved { item: line.item.to_owned(), qty: line.qty };
let new_lines = lines
.iter()
.cloned()
.filter(|l| l.item_code().ne(&item_code))
.collect::<Vec<_>>();
if new_lines.is_empty() {
Some((Self::Empty { id: id.clone() }, event))
} else {
Some((Self::Active { id: id.clone(), lines: new_lines }, event))
}
}
Some(line) if qty != line.qty => {
let event = Event::QtyChanged { item: line.item.clone(), new_qty: qty, old_qty: line.qty };
let new_lines = lines
.iter()
.map(|l|
if l.item_code() == line.item_code() {
CartLine { qty, ..line.to_owned() }
} else {
l.clone()
}
)
.collect::<Vec<_>>();
Some((Self::Active { id: id.clone(), lines: new_lines }, event))
}
None if qty > 0 => {
Self::find_item(&item_code, find)
.map(|item| {
let event = Event::ItemAdded { item: item.clone(), qty };
let state = Self::Active {
id: id.clone(),
lines: [
lines.clone(),
vec![CartLine { item, qty }]
].concat()
};
(state, event)
})
}
_ => None
}
}
_ => None
}
}
fn find_item<F>(item_code: &ItemCode, find: F) -> Option<Item>
where
F: Fn(&ItemCode) -> Option<Item>
{
find(item_code).filter(|i| i.code() == item_code)
}
}
impl CartLine {
    /// Convenience accessor: the code of the item on this line.
    pub fn item_code(&self) -> &ItemCode {
        let Self { item, .. } = self;
        item.code()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: the former `assert!(false)` failure markers (flagged by clippy's
    // `assertions_on_constants` and carrying no diagnostic value) are replaced
    // by `expect`/`panic!` with messages naming the violated expectation.

    #[test]
    fn create_from_nothing() {
        let (state, event) = Cart::Nothing
            .create("cart-1".to_string())
            .expect("creating from Nothing must succeed");
        match state {
            Cart::Empty { id } => assert_eq!(id, "cart-1"),
            other => panic!("expected Cart::Empty, got {:?}", other),
        }
        match event {
            Event::Created { id } => assert_eq!(id, "cart-1"),
            other => panic!("expected Event::Created, got {:?}", other),
        }
    }

    #[test]
    fn create_from_other() {
        // Creating is only legal from `Nothing`.
        let state = Cart::Empty { id: "".to_string() };
        assert!(state.create("cart-1".to_string()).is_none());
    }

    #[test]
    fn add_item_to_empty() {
        let state = Cart::Empty { id: "cart-1".to_string() };
        let item1 = Item::Product { code: "item-1".to_string(), price: 1000 };
        let (new_state, event) = state
            .change_qty("item-1".to_string(), 2, |_| Some(item1.clone()))
            .expect("adding a positive quantity to an empty cart must succeed");
        match new_state {
            Cart::Active { id, lines } => {
                assert_eq!(id, "cart-1");
                assert_eq!(lines.len(), 1);
                assert_eq!(lines[0].item_code(), "item-1");
                assert_eq!(lines[0].qty, 2);
            }
            other => panic!("expected Cart::Active, got {:?}", other),
        }
        match event {
            Event::ItemAdded { item, qty } => {
                assert_eq!(item.code(), "item-1");
                assert_eq!(qty, 2);
            }
            other => panic!("expected Event::ItemAdded, got {:?}", other),
        }
    }

    #[test]
    fn add_item_with_find_different_code() {
        // `find_item` rejects lookups whose returned code does not match.
        let state = Cart::Empty { id: "cart-1".to_string() };
        let item1 = Item::Product { code: "item-1".to_string(), price: 1000 };
        let r = state.change_qty("item-2".to_string(), 2, |_| Some(item1.clone()));
        assert!(r.is_none());
    }

    #[test]
    fn add_zero_qty_to_empty() {
        let state = Cart::Empty { id: "cart-1".to_string() };
        let item1 = Item::Product { code: "item-1".to_string(), price: 1000 };
        let r = state.change_qty("item-1".to_string(), 0, |_| Some(item1.clone()));
        assert!(r.is_none());
    }

    #[test]
    fn add_item_to_active() {
        let state = Cart::Active { id: "cart-1".to_string(), lines: vec![
            CartLine { item: Item::Product { code: "item-1".to_string(), price: 100 }, qty: 2 }
        ] };
        let item2 = Item::Product { code: "item-2".to_string(), price: 200 };
        let (new_state, event) = state
            .change_qty("item-2".to_string(), 1, |_| Some(item2.clone()))
            .expect("adding a new item to an active cart must succeed");
        match new_state {
            Cart::Active { id, lines } => {
                assert_eq!(id, "cart-1");
                assert_eq!(lines.len(), 2);
                let line = lines.last().expect("two lines expected");
                assert_eq!(line.item_code(), "item-2");
                assert_eq!(line.qty, 1);
            }
            other => panic!("expected Cart::Active, got {:?}", other),
        }
        match event {
            Event::ItemAdded { item, qty } => {
                assert_eq!(item.code(), "item-2");
                assert_eq!(qty, 1);
            }
            other => panic!("expected Event::ItemAdded, got {:?}", other),
        }
    }

    #[test]
    fn change_qty() {
        let state = Cart::Active { id: "cart-1".to_string(), lines: vec![
            CartLine { item: Item::Product { code: "item-1".to_string(), price: 100 }, qty: 2 }
        ] };
        let item1 = Item::Product { code: "item-1".to_string(), price: 100 };
        let (new_state, event) = state
            .change_qty("item-1".to_string(), 1, |_| Some(item1.clone()))
            .expect("changing an existing line's quantity must succeed");
        match new_state {
            Cart::Active { id, lines } => {
                assert_eq!(id, "cart-1");
                assert_eq!(lines.len(), 1);
                assert_eq!(lines[0].item_code(), "item-1");
                assert_eq!(lines[0].qty, 1);
            }
            other => panic!("expected Cart::Active, got {:?}", other),
        }
        match event {
            Event::QtyChanged { item, new_qty, old_qty } => {
                assert_eq!(item.code(), "item-1");
                assert_eq!(new_qty, 1);
                assert_eq!(old_qty, 2);
            }
            other => panic!("expected Event::QtyChanged, got {:?}", other),
        }
    }

    #[test]
    fn change_qty_with_same_qty() {
        // Setting the quantity a line already has is a no-op.
        let state = Cart::Active { id: "cart-1".to_string(), lines: vec![
            CartLine { item: Item::Product { code: "item-1".to_string(), price: 100 }, qty: 2 }
        ] };
        let item1 = Item::Product { code: "item-1".to_string(), price: 100 };
        let r = state.change_qty("item-1".to_string(), 2, |_| Some(item1.clone()));
        assert!(r.is_none());
    }

    #[test]
    fn remove_only_item() {
        let state = Cart::Active { id: "cart-1".to_string(), lines: vec![
            CartLine { item: Item::Product { code: "item-1".to_string(), price: 100 }, qty: 2 }
        ] };
        let item1 = Item::Product { code: "item-1".to_string(), price: 100 };
        let (new_state, event) = state
            .change_qty("item-1".to_string(), 0, |_| Some(item1.clone()))
            .expect("removing the only line must succeed");
        match new_state {
            Cart::Empty { id } => assert_eq!(id, "cart-1"),
            other => panic!("expected Cart::Empty, got {:?}", other),
        }
        match event {
            Event::ItemRemoved { item, qty } => {
                assert_eq!(item.code(), "item-1");
                assert_eq!(qty, 2);
            }
            other => panic!("expected Event::ItemRemoved, got {:?}", other),
        }
    }

    #[test]
    fn remove_item() {
        let state = Cart::Active { id: "cart-1".to_string(), lines: vec![
            CartLine { item: Item::Product { code: "item-1".to_string(), price: 100 }, qty: 2 },
            CartLine { item: Item::Product { code: "item-2".to_string(), price: 200 }, qty: 1 }
        ] };
        let item1 = Item::Product { code: "item-1".to_string(), price: 100 };
        let (new_state, event) = state
            .change_qty("item-1".to_string(), 0, |_| Some(item1.clone()))
            .expect("removing one of two lines must succeed");
        match new_state {
            Cart::Active { id, lines } => {
                assert_eq!(id, "cart-1");
                assert_eq!(lines.len(), 1);
                assert_eq!(lines[0].item_code(), "item-2");
                assert_eq!(lines[0].qty, 1);
            }
            other => panic!("expected Cart::Active, got {:?}", other),
        }
        match event {
            Event::ItemRemoved { item, qty } => {
                assert_eq!(item.code(), "item-1");
                assert_eq!(qty, 2);
            }
            other => panic!("expected Event::ItemRemoved, got {:?}", other),
        }
    }
}
//! Convert all music in music/ to ogg.
use std::env;
use std::fs;
use std::fs::File;
use std::path::Path;
use std::process::Command;
use tar::Builder;
fn main() {
    // Input formats we are willing to transcode to ogg.
    const MUSIC_EXTENSIONS: [&str; 5] = ["mp3", "opus", "flac", "m4a", "ogg"];

    let out_dir = env::var("OUT_DIR").unwrap();
    let music_dir = Path::new("music");
    let output_tar_path = Path::new(&out_dir).join("music.tar");
    let output_tar_file = File::create(output_tar_path).expect("Couldn't create output tar file");
    let mut output_archive = Builder::new(output_tar_file);
    for entry in fs::read_dir(music_dir).expect("Couldn't open music dir") {
        let good_entry = entry.expect("Something weird happened");
        if !good_entry
            .file_type()
            .expect("Couldn't get filetype for some reason")
            .is_file()
        {
            continue;
        }
        let input_path = good_entry.path();
        // Robustness fix: files without an extension used to panic on
        // `unwrap()`; treat them like any other unknown extension instead.
        let input_ext = input_path.extension().and_then(|e| e.to_str());
        match input_ext {
            Some(ext) if MUSIC_EXTENSIONS.contains(&ext) => {
                let output_file_path = input_path.with_extension("ogg");
                let output_file_name = output_file_path.file_name().unwrap();
                let output_final_path = Path::new(&out_dir).join(output_file_name);
                println!(
                    "Converting {input} to {output}",
                    input = input_path.to_string_lossy(),
                    output = output_final_path.to_string_lossy()
                );
                // `-n` tells ffmpeg never to overwrite: an already-converted
                // file from a previous build makes it exit non-zero, which is
                // fine — we only require the output file to exist afterwards.
                let ffmpeg_output = Command::new("ffmpeg")
                    .arg("-n")
                    .arg("-i")
                    .arg(&input_path)
                    .arg(&output_final_path)
                    .output()
                    // Fix: the old `expect` message embedded a
                    // `{ffmpeg_command}` placeholder that is never interpolated.
                    .expect("failed to run ffmpeg; is it installed and on PATH?");
                // Fix: the exit status used to be ignored entirely, so a real
                // conversion failure only surfaced as a confusing panic when
                // opening the (missing) output file below.
                if !ffmpeg_output.status.success() && !output_final_path.exists() {
                    panic!(
                        "ffmpeg failed to produce {}: {}",
                        output_final_path.to_string_lossy(),
                        String::from_utf8_lossy(&ffmpeg_output.stderr)
                    );
                }
                let mut f = File::open(&output_final_path)
                    .expect("Couldn't open output file for reading");
                output_archive
                    .append_file(output_file_name, &mut f)
                    .unwrap();
            }
            _ => {
                println!(
                    "Not converting file with unknown extension: {:?}",
                    good_entry
                );
            }
        }
    }
}
|
use std::{error::Error, fmt, result::Result};
use smartstring::alias::String;
use crate::parser::{Lexer, LexerError, Spanning, Token};
/// Error while parsing a GraphQL query
///
/// Rendered for humans through its [`fmt::Display`] implementation; the
/// originating lexer failure (if any) is exposed via [`Error::source`].
#[derive(Debug, Eq, PartialEq)]
pub enum ParseError {
    /// An unexpected token occurred in the source
    // TODO: Previously was `Token<'a>`.
    // Revisit on `graphql-parser` integration.
    UnexpectedToken(String),
    /// The input source abruptly ended
    UnexpectedEndOfFile,
    /// An error during tokenization occurred
    LexerError(LexerError),
    /// A scalar of unexpected type occurred in the source
    ExpectedScalarError(&'static str),
}
impl fmt::Display for ParseError {
    /// Renders a human-readable description of the parse failure.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::LexerError(e) => e.fmt(f),
            Self::ExpectedScalarError(e) => e.fmt(f),
            Self::UnexpectedToken(token) => write!(f, "Unexpected \"{token}\""),
            Self::UnexpectedEndOfFile => f.write_str("Unexpected end of input"),
        }
    }
}
impl Error for ParseError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        // Exhaustive on purpose: adding a variant forces a decision here.
        match self {
            Self::LexerError(e) => Some(e),
            Self::UnexpectedToken(_)
            | Self::UnexpectedEndOfFile
            | Self::ExpectedScalarError(_) => None,
        }
    }
}
impl ParseError {
    /// Creates a [`ParseError::UnexpectedToken`] out of the provided [`Token`].
    #[must_use]
    pub fn unexpected_token(token: Token<'_>) -> Self {
        use std::fmt::Write as _;

        let mut rendered = String::new();
        // PANIC: writing into an in-memory string can only fail on allocation
        // error, so unwrapping is OK here.
        write!(rendered, "{token}").unwrap();
        Self::UnexpectedToken(rendered)
    }
}
#[doc(hidden)]
/// Result of parsing a node, carrying the node's source span on success.
pub type ParseResult<T> = Result<Spanning<T>, Spanning<ParseError>>;
#[doc(hidden)]
/// Result of parsing a node without span information attached on success.
pub type UnlocatedParseResult<T> = Result<T, Spanning<ParseError>>;
#[doc(hidden)]
/// Result of parsing an optional node.
pub type OptionParseResult<T> = Result<Option<Spanning<T>>, Spanning<ParseError>>;
#[doc(hidden)]
#[derive(Debug)]
pub struct Parser<'a> {
    // Remaining tokens, consumed front-to-back. `peek`/`next_token` rely on
    // the stream ending with `Token::EndOfFile` — NOTE(review): invariant
    // presumably upheld by the lexer; confirm it always emits that token.
    tokens: Vec<Spanning<Token<'a>>>,
}
impl<'a> Parser<'a> {
    #[doc(hidden)]
    pub fn new(lexer: &mut Lexer<'a>) -> Result<Parser<'a>, Spanning<LexerError>> {
        // Tokenize the whole input eagerly, bailing out on the first error.
        let tokens = lexer.collect::<Result<Vec<_>, _>>()?;
        Ok(Parser { tokens })
    }
    #[doc(hidden)]
    pub fn peek(&self) -> &Spanning<Token<'a>> {
        // Invariant: the stream always ends with `EndOfFile`, so at least one
        // token remains at all times.
        &self.tokens[0]
    }
    #[doc(hidden)]
    pub fn next_token(&mut self) -> ParseResult<Token<'a>> {
        if self.tokens.len() > 1 {
            Ok(self.tokens.remove(0))
        } else {
            // Only the trailing `EndOfFile` token is left; refuse to consume it.
            Err(Spanning::start_end(
                &self.peek().start,
                &self.peek().end,
                ParseError::UnexpectedEndOfFile,
            ))
        }
    }
    #[doc(hidden)]
    pub fn expect(&mut self, expected: &Token) -> ParseResult<Token<'a>> {
        if &self.peek().item == expected {
            self.next_token()
        } else {
            Err(self.next_token()?.map(ParseError::unexpected_token))
        }
    }
    #[doc(hidden)]
    pub fn skip(
        &mut self,
        expected: &Token,
    ) -> Result<Option<Spanning<Token<'a>>>, Spanning<ParseError>> {
        if &self.peek().item == expected {
            return Ok(Some(self.next_token()?));
        }
        if self.peek().item == Token::EndOfFile {
            return Err(Spanning::zero_width(
                &self.peek().start,
                ParseError::UnexpectedEndOfFile,
            ));
        }
        Ok(None)
    }
    #[doc(hidden)]
    pub fn delimited_list<T, F>(
        &mut self,
        opening: &Token,
        parser: F,
        closing: &Token,
    ) -> ParseResult<Vec<Spanning<T>>>
    where
        T: fmt::Debug,
        F: Fn(&mut Parser<'a>) -> ParseResult<T>,
    {
        let start_pos = self.expect(opening)?.start;
        let mut elements = Vec::new();
        // Zero or more elements, terminated by `closing`.
        loop {
            match self.skip(closing)? {
                Some(Spanning { end, .. }) => {
                    return Ok(Spanning::start_end(&start_pos, &end, elements));
                }
                None => elements.push(parser(self)?),
            }
        }
    }
    #[doc(hidden)]
    pub fn delimited_nonempty_list<T, F>(
        &mut self,
        opening: &Token,
        parser: F,
        closing: &Token,
    ) -> ParseResult<Vec<Spanning<T>>>
    where
        T: fmt::Debug,
        F: Fn(&mut Parser<'a>) -> ParseResult<T>,
    {
        let start_pos = self.expect(opening)?.start;
        let mut elements = Vec::new();
        // One or more elements: parse first, then look for the terminator.
        loop {
            elements.push(parser(self)?);
            if let Some(Spanning { end, .. }) = self.skip(closing)? {
                return Ok(Spanning::start_end(&start_pos, &end, elements));
            }
        }
    }
    #[doc(hidden)]
    pub fn unlocated_delimited_nonempty_list<T, F>(
        &mut self,
        opening: &Token,
        parser: F,
        closing: &Token,
    ) -> ParseResult<Vec<T>>
    where
        T: fmt::Debug,
        F: Fn(&mut Parser<'a>) -> UnlocatedParseResult<T>,
    {
        let start_pos = self.expect(opening)?.start;
        let mut elements = Vec::new();
        // Like `delimited_nonempty_list`, but the items carry no spans.
        loop {
            elements.push(parser(self)?);
            if let Some(Spanning { end, .. }) = self.skip(closing)? {
                return Ok(Spanning::start_end(&start_pos, &end, elements));
            }
        }
    }
    #[doc(hidden)]
    pub fn expect_name(&mut self) -> ParseResult<&'a str> {
        match self.peek().item {
            Token::Name(_) => Ok(self.next_token()?.map(|token| match token {
                Token::Name(name) => name,
                _ => panic!("Internal parse error in `expect_name`"),
            })),
            Token::EndOfFile => Err(Spanning::start_end(
                &self.peek().start,
                &self.peek().end,
                ParseError::UnexpectedEndOfFile,
            )),
            _ => Err(self.next_token()?.map(ParseError::unexpected_token)),
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.